From d38f3dbe744db78df69945f40fe976e6eb871732 Mon Sep 17 00:00:00 2001 From: msinn Date: Fri, 31 Mar 2023 16:51:19 +0200 Subject: [PATCH 001/775] Set repo version to post 1.9.5 release --- __init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/__init__.py b/__init__.py index 389159c96..a0661b725 100755 --- a/__init__.py +++ b/__init__.py @@ -1,6 +1,6 @@ def plugin_release(): - return '1.9.5' + return '1.9.5.1' def plugin_branch(): - return 'master' + return 'develop' From edb4253decb19f6e09df3cc03f182e4bb5f0df46 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Apr 2023 10:27:13 +0200 Subject: [PATCH 002/775] gpio plugin: fix user_doc reference to configuration --- gpio/user_doc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gpio/user_doc.rst b/gpio/user_doc.rst index b04e129fc..ea5c7bcae 100755 --- a/gpio/user_doc.rst +++ b/gpio/user_doc.rst @@ -15,7 +15,7 @@ gpio Konfiguration ============= -Die Informationen zur Konfiguration des Plugins sind unter :doc:`/user/plugins_doc/config/gpio` beschrieben. +Die Informationen zur Konfiguration des Plugins sind unter :doc:`/plugins_doc/config/gpio` beschrieben. Beschreibung ============ From b8dccd1f6064fc58dce801a1ca932366730220be Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Apr 2023 10:30:08 +0200 Subject: [PATCH 003/775] New Plugin: denon for Denon AV Receivers, based on SmartDevicePlugin --- denon/__init__.py | 155 + denon/commands.py | 430 ++ denon/datatypes.py | 93 + denon/plugin.yaml | 8033 ++++++++++++++++++++++++ denon/user_doc.rst | 97 + denon/webif/static/img/plugin_logo.svg | 88 + 6 files changed, 8896 insertions(+) create mode 100755 denon/__init__.py create mode 100755 denon/commands.py create mode 100755 denon/datatypes.py create mode 100755 denon/plugin.yaml create mode 100755 denon/user_doc.rst create mode 100755 denon/webif/static/img/plugin_logo.svg diff --git a/denon/__init__.py b/denon/__init__.py new file mode 100755 index 000000000..d39fb55a2 --- /dev/null +++ b/denon/__init__.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2016 +######################################################################### +# This file is part of SmartHomeNG +# +# Denon AV plugin for SmartDevicePlugin class +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG If not, see . 
+######################################################################### + +import builtins +import os +import sys + +if __name__ == '__main__': + builtins.SDP_standalone = True + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) + +else: + builtins.SDP_standalone = False + +from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NET_TCP_CLI, CONN_SER_ASYNC) +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone + +if not SDP_standalone: + from .webif import WebInterface + +CUSTOM_INPUT_NAME_COMMAND = 'custom_inputnames' + + +class denon(SmartDevicePlugin): + """ Device class for Denon AV. + + Most of the work is done by the base class, so we only set default parameters + for the connection (to be overwritten by device attributes from the plugin + configuration) and add a fixed terminator byte to outgoing datagrams. + + The know-how is in the commands.py (and some DT_ classes...) + """ + + PLUGIN_VERSION = '1.0.0' + + def on_connect(self, by=None): + self.logger.debug("Checking for custom input names.") + self.send_command('general.custom_inputnames') + + def _set_device_defaults(self): + + self._custom_inputnames = {} + + # set our own preferences concerning connections + if PLUGIN_ATTR_NET_HOST in self._parameters and self._parameters[PLUGIN_ATTR_NET_HOST]: + self._parameters[PLUGIN_ATTR_CONNECTION] = CONN_NET_TCP_CLI + elif PLUGIN_ATTR_SERIAL_PORT in self._parameters and self._parameters[PLUGIN_ATTR_SERIAL_PORT]: + self._parameters[PLUGIN_ATTR_CONNECTION] = CONN_SER_ASYNC + if PLUGIN_ATTR_CONN_TERMINATOR in self._parameters: + b = self._parameters[PLUGIN_ATTR_CONN_TERMINATOR].encode() + b = b.decode('unicode-escape').encode() + self._parameters[PLUGIN_ATTR_CONN_TERMINATOR] = b + + # we need to receive data via callback, as the "reply" can be unrelated to + # the sent command. Getting it as return value would assign it to the wrong + # command and discard it... so break the "return result"-chain + def _send(self, data_dict): + self._connection.send(data_dict) + return None + + def _transform_send_data(self, data=None, **kwargs): + if data: + try: + data['limit_response'] = self._parameters.get(PLUGIN_ATTR_CONN_TERMINATOR, b'\r') + data['payload'] = f'{data.get("payload")}\r' + except Exception as e: + self.logger.error(f'ERROR {e}') + return data + + def on_data_received(self, by, data, command=None): + + commands = None + if command is not None: + self.logger.debug(f'received data "{data}" from {by} for command {command}') + commands = [command] + else: + # command == None means that we got raw data from a callback and + # don't know yet to which command this belongs to. So find out... + self.logger.debug(f'received data "{data}" from {by} without command specification') + + # command can be a string (classic single command) or + # - new - a list of strings if multiple commands are identified + # in that case, work on all strings + commands = self._commands.get_command_from_reply(data) + if not commands: + if self._discard_unknown_command: + self.logger.debug(f'data "{data}" did not identify a known command, ignoring it') + return + else: + self.logger.debug(f'data "{data}" did not identify a known command, forwarding it anyway for {self._unknown_command}') + self._dispatch_callback(self._unknown_command, data, by) + + # TODO: remove later? 
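The terminator handling and outgoing framing implemented above are easiest to see in isolation. A minimal standalone sketch, assuming the terminator arrives from the configuration as the two literal characters backslash and r (raw_terminator and frame() are illustrative names, not part of the SmartDevicePlugin API):

    # Round trip from _set_device_defaults: turn a literal '\r' escape sequence
    # into the real carriage-return byte via unicode-escape.
    raw_terminator = '\\r'                                   # assumed configuration value
    term = raw_terminator.encode().decode('unicode-escape').encode()
    assert term == b'\r'

    def frame(payload: str, terminator: bytes = term) -> dict:
        # mirrors _transform_send_data: append CR to the datagram and limit
        # the expected reply to one terminator-delimited line
        return {'payload': f'{payload}\r', 'limit_response': terminator}

    print(frame('PW?'))    # {'payload': 'PW?\r', 'limit_response': b'\r'}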
+ assert(isinstance(commands, list)) + + # process all commands + for command in commands: + self._check_for_custominputs(command, data) + custom = None + if self.custom_commands: + custom = self._get_custom_value(command, data) + + base_command = command + value = None + try: + if CUSTOM_INPUT_NAME_COMMAND in command: + value = self._custom_inputnames + else: + value = self._commands.get_shng_data(command, data) + except OSError as e: + self.logger.warning(f'received data "{data}" for command {command}, error {e} occurred while converting. Discarding data.') + else: + self.logger.debug(f'received data "{data}" for command {command} converted to value {value}') + self._dispatch_callback(command, value, by) + + self._process_additional_data(base_command, data, value, custom, by) + + def _check_for_custominputs(self, command, data): + if CUSTOM_INPUT_NAME_COMMAND in command and isinstance(data, str): + tmp = data.split(' ', 1) + src = tmp[0][5:] + name = tmp[1] + self._custom_inputnames[src] = name + + +if __name__ == '__main__': + s = Standalone(denon, sys.argv[0]) diff --git a/denon/commands.py b/denon/commands.py new file mode 100755 index 000000000..3f5250bc2 --- /dev/null +++ b/denon/commands.py @@ -0,0 +1,430 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +""" commands for dev pioneer + +Most commands send a string (fixed for reading, attached data for writing) +while parsing the response works by extracting the needed string part by +regex. Some commands translate the device data into readable values via +lookups. +""" + +models = { + 'ALL': ['general.custom_inputnames', 'general.power', 'general.setupmenu', 'general.soundmode', 'general.inputsignal', 'general.inputrate', 'general.inputformat', 'general.inputresolution', 'general.outputresolution', 'general.ecomode', + 'tuner.preset', 'tuner.presetup', 'tuner.presetdown', 'tuner.frequency', 'tuner.frequencyup', 'tuner.frequencydown', 'tuner.band', + 'zone1.control', + 'zone1.settings.sound.general.audioinput', 'zone1.settings.sound.general.cinema_eq', 'zone1.settings.sound.general.hdmiaudioout', 'zone1.settings.sound.general.dynamicrange', 'zone1.settings.sound.general.subwoofertoggle', 'zone1.settings.sound.general.subwoofer', 'zone1.settings.sound.general.subwooferup', 'zone1.settings.sound.general.subwooferdown', 'zone1.settings.sound.general.lfe', 'zone1.settings.sound.general.lfeup', 'zone1.settings.sound.general.lfedown', 'zone1.settings.sound.tone_control', + 'zone1.settings.sound.channel_level.front_left', 'zone1.settings.sound.channel_level.front_right', 'zone1.settings.sound.channel_level.front_height_left', 'zone1.settings.sound.channel_level.front_height_right', 'zone1.settings.sound.channel_level.front_center', 'zone1.settings.sound.channel_level.surround_left', 'zone1.settings.sound.channel_level.surround_right', 'zone1.settings.sound.channel_level.surroundback_left', 'zone1.settings.sound.channel_level.surroundback_right', 'zone1.settings.sound.channel_level.rear_height_left', 'zone1.settings.sound.channel_level.rear_height_right', 'zone1.settings.sound.channel_level.subwoofer', + 'zone2.control', 'zone2.settings.sound.general.hdmiout'], + 'AVR-X6300H': ['info', 'tuner.hd', 'zone1.settings.sound.channel_level.subwoofer2', 'zone1.settings.sound.general.speakersetup', 'zone1.settings.sound.general.dialogenhance', + 'zone1.settings.video', + 'zone2.settings.sound.tone_control', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.general.HPF', + 'zone3'], + 
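A short worked example of the custom input name handling in _check_for_custominputs above; the 'SSFUN...' reply string is an assumed sample, not output captured from a device:

    # Assumed sample reply: 'SSFUN' + source id + space + user-defined name
    custom_inputnames = {}
    data = 'SSFUNSAT/CBL Sky Receiver'
    tmp = data.split(' ', 1)
    src = tmp[0][5:]                  # strip the leading 'SSFUN' -> 'SAT/CBL'
    name = tmp[1]                     # -> 'Sky Receiver'
    custom_inputnames[src] = name
    print(custom_inputnames)          # {'SAT/CBL': 'Sky Receiver'}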
'AVR-X4300H': ['zone1.settings.sound.channel_level.subwoofer2', 'zone1.settings.video', 'zone1.settings.sound.general.dialogtoggle', 'zone1.settings.sound.general.dialog', 'zone1.settings.sound.general.dialogup', 'zone1.settings.sound.general.dialogdown', 'zone1.settings.sound.general.speakersetup', + 'zone2.settings.sound.tone_control', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.general.HPF', + 'zone3'], + 'AVR-X3300W': ['tuner.title', 'tuner.album', 'tuner.artist', 'general.display', + 'zone1.settings.sound.channel_level.subwoofer2', 'zone1.settings.video.aspectratio', 'zone1.settings.video.hdmiresolution', 'zone1.settings.video.videoresolution', 'zone1.settings.video.videoinput', 'zone1.settings.video.pictureenhancer', 'zone1.settings.video.videoprocessingmode', + 'zone1.settings.sound.general.dialogtoggle', 'zone1.settings.sound.general.dialog', 'zone1.settings.sound.general.dialogup', 'zone1.settings.sound.general.dialogdown', 'zone2.settings.sound.tone_control', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.general.HPF'], + 'AVR-X2300W': ['tuner.title', 'tuner.album', 'tuner.artist', 'general.display', + 'zone1.settings.video', 'zone1.settings.sound.general.dialogtoggle', 'zone1.settings.sound.general.dialog', 'zone1.settings.sound.general.dialogup', 'zone1.settings.sound.general.dialogdown', + 'zone2.settings.sound.channel_level'], + 'AVR-X1300W': ['tuner.title', 'tuner.album', 'tuner.artist', 'general.display', + 'zone1.settings.sound.general.dialogtoggle', 'zone1.settings.sound.general.dialog', 'zone1.settings.sound.general.dialogup', 'zone1.settings.sound.general.dialogdown'] +} + +commands = { + 'info': { + 'fullmodel': {'read': True, 'write': False, 'read_cmd': 'NSFRN ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'NSFRN\s(.*)', 'item_attrs': {'initial': True}}, + 'model': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'VIALL(AVR.*)', 'item_attrs': {'initial': True}}, + 'serialnumber': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLS/N\.(.*)'}, + 'main': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLMAIN:(.*)'}, + 'mainfbl': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLMAINFBL:(.*)'}, + 'dsp1': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLDSP1:(.*)'}, + 'dsp2': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLDSP2:(.*)'}, + 'dsp3': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLDSP3:(.*)'}, + 'dsp4': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLDSP4:(.*)'}, + 'apld': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLAPLD:(.*)'}, + 'vpld': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLVPLD:(.*)'}, + 'guidat': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLGUIDAT:(.*)'}, + 'heosversion': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 
'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLHEOSVER:(.*)'}, + 'heosbuild': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLHEOSBLD:(.*)'}, + 'heosmod': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLHEOSMOD:(.*)'}, + 'heoscnf': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLHEOSCNF:(.*)'}, + 'heoslanguage': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLHEOSLCL:(.*)'}, + 'mac': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLMAC:(.*)'}, + 'wifimac': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLWIFIMAC:(.*)'}, + 'btmac': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLBTMAC:(.*)'}, + 'audyif': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLAUDYIF:(.*)'}, + 'productid': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLPRODUCTID:(.*)'}, + 'packageid': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'VIALLPACKAGEID:(.*)'}, + 'cmp': {'read': True, 'write': False, 'read_cmd': 'VIALL?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'VIALLCMP:(.*)'}, + 'region': {'read': True, 'write': False, 'read_cmd': 'SYMODTUN ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SYMODTUN\s(.*)', 'item_attrs': {'initial': True}}, + }, + 'general': { + 'custom_inputnames': {'read': True, 'write': False, 'read_cmd': 'SSFUN ?', 'item_type': 'dict', 'dev_datatype': 'str', 'reply_pattern': 'SSFUN(.*)', 'item_attrs': {'item_template': 'custom_inputnames'}}, + 'power': {'read': True, 'write': True, 'read_cmd': 'PW?', 'write_cmd': 'PW{VALUE}', 'item_type': 'bool', 'dev_datatype': 'str', 'reply_pattern': 'PW{LOOKUP}', 'lookup': 'POWER'}, + 'setupmenu': {'read': True, 'write': True, 'read_cmd': 'MNMEN?', 'write_cmd': 'MNMEN {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'MNMEN (ON|OFF)'}, + 'display': {'read': True, 'write': False, 'read_cmd': 'NSE', 'item_type': 'str', 'dev_datatype': 'DenonDisplay', 'reply_pattern': 'NSE(.*)'}, + 'soundmode': {'read': True, 'write': False, 'read_cmd': 'SSSMG ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SSSMG {LOOKUP}', 'lookup': 'SOUNDMODE', 'item_attrs': {'initial': True}}, + 'allzonestereo': {'read': True, 'write': False, 'read_cmd': 'MNZST?', 'write_cmd': 'MNZST {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': r'MNZST {ON|OFF}', 'item_attrs': {'initial': True}}, + 'inputsignal': {'read': True, 'write': False, 'read_cmd': 'SSINFAISSIG ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SSINFAISSIG {LOOKUP}', 'lookup': 'INPUTSIGNAL', 'item_attrs': {'initial': True}}, + 'inputrate': {'read': True, 'write': False, 'read_cmd': 'SSINFAISFSV ?', 'item_type': 'num', 'dev_datatype': 'convert0', 'reply_pattern': r'SSINFAISFSV (\d{2,3}|NON)', 'item_attrs': {'initial': True}}, + 'inputformat': {'read': True, 'write': False, 'read_cmd': 'SSINFAISFOR ?', 
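Read one of these entries end to end: 'general.power' sends 'PW?' and decodes the answer through the 'POWER' lookup defined further below ('ON' -> True, 'STANDBY' -> False). A minimal sketch of that decode step under those assumptions (decode_power is a hypothetical helper, not part of SmartDevicePlugin, and the reply strings are examples):

    import re

    POWER = {'ON': True, 'STANDBY': False}    # lookups['ALL']['POWER'] below

    def decode_power(reply):
        # mirrors reply_pattern 'PW{LOOKUP}' with lookup 'POWER'
        match = re.fullmatch(r'PW(ON|STANDBY)', reply)
        return POWER[match.group(1)] if match else None

    print(decode_power('PWON'))        # True
    print(decode_power('PWSTANDBY'))   # False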
'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SSINFAISFOR (.*)', 'item_attrs': {'initial': True}}, + 'inputresolution': {'read': True, 'write': False, 'read_cmd': 'SSINFSIGRES ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SSINFSIGRES I(.*)', 'item_attrs': {'initial': True}}, + 'outputresolution': {'read': True, 'write': False, 'read_cmd': 'SSINFSIGRES ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SSINFSIGRES O(.*)', 'item_attrs': {'read_group_levels': 0}}, + 'ecomode': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['ON', 'OFF', 'AUTO']}, 'read_cmd': 'ECO?', 'write_cmd': 'ECO{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'ECO{VALID_LIST_CI}'}, + }, + 'tuner': { + 'title': {'read': True, 'write': False, 'read_cmd': 'NSE', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'NSE1(.*)', 'item_attrs': {'initial': True}}, + 'album': {'read': True, 'write': False, 'read_cmd': 'NSE', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'NSE4(.*)', 'item_attrs': {'read_group_levels': 0}}, + 'artist': {'read': True, 'write': False, 'read_cmd': 'NSE', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'NSE2(.*)', 'item_attrs': {'read_group_levels': 0}}, + 'preset': {'read': True, 'write': True, 'read_cmd': 'TPAN?', 'item_type': 'num', 'write_cmd': 'TPAN{RAW_VALUE:02}', 'dev_datatype': 'convert0', 'reply_pattern': r'TPAN(\d{2}|OFF)', 'item_attrs': {'initial': True}}, + 'presetup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TPANUP', 'dev_datatype': 'raw'}, + 'presetdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TPANDOWN', 'dev_datatype': 'raw'}, + 'frequency': {'read': True, 'write': True, 'read_cmd': 'TFAN?', 'item_type': 'num', 'write_cmd': 'TFAN{RAW_VALUE:06}', 'dev_datatype': 'num', 'reply_pattern': r'TFAN(\d{6})', 'item_attrs': {'initial': True}}, + 'frequencyup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TFANUP', 'dev_datatype': 'raw'}, + 'frequencydown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TFANDOWN', 'dev_datatype': 'raw'}, + 'band': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['AM', 'FM']}, 'read_cmd': 'TMAN?', 'item_type': 'str', 'write_cmd': 'TMAN{RAW_VALUE_UPPER}', 'dev_datatype': 'raw', 'reply_pattern': r'TMAN{VALID_LIST_CI}', 'item_attrs': {'initial': True}}, + 'tuningmode': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['AUTO', 'MANUAL']}, 'read_cmd': 'TMAN?', 'item_type': 'str', 'write_cmd': 'TMAN{RAW_VALUE_UPPER}', 'dev_datatype': 'raw', 'reply_pattern': r'TMAN{VALID_LIST_CI}'}, + 'hd': { + 'channel': {'read': True, 'write': True, 'read_cmd': 'TFHD?', 'item_type': 'num', 'write_cmd': 'TFHD{RAW_VALUE:06}', 'dev_datatype': 'num', 'reply_pattern': r'TFHD(\d{6})', 'item_attrs': {'initial': True}}, + 'channelup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TFHDUP', 'dev_datatype': 'raw'}, + 'channeldown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TFHDDOWN', 'dev_datatype': 'raw'}, + 'multicastchannel': {'read': True, 'write': True, 'read_cmd': 'TFHD?', 'item_type': 'num', 'write_cmd': 'TFHDMC{RAW_VALUE:01}', 'dev_datatype': 'num', 'reply_pattern': r'TFHDMC(\d{1})'}, + 'presetmemory': {'read': True, 'write': True, 'item_type': 'num', 'write_cmd': 'TPHDMEM{RAW_VALUE:02}', 'dev_datatype': 'convert0', 'reply_pattern': r'TPHDMEM(\d{2}|OFF)'}, + 'preset': {'read': True, 'write': True, 
'read_cmd': 'TPHD?', 'item_type': 'num', 'write_cmd': 'TPHD{RAW_VALUE:02}', 'dev_datatype': 'convert0', 'reply_pattern': r'TPHD(\d{2}|OFF)'}, + 'presetup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TPHDUP', 'dev_datatype': 'raw'}, + 'presetdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'TPHDDOWN', 'dev_datatype': 'raw'}, + 'band': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['AM', 'FM', 'AUTO', 'MANUAL', 'AUTOHD', 'ANAAUTO', 'ANAMANU']}, 'read_cmd': 'TMHD?', 'item_type': 'str', 'write_cmd': 'TMHD{RAW_VALUE_UPPER}', 'dev_datatype': 'num', 'reply_pattern': r'TMHD{VALID_LIST_CI}', 'item_attrs': {'initial': True}} + } + + }, + 'zone1': { + 'control': { + 'power': {'read': True, 'write': True, 'read_cmd': 'ZM?', 'write_cmd': 'ZM{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'ZM(ON|OFF)', 'item_attrs': {'initial': True}}, + 'mute': {'read': True, 'write': True, 'read_cmd': 'MU?', 'write_cmd': 'MU{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'MU(ON|OFF)', 'item_attrs': {'initial': True}}, + 'volume': {'read': True, 'write': True, 'read_cmd': 'MV?', 'write_cmd': 'MV{VALUE}', 'item_type': 'num', 'dev_datatype': 'DenonVol', 'reply_pattern': r'MV(\d{2,3})', 'cmd_settings': {'force_min': 0.0, 'valid_max': 98.0}, 'item_attrs': {'initial': True}}, + 'volumeup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'MVUP', 'dev_datatype': 'raw'}, + 'volumedown': {'read': False, 'write': True, 'write_cmd': 'MVDOWN', 'item_type': 'bool', 'dev_datatype': 'raw'}, + 'volumemax': {'opcode': '{VALUE}', 'read': True, 'write': False, 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'MVMAX (\d{2,3})', 'item_attrs': {'initial': True}}, + 'input': {'read': True, 'write': True, 'read_cmd': 'SI?', 'write_cmd': 'SI{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SI{LOOKUP}', 'lookup': 'INPUT', 'item_attrs': {'item_template': 'input', 'initial': True}}, + 'listeningmode': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['MOVIE', 'MUSIC', 'GAME', 'DIRECT', 'PURE DIRECT', 'STEREO', 'AUTO', 'DOLBY DIGITAL', 'DOLBY SURROUND', 'DTS SURROUND', 'NEURAL:X', 'AURO3D', 'AURO2DSURR', 'MCH STEREO', 'ROCK ARENA', 'JAZZ CLUB', 'MONO MOVIE', 'MATRIX', 'VIDEO GAME', 'VIRTUAL', 'LEFT', 'RIGHT']}, 'read_cmd': 'MS?', 'write_cmd': 'MS{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'\s?MS(.*)', 'item_attrs': {'initial': True}}, + 'sleep': {'read': True, 'write': True, 'item_type': 'num', 'read_cmd': 'SLP?', 'write_cmd': 'SLP{VALUE}', 'dev_datatype': 'convert0', 'reply_pattern': r'SLP(\d{3}|OFF)', 'cmd_settings': {'force_min': 0, 'force_max': 120}, 'item_attrs': {'initial': True}}, + 'standby': {'read': True, 'write': True, 'item_type': 'num', 'read_cmd': 'STBY?', 'write_cmd': 'STBY{VALUE}', 'dev_datatype': 'DenonStandby1', 'reply_pattern': r'STBY(\d{2}M|OFF)', 'cmd_settings': {'valid_list_ci': [0, 15, 30, 60]}, 'item_attrs': {'initial': True}}, + }, + 'settings': { + 'sound': { + 'channel_level': { + 'front_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVFL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVFL (\d{2,3})'}, + 'front_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVFR {VALUE}', 'dev_datatype': 
'remap50to0', 'reply_pattern': r'CVFR (\d{2,3})'}, + 'front_height_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVFHL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVFHL (\d{2,3})'}, + 'front_height_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVFHR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVFHR (\d{2,3})'}, + 'front_center': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVC {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVC (\d{2,3})'}, + 'surround_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSL (\d{2,3})'}, + 'surround_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSR (\d{2,3})'}, + 'surroundback_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSBL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSBL (\d{2,3})'}, + 'surroundback_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSBR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSBR (\d{2,3})'}, + 'rear_height_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVRHL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVRHL (\d{2,3})'}, + 'rear_height_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVRHR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVRHR (\d{2,3})'}, + 'subwoofer': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSW {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSW (\d{2,3})'}, + 'subwoofer2': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'read_cmd': 'CV?', 'item_type': 'num', 'write_cmd': 'CVSW2 {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'CVSW2 (\d{2,3})'} + }, + 'tone_control': { + 'tone': {'read': True, 'write': True, 'read_cmd': 'PSTONE CTRL ?', 'write_cmd': 'PSTONE CTRL {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'PSTONE CTRL (ON|OFF)'}, + 'treble': {'read': True, 'write': True, 'read_cmd': 'PSTRE ?', 'item_type': 'num', 'cmd_settings': {'force_min': -6, 'force_max': 6}, 'write_cmd': 'PSTRE {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'PSTRE (\d{2})'}, + 'trebleup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSTRE UP', 'dev_datatype': 'raw'}, + 'trebledown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSTRE DOWN', 'dev_datatype': 'raw'}, + 'bass': {'read': True, 'write': True, 'read_cmd': 'PSBAS ?', 'item_type': 'num', 'cmd_settings': {'force_min': -6, 'force_max': 6}, 
'write_cmd': 'PSBAS {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'PSBAS (\d{2})'}, + 'bassup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSBAS UP', 'dev_datatype': 'raw'}, + 'bassdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSBAS DOWN', 'dev_datatype': 'raw'} + }, + 'general': { + 'cinema_eq': {'read': True, 'write': True, 'read_cmd': 'PSCINEMA EQ. ?', 'write_cmd': 'PSCINEMA EQ.{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'PSCINEMA EQ.(ON|OFF)'}, + 'dynamic_eq': {'read': True, 'write': True, 'read_cmd': 'PSDYNEQ ?', 'write_cmd': 'PSDYNEQ {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'PSDYNEQ (ON|OFF)'}, + 'multeq': {'read': True, 'write': True, 'read_cmd': 'PSMULTEQ: ?', 'write_cmd': 'PSMULTEQ: {RAW_VALUE_UPPER}', 'item_type': 'bool', 'dev_datatype': 'str', 'cmd_settings': {'valid_list_ci': ['AUDYSSEY', 'BYP.LR', 'FLAT', 'OFF']}, 'reply_pattern': 'PSMULTEQ:{VALID_LIST_CI}'}, + 'dynamic_vol': {'read': True, 'write': True, 'read_cmd': 'DYNVOL ?', 'write_cmd': 'DYNVOL {VALUE}', 'item_type': 'bool', 'dev_datatype': 'num', 'reply_pattern': 'DYNVOL {LOOKUP}', 'lookup': 'DYNVOL'}, + 'speakersetup': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['FL', 'HF']}, 'read_cmd': 'PSSP: ?', 'write_cmd': 'PSSP:{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'PSSP:{VALID_LIST_CI}'}, + 'hdmiaudioout': {'read': True, 'write': True, 'item_type': 'str', 'read_cmd': 'VSAUDIO ?', 'write_cmd': 'VSAUDIO {RAW_VALUE_UPPER}', 'dev_datatype': 'str', 'reply_pattern': 'VSAUDIO {VALID_LIST_CI}', 'cmd_settings': {'valid_list_ci': ['TV', 'AMP']}}, + 'dynamicrange': {'read': True, 'write': True, 'read_cmd': 'PSDRC ?', 'item_type': 'num', 'write_cmd': 'PSDRC {VALUE}', 'dev_datatype': 'str', 'reply_pattern': 'PSDRC {LOOKUP}', 'lookup': 'DYNAM'}, + 'dialogtoggle': {'read': True, 'write': True, 'read_cmd': 'PSDIL ?', 'write_cmd': 'PSDIL {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'PSDIL (ON|OFF)'}, + 'dialog': {'read': True, 'write': True, 'read_cmd': 'PSDIL ?', 'item_type': 'num', 'cmd_settings': {'force_min': -12, 'force_max': 12}, 'write_cmd': 'PSDIL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'PSDIL (\d{2})'}, + 'dialogup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSDIL UP', 'dev_datatype': 'raw'}, + 'dialogdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSDIL DOWN', 'dev_datatype': 'raw'}, + 'dialogenhance': {'read': True, 'write': True, 'read_cmd': 'PSDEH ?', 'write_cmd': 'PSDEH {VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': 'PSDEH {LOOKUP}', 'lookup': 'DIALOG'}, + 'subwoofertoggle': {'read': True, 'write': True, 'read_cmd': 'PSSWL ?', 'write_cmd': 'PSSWL {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'PSSWL (ON|OFF)'}, + 'subwoofer': {'read': True, 'write': True, 'read_cmd': 'PSSWL ?', 'item_type': 'num', 'cmd_settings': {'force_min': -12, 'valid_max': 12}, 'write_cmd': 'PSSWL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'PSSWL (\d{2})'}, + 'subwooferup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSSWL UP', 'dev_datatype': 'raw'}, + 'subwooferdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSSWL DOWN', 'dev_datatype': 'raw'}, + 'lfe': {'read': True, 'write': True, 'read_cmd': 'PSLFE ?', 'item_type': 'num', 'cmd_settings': {'force_min': -10, 'valid_max': 
3}, 'write_cmd': 'PSLFE {RAW_VALUE:02}', 'dev_datatype': 'int', 'reply_pattern': r'PSLFE (\d{2})'}, + 'lfeup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSLFE UP', 'dev_datatype': 'raw'}, + 'lfedown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'PSLFE DOWN', 'dev_datatype': 'raw'}, + 'digitalinput': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['AUTO', 'PCM', 'DTS']}, 'read_cmd': 'DC?', 'write_cmd': 'DC{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'DC{VALID_LIST_CI}'}, + 'audioinput': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['AUTO', 'HDMI', 'DIGITAL', 'ANALOG']}, 'read_cmd': 'SD?', 'write_cmd': 'SD{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SD{VALID_LIST_CI}'} + } + }, + 'video': { + 'aspectratio': {'read': True, 'write': True, 'read_cmd': 'VSASP ?', 'write_cmd': 'VSASP{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'VSASP{LOOKUP}', 'lookup': 'ASPECT'}, + 'hdmimonitor': {'read': True, 'write': True, 'cmd_settings': {'force_min': 0, 'force_max': 2}, 'read_cmd': 'VSMONI ?', 'write_cmd': 'VSMONI{VALUE}', 'item_type': 'num', 'dev_datatype': 'convertAuto', 'reply_pattern': 'VSMONI(AUTO|1|2)'}, + 'hdmiresolution': {'read': True, 'write': True, 'read_cmd': 'VSSCH ?', 'write_cmd': 'VSSCH{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'VSSCH{LOOKUP}', 'lookup': 'RESOLUTION'}, + 'videoprocessingmode': {'read': True, 'write': True, 'item_type': 'str', 'read_cmd': 'VSVPM ?', 'write_cmd': 'VSVPM{VALUE}', 'dev_datatype': 'str', 'reply_pattern': 'VSVPM{LOOKUP}', 'lookup': 'VIDEOPROCESS'}, + 'videoresolution': {'read': True, 'write': True, 'read_cmd': 'VSSC ?', 'write_cmd': 'VSSC{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'VSSC{LOOKUP}', 'lookup': 'RESOLUTION'}, + 'pictureenhancer': {'read': True, 'write': True, 'read_cmd': 'PVENH ?', 'item_type': 'num', 'cmd_settings': {'force_min': 0, 'force_max': 12}, 'write_cmd': 'PVENH {RAW_VALUE:02}', 'dev_datatype': 'int', 'reply_pattern': r'PVENH (\d{2})'}, + 'videoinput': {'read': True, 'write': True, 'cmd_settings': {'valid_list_ci': ['DVD', 'BD', 'TV', 'SAT/CBL', 'MPLAY', 'GAME' 'AUX1', 'AUX2', 'CD', 'ON', 'OFF']}, 'read_cmd': 'SV?', 'write_cmd': 'SV{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'SV{VALID_LIST_CI}'} + } + } + }, + 'zone2': { + 'control': { + 'power': {'read': True, 'write': True, 'read_cmd': 'Z2?', 'write_cmd': 'Z2{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z2(ON|OFF)'}, + 'mute': {'read': True, 'write': True, 'read_cmd': 'Z2MU?', 'write_cmd': 'Z2MU{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z2MU(ON|OFF)'}, + 'volume': {'read': True, 'write': True, 'read_cmd': 'Z2?', 'write_cmd': 'Z2{VALUE}', 'item_type': 'num', 'dev_datatype': 'DenonVol', 'reply_pattern': r'Z2(\d{2,3})', 'cmd_settings': {'force_min': 0.0, 'valid_max': 98.0}}, + 'volumeup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z2UP', 'dev_datatype': 'raw'}, + 'volumedown': {'read': False, 'write': True, 'write_cmd': 'Z2DOWN', 'item_type': 'bool', 'dev_datatype': 'raw'}, + 'input': {'read': True, 'write': True, 'read_cmd': 'Z2?', 'write_cmd': 'Z2{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'Z2{LOOKUP}', 'lookup': 'INPUT', 'item_attrs': {'item_template': 'input'}}, + 'sleep': {'read': True, 'write': True, 'item_type': 'num', 
'read_cmd': 'Z2SLP?', 'write_cmd': 'Z2SLP{VALUE}', 'dev_datatype': 'convert0', 'reply_pattern': r'Z2SLP(\d{3}|OFF)', 'cmd_settings': {'force_min': 0, 'force_max': 120}}, + 'standby': {'read': True, 'write': True, 'item_type': 'num', 'read_cmd': 'Z2STBY?', 'write_cmd': 'Z2STBY{VALUE}', 'dev_datatype': 'DenonStandby', 'reply_pattern': r'Z2STBY(\dH|OFF)', 'cmd_settings': {'valid_list_ci': [0, 2, 4, 8]}}, + }, + 'settings': { + 'sound': { + 'channel_level': { + 'front_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12, 'valid_max': 12}, 'read_cmd': 'Z2CV?', 'item_type': 'num', 'write_cmd': 'Z2CVFL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z2CVFL (\d{2})'}, + 'front_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12, 'valid_max': 12}, 'read_cmd': 'Z2CV?', 'item_type': 'num', 'write_cmd': 'Z2CVFR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z2CVFR (\d{2})'} + }, + 'tone_control': { + 'treble': {'read': True, 'write': True, 'read_cmd': 'Z2PSTRE ?', 'item_type': 'num', 'cmd_settings': {'force_min': -10, 'force_max': 10}, 'write_cmd': 'Z2PSTRE {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z2PSTRE (\d{2})'}, + 'trebleup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z2PSTRE UP', 'dev_datatype': 'raw'}, + 'trebledown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z2PSTRE DOWN', 'dev_datatype': 'raw'}, + 'bass': {'read': True, 'write': True, 'read_cmd': 'Z2PSBAS ?', 'item_type': 'num', 'cmd_settings': {'force_min': -10, 'force_max': 10}, 'write_cmd': 'Z2PSBAS {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z2PSBAS (\d{2})'}, + 'bassup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z2PSBAS UP', 'dev_datatype': 'raw'}, + 'bassdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z2PSBAS DOWN', 'dev_datatype': 'raw'} + }, + 'general': { + 'hdmiout': {'read': True, 'write': True, 'item_type': 'str', 'read_cmd': 'Z2HDA?', 'write_cmd': 'Z2HDA {RAW_VALUE_UPPER}', 'dev_datatype': 'str', 'reply_pattern': 'Z2HDA {VALID_LIST_CI}', 'cmd_settings': {'valid_list_ci': ['THR', 'PCM']}}, + 'HPF': {'read': True, 'write': True, 'read_cmd': 'Z2HPF?', 'write_cmd': 'Z2HPF{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z2HPF(ON|OFF)'} + } + } + } + }, + 'zone3': { + 'control': { + 'power': {'read': True, 'write': True, 'read_cmd': 'Z3?', 'write_cmd': 'Z3{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z3(ON|OFF)'}, + 'mute': {'read': True, 'write': True, 'read_cmd': 'Z3MU?', 'write_cmd': 'Z3MU{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z3MU(ON|OFF)'}, + 'volume': {'read': True, 'write': True, 'read_cmd': 'Z3?', 'write_cmd': 'Z3{VALUE}', 'item_type': 'num', 'dev_datatype': 'DenonVol', 'reply_pattern': r'Z3(\d{2,3})', 'cmd_settings': {'force_min': 0.0, 'valid_max': 98.0}}, + 'volumeup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z3UP', 'dev_datatype': 'raw'}, + 'volumedown': {'read': False, 'write': True, 'write_cmd': 'Z3DOWN', 'item_type': 'bool', 'dev_datatype': 'raw'}, + 'sleep': {'read': True, 'write': True, 'item_type': 'num', 'read_cmd': 'Z3SLP?', 'write_cmd': 'Z3SLP{VALUE}', 'dev_datatype': 'convert0', 'reply_pattern': r'Z3SLP(\d{3}|OFF)', 'cmd_settings': {'force_min': 0, 'valid_max': 120}}, + 'standby': {'read': True, 'write': True, 'item_type': 'num', 'read_cmd': 'Z3STBY?', 'write_cmd': 'Z3STBY{VALUE}', 'dev_datatype': 'DenonStandby', 
'reply_pattern': r'Z3STBY(\dH|OFF)', 'cmd_settings': {'valid_list_ci': [0, 2, 4, 8]}}, + 'input': {'read': True, 'write': True, 'read_cmd': 'Z3?', 'write_cmd': 'Z3{RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'Z3{LOOKUP}', 'lookup': 'INPUT3', 'item_attrs': {'item_template': 'input'}} + }, + 'settings': { + 'sound': { + 'channel_level': { + 'front_left': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12, 'valid_max': 12}, 'read_cmd': 'Z3CV?', 'item_type': 'num', 'write_cmd': 'Z3CVFL {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z3CVFL (\d{2})'}, + 'front_right': {'read': True, 'write': True, 'cmd_settings': {'force_min': -12, 'valid_max': 12}, 'read_cmd': 'Z3CV?', 'item_type': 'num', 'write_cmd': 'Z3CVFR {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z3CVFR (\d{2})'} + }, + 'tone_control': { + 'treble': {'read': True, 'write': True, 'read_cmd': 'Z3PSTRE ?', 'item_type': 'num', 'cmd_settings': {'force_min': -10, 'force_max': 10}, 'write_cmd': 'Z3PSTRE {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z3PSTRE (\d{2})'}, + 'trebleup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z3PSTRE UP', 'dev_datatype': 'raw'}, + 'trebledown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z3PSTRE DOWN', 'dev_datatype': 'raw'}, + 'bass': {'read': True, 'write': True, 'read_cmd': 'Z3PSBAS ?', 'item_type': 'num', 'cmd_settings': {'force_min': -10, 'force_max': 10}, 'write_cmd': 'Z3PSBAS {VALUE}', 'dev_datatype': 'remap50to0', 'reply_pattern': r'Z3PSBAS (\d{2})'}, + 'bassup': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z3PSBAS UP', 'dev_datatype': 'raw'}, + 'bassdown': {'read': False, 'write': True, 'item_type': 'bool', 'write_cmd': 'Z3PSBAS DOWN', 'dev_datatype': 'raw'} + }, + 'general': { + 'HPF': {'read': True, 'write': True, 'read_cmd': 'Z3HPF?', 'write_cmd': 'Z3HPF{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': 'Z3HPF(ON|OFF)'}, + } + } + } + } +} + +lookups = { + 'ALL': { + 'INPUTSIGNAL': { + '01': 'Analog', + '02': 'PCM', + '03': 'Dolby Digital', + '04': 'Dolby TrueHD', + '05': 'Dolby Atmos', + '06': 'DTS', + '07': '07', + '08': 'DTS-HD Hi Res', + '09': 'DTS-HD MSTR', + '10': '10', + '11': '11', + '12': 'Unknown', + '13': 'PCM Zero', + '14': '14', + '15': 'MP3', + '16': '16', + '17': 'AAC', + '18': 'FLAC', + }, + 'RESOLUTION': { + '48P': '480p/576p', + '10I': '1080i', + '72P': '720p', + '10P': '1080p', + '10P24': '1080p:24Hz', + '4K': '4K', + '4KF': '4K(60/50)', + 'AUTO': 'Auto' + }, + 'ASPECT': { + 'NRM': '4:3', + 'FUL': '16:9' + }, + 'POWER': { + 'ON': True, + 'STANDBY': False + }, + 'SOUNDMODE': { + 'MUS': 'MUSIC', + 'MOV': 'MOVIE', + 'GAM': 'GAME', + 'PUR': 'PURE DIRECT' + }, + 'DYNAM': { + 'OFF': 0, + 'LOW': 1, + 'MID': 2, + 'HI': 3, + 'AUTO': 4 + }, + 'DYNVOL': { + 'OFF': 0, + 'LIT': 1, + 'MED': 2, + 'HEV': 3 + }, + 'DIALOG': { + 'OFF': 0, + 'LOW': 1, + 'MED': 2, + 'HIGH': 3, + 'AUTO': 4 + }, + 'VIDEOPROCESS': { + 'MOVI': 'Movie', + 'BYP': 'Bypass', + 'GAME': 'Game', + 'AUTO': 'Auto' + }, + 'INPUT': { + 'SOURCE': 'SOURCE', + 'TUNER': 'TUNER', + 'DVD': 'DVD', + 'BD': 'BD', + 'TV': 'TV', + 'SAT/CBL': 'SAT/CBL', + 'MPLAY': 'MPLAY', + 'GAME': 'GAME', + 'HDRADIO': 'HDRADIO', + 'NET': 'NET', + 'AUX1': 'AUX1', + 'BT': 'BT' + }, + 'INPUT3': { + 'SOURCE': 'SOURCE', + 'TUNER': 'TUNER', + 'PHONO': 'PHONO', + 'CD': 'CD', + 'DVD': 'DVD', + 'BD': 'BD', + 'TV': 'TV', + 'SAT/CBL': 'SAT/CBL', + 'MPLAY': 'MPLAY', + 'GAME': 'GAME', + 'NET': 'NET', + 
'AUX1': 'AUX1', + 'AUX2': 'AUX2', + 'BT': 'BT', + 'QUICK1': 'QUICK1', + 'QUICK2': 'QUICK2', + 'QUICK3': 'QUICK3', + 'QUICK4': 'QUICK4', + 'QUICK5': 'QUICK5', + 'QUICK1 MEMORY': 'QUICK1 MEMORY', + 'QUICK2 MEMORY': 'QUICK2 MEMORY', + 'QUICK3 MEMORY': 'QUICK3 MEMORY', + 'QUICK4 MEMORY': 'QUICK4 MEMORY', + 'QUICK5 MEMORY': 'QUICK5 MEMORY' + } + }, + 'AVR-X6300H': { + 'INPUT': { + 'PHONO': 'PHONO', + 'CD': 'CD', + 'AUX2': 'AUX2' + } + }, + 'AVR-X4300H': { + 'INPUT': { + 'PHONO': 'PHONO', + 'CD': 'CD', + 'AUX2': 'AUX2' + } + }, + 'AVR-X3300W': { + 'INPUT': { + 'CD': 'CD', + 'AUX2': 'AUX2', + 'IRADIO': 'IRADIO', + 'SERVER': 'SERVER', + 'FAVORITES': 'FAVORITES', + 'USB/IPOD': 'USB/IPOD', + 'USB': 'USB', + 'IPD': 'IPD', + 'IRP': 'IRP', + 'FVP': 'FVP' + } + }, + 'AVR-X2300W': { + 'INPUT': { + 'CD': 'CD', + 'AUX2': 'AUX2', + 'IRADIO': 'IRADIO', + 'SERVER': 'SERVER', + 'FAVORITES': 'FAVORITES', + 'USB/IPOD': 'USB/IPOD', + 'USB': 'USB', + 'IPD': 'IPD', + 'IRP': 'IRP', + 'FVP': 'FVP' + } + }, + 'AVR-X1300W': { + 'INPUT': { + 'IRADIO': 'IRADIO', + 'SERVER': 'SERVER', + 'FAVORITES': 'FAVORITES', + 'USB/IPOD': 'USB/IPOD', + 'USB': 'USB', + 'IPD': 'IPD', + 'IRP': 'IRP', + 'FVP': 'FVP' + } + } +} + +item_templates = { + 'custom_inputnames': { + 'cache': True, + 'reverse': { + 'type': 'dict', + 'eval': '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}', + 'update': { + 'type': 'bool', + 'eval': 'sh...timer(2, {})', + 'eval_trigger': '...' + } + } + }, + 'input': { + 'on_change': [".custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value]",], + 'custom_name': { + 'type': 'str', + 'on_change': ".. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value]" + } + } +} diff --git a/denon/datatypes.py b/denon/datatypes.py new file mode 100755 index 000000000..11dbe3490 --- /dev/null +++ b/denon/datatypes.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab + +import lib.model.sdp.datatypes as DT + + +# read only. Depending on a status field, the result is sliced. 
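The 'custom_inputnames' item template above keeps a reversed copy of the name table so a friendly name can be translated back to the device's source id. Its 'reverse' eval boils down to the following; the sample dict is an assumed illustration:

    # assumed forward table as delivered by general.custom_inputnames
    names = {'SAT/CBL': 'Sky Receiver', 'MPLAY': 'Kodi'}

    # equivalent of the template's 'reverse' eval: swap keys and values
    reverse = {} if names == {} else {v: k for (k, v) in names.items()}
    print(reverse)    # {'Sky Receiver': 'SAT/CBL', 'Kodi': 'MPLAY'}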
+class DT_DenonDisplay(DT.Datatype): + def get_shng_data(self, data, type=None, **kwargs): + infotype = data[3:4] + if infotype.isdigit(): + if infotype == 0: + data = data[4:] + elif infotype == 1: + data = data[5:] + else: + data = data[6:] + return data + + return None + + +# handle pseudo-decimal values without decimal point +class DT_DenonVol(DT.Datatype): + def get_send_data(self, data, **kwargs): + if int(data) == data: + # "real" integer + return f'{int(data):02}' + else: + # float with fractional value + return f'{int(data):02}5' + + def get_shng_data(self, data, type=None, **kwargs): + if len(data) == 3: + return int(data) / 10 + else: + return data + + +class DT_DenonStandby(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'OFF' if data == 0 else f"{data:01}H" + + def get_shng_data(self, data, type=None, **kwargs): + return 0 if data == 'OFF' else data.split('H')[0] + + +class DT_DenonStandby1(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'OFF' if data == 0 else f"{data:02}M" + + def get_shng_data(self, data, type=None, **kwargs): + return 0 if data == 'OFF' else data.split('M')[0] + + +class DT_onoff(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'ON' if data else 'OFF' + + def get_shng_data(self, data, type=None, **kwargs): + return False if data == 'OFF' else True + + +class DT_convert0(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'OFF' if data == 0 else f"{data:03}" + + def get_shng_data(self, data, type=None, **kwargs): + return 0 if data in ['OFF', 'NON'] else data + + +class DT_convertAuto(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'AUTO' if data == 0 else data + + def get_shng_data(self, data, type=None, **kwargs): + return 0 if data == 'AUTO' else data + + +class DT_remap50to0(DT.Datatype): + def get_send_data(self, data, **kwargs): + if int(data) == data: + # "real" integer + return f'{(int(data)+50):02}' + else: + # float with fractional value + return f'{(int(data)+50):02}5' + + def get_shng_data(self, data, type=None, **kwargs): + if len(data) == 3: + return int(data) / 10 - 50 + else: + return int(data) - 50 diff --git a/denon/plugin.yaml b/denon/plugin.yaml new file mode 100755 index 000000000..5f3cf4f3b --- /dev/null +++ b/denon/plugin.yaml @@ -0,0 +1,8033 @@ + +plugin: + type: interface + description: Denon AV-Receiver + maintainer: OnkelAndy + tester: Morg + state: develop + keywords: iot device + version: 1.0.0 + sh_minversion: 1.9.5.1 + py_minversion: 3.6 + multi_instance: false + restartable: true + classname: denon + +parameters: + + model: + type: str + mandatory: false + valid_list: + - '' + - AVR-X6300H + - AVR-X4300H + - AVR-X3300W + - AVR-X2300W + - AVR-X1300W + + description: + de: Modellauswahl + en: model selection + + timeout: + type: num + default: 3 + + description: + de: Timeout für Geräteantwort + en: timeout for device replies + + terminator: + type: str + default: "\r" + + description: + de: Zeilen-/Antwortbegrenzer + en: line or reply terminator + + binary: + type: bool + default: false + + description: + de: Binärer Übertragungsmodus + en: binary communication mode + + autoreconnect: + type: bool + default: true + + description: + de: Automatisches Neuverbinden bei Abbruch + en: automatic reconnect on disconnect + + autoconnect: + type: bool + default: true + + description: + de: Automatisches Verbinden bei Senden + en: automatic connect on send + + connect_retries: + type: num + default: 5 + + description: + de: Anzahl Verbindungsversuche + 
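The pseudo-decimal encodings above become clearer with concrete numbers. A standalone restatement of the conversions done by DT_DenonVol and DT_remap50to0 (the plugin itself uses the classes above; the sample values are illustrative):

    def denon_vol_send(value):                 # as in DT_DenonVol.get_send_data
        return f'{int(value):02}' if int(value) == value else f'{int(value):02}5'

    def denon_vol_recv(raw):                   # as in DT_DenonVol.get_shng_data
        return int(raw) / 10 if len(raw) == 3 else raw

    def remap50to0_recv(raw):                  # as in DT_remap50to0.get_shng_data
        return int(raw) / 10 - 50 if len(raw) == 3 else int(raw) - 50

    print(denon_vol_send(45.5))    # '455'  -> sent to the device as 'MV455'
    print(denon_vol_recv('455'))   # 45.5   <- reply 'MV455'
    print(remap50to0_recv('54'))   # 4      <- reply 'CVFL 54' = front left +4 dB
    print(remap50to0_recv('45'))   # -5     <- reply 'CVFL 45' = front left -5 dB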
en: number of connect retries + + connect_cycle: + type: num + default: 3 + + description: + de: Pause zwischen Verbindungsversuchen + en: wait time between connect retries + + host: + type: str + mandatory: false + + description: + de: Netzwerkziel/-host + en: network host + + port: + type: int + default: 23 + + description: + de: Port für Netzwerkverbindung + en: network port + + serialport: + type: str + mandatory: false + + description: + de: Serieller Anschluss (z.B. /dev/ttyUSB0 oder COM1) + en: serial port (e.g. /dev/ttyUSB0 or COM1) + + conn_type: + type: str + mandatory: false + valid_list: + - '' + - net_tcp_request + - net_tcp_client + - net_tcp_jsonrpc + - net_udp_server + - serial + - serial_async + + description: + de: Verbindungstyp + en: connection type + + command_class: + type: str + default: SDPCommandParseStr + valid_list: + - SDPCommand + - SDPCommandStr + - SDPCommandParseStr + - SDPCommandJSON + - SDPCommandViessmann + + description: + de: Klasse für Verarbeitung von Kommandos + en: class for command processing + +item_attributes: + + denon_command: + type: str + + description: + de: Legt das angegebene Kommando für das Item fest + en: Assigns the given command to the item + + denon_read: + type: bool + + description: + de: Item liest/erhält Werte vom Gerät + en: Item reads/receives data from the device + + denon_read_group: + type: list(str) + + description: + de: Weist das Item der angegebenen Gruppe zum gesammelten Lesen zu. Mehrere Gruppen können als Liste angegeben werden. + en: Assigns the item to the given group for collective reading. Multiple groups can be provided as a list. + + denon_read_cycle: + type: num + + description: + de: Konfiguriert ein Intervall in Sekunden für regelmäßiges Lesen + en: Configures a interval in seconds for cyclic read actions + + denon_read_initial: + type: bool + + description: + de: Legt fest, dass der Wert beim Start vom Gerät gelesen wird + en: Sets item value to be read from the device on startup + + denon_write: + type: bool + + description: + de: Änderung des Items werden an das Gerät gesendet + en: Changes to this item will be sent to the device + + denon_read_group_trigger: + type: str + + description: + de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit denon_command belegt werden. + en: When set to any value, all items configured for reading for the given group will update their value from the device, if group is 0, all items configured for reading will update. The item cannot be used with denon_command in parallel. + + denon_lookup: + type: str + + description: + de: Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim Start einmalig als dict oder list in das Item geschrieben. + en: The lookup table with the given name will be assigned to the item in dict or list format once on startup. + + description_long: + de: "Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim\nStart einmalig als dict oder list in das Item geschrieben.\n\n\nDurch Anhängen von \"#\" an den Namen der Tabelle kann die Art\nder Tabelle ausgewählt werden:\n- fwd liefert die Tabelle Gerät -> SmartHomeNG (Standard)\n- rev liefert die Tabelle SmartHomeNG -> Gerät\n- rci liefert die Tabelle SmarthomeNG -> Gerät in Kleinbuchstaben\n- list liefert die Liste der Namen für SmartHomeNG (z.B. 
für Auswahllisten in der Visu)" + en: "The lookup table with the given name will be assigned to the item\nin dict or list format once on startup.\n\n\nBy appending \"#\" to the tables name the type of table can\nbe selected:\n- fwd returns the table device -> SmartHomeNG (default)\n- rev returns the table SmartHomeNG -> device\n- rci returns the table SmartHomeNG -> device in lower case\n- list return the list of names for SmartHomeNG (e.g. for selection dropdowns in visu applications)" + +item_structs: + + info: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: info + + fullmodel: + type: str + denon_command: info.fullmodel + denon_read: true + denon_write: false + denon_read_group: + - info + denon_read_initial: true + + model: + type: str + denon_command: info.model + denon_read: true + denon_write: false + denon_read_group: + - info + denon_read_initial: true + + serialnumber: + type: num + denon_command: info.serialnumber + denon_read: true + denon_write: false + denon_read_group: + - info + + main: + type: str + denon_command: info.main + denon_read: true + denon_write: false + denon_read_group: + - info + + mainfbl: + type: num + denon_command: info.mainfbl + denon_read: true + denon_write: false + denon_read_group: + - info + + dsp1: + type: num + denon_command: info.dsp1 + denon_read: true + denon_write: false + denon_read_group: + - info + + dsp2: + type: num + denon_command: info.dsp2 + denon_read: true + denon_write: false + denon_read_group: + - info + + dsp3: + type: num + denon_command: info.dsp3 + denon_read: true + denon_write: false + denon_read_group: + - info + + dsp4: + type: num + denon_command: info.dsp4 + denon_read: true + denon_write: false + denon_read_group: + - info + + apld: + type: num + denon_command: info.apld + denon_read: true + denon_write: false + denon_read_group: + - info + + vpld: + type: num + denon_command: info.vpld + denon_read: true + denon_write: false + denon_read_group: + - info + + guidat: + type: num + denon_command: info.guidat + denon_read: true + denon_write: false + denon_read_group: + - info + + heosversion: + type: str + denon_command: info.heosversion + denon_read: true + denon_write: false + denon_read_group: + - info + + heosbuild: + type: num + denon_command: info.heosbuild + denon_read: true + denon_write: false + denon_read_group: + - info + + heosmod: + type: num + denon_command: info.heosmod + denon_read: true + denon_write: false + denon_read_group: + - info + + heoscnf: + type: str + denon_command: info.heoscnf + denon_read: true + denon_write: false + denon_read_group: + - info + + heoslanguage: + type: str + denon_command: info.heoslanguage + denon_read: true + denon_write: false + denon_read_group: + - info + + mac: + type: str + denon_command: info.mac + denon_read: true + denon_write: false + denon_read_group: + - info + + wifimac: + type: str + denon_command: info.wifimac + denon_read: true + denon_write: false + denon_read_group: + - info + + btmac: + type: str + denon_command: info.btmac + denon_read: true + denon_write: false + denon_read_group: + - info + + audyif: + type: num + denon_command: info.audyif + denon_read: true + denon_write: false + denon_read_group: + - info + + productid: + type: num + denon_command: info.productid + denon_read: true + denon_write: false + denon_read_group: + - info + + packageid: + type: num + denon_command: info.packageid + denon_read: true + denon_write: false + denon_read_group: + - info + + cmp: + type: str + denon_command: info.cmp + denon_read: true + 
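A small item example makes the attribute and lookup-suffix conventions above concrete; item names and the chosen command/lookup are illustrative only:

    # assumed example items (not part of plugin.yaml)
    example:
        avr_input:
            type: str
            denon_command: zone1.control.input
            denon_read: true
            denon_write: true
            denon_read_initial: true

            choices:
                # filled once on startup with the list of input names,
                # e.g. for a selection dropdown in a visu
                type: list
                denon_lookup: INPUT#list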
denon_write: false + denon_read_group: + - info + + region: + type: str + denon_command: info.region + denon_read: true + denon_write: false + denon_read_group: + - info + denon_read_initial: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' + + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - general + + display: + type: str + denon_command: general.display + denon_read: true + denon_write: false + denon_read_group: + - general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + allzonestereo: + type: bool + denon_command: general.allzonestereo + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: tuner + + title: + type: str + denon_command: tuner.title + denon_read: true + denon_write: false + denon_read_group: + - tuner + denon_read_initial: true + + album: + type: str + denon_command: tuner.album + denon_read: true + denon_write: false + + artist: + type: str + denon_command: tuner.artist + denon_read: true + denon_write: false + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - tuner + denon_read_initial: true 
+ + hd: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: tuner.hd + + channel: + type: num + denon_command: tuner.hd.channel + denon_read: true + denon_write: true + denon_read_group: + - tuner + - tuner.hd + denon_read_initial: true + + channelup: + type: bool + denon_command: tuner.hd.channelup + denon_read: false + denon_write: true + + channeldown: + type: bool + denon_command: tuner.hd.channeldown + denon_read: false + denon_write: true + + multicastchannel: + type: num + denon_command: tuner.hd.multicastchannel + denon_read: true + denon_write: true + denon_read_group: + - tuner + - tuner.hd + + presetmemory: + type: num + denon_command: tuner.hd.presetmemory + denon_read: true + denon_write: true + + preset: + type: num + denon_command: tuner.hd.preset + denon_read: true + denon_write: true + denon_read_group: + - tuner + - tuner.hd + + presetup: + type: bool + denon_command: tuner.hd.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.hd.presetdown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.hd.band + denon_read: true + denon_write: true + denon_read_group: + - tuner + - tuner.hd + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + rear_height_right: + type: num + 
denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + subwoofer2: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer2 + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + dynamic_eq: + type: bool + denon_command: zone1.settings.sound.general.dynamic_eq + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + multeq: + type: bool + denon_command: zone1.settings.sound.general.multeq + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + dynamic_vol: + type: bool + denon_command: zone1.settings.sound.general.dynamic_vol + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + speakersetup: + type: str + denon_command: zone1.settings.sound.general.speakersetup + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + 
dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + dialogtoggle: + type: bool + denon_command: zone1.settings.sound.general.dialogtoggle + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + dialog: + type: num + denon_command: zone1.settings.sound.general.dialog + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + dialogup: + type: bool + denon_command: zone1.settings.sound.general.dialogup + denon_read: false + denon_write: true + + dialogdown: + type: bool + denon_command: zone1.settings.sound.general.dialogdown + denon_read: false + denon_write: true + + dialogenhance: + type: num + denon_command: zone1.settings.sound.general.dialogenhance + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + digitalinput: + type: str + denon_command: zone1.settings.sound.general.digitalinput + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.sound + - zone1.settings.sound.general + + video: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone1.settings.video + + aspectratio: + type: str + denon_command: zone1.settings.video.aspectratio + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + hdmimonitor: + type: num + denon_command: zone1.settings.video.hdmimonitor + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + hdmiresolution: + type: str + denon_command: zone1.settings.video.hdmiresolution + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + videoprocessingmode: + type: str + 
denon_command: zone1.settings.video.videoprocessingmode + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + videoresolution: + type: str + denon_command: zone1.settings.video.videoresolution + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + pictureenhancer: + type: num + denon_command: zone1.settings.video.pictureenhancer + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + videoinput: + type: str + denon_command: zone1.settings.video.videoinput + denon_read: true + denon_write: true + denon_read_group: + - zone1 + - zone1.settings + - zone1.settings.video + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.settings.sound.channel_level + + front_left: + type: num + denon_command: zone2.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.channel_level + + front_right: + type: num + denon_command: zone2.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.settings.sound.tone_control + + treble: + type: num + denon_command: zone2.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone2.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone2.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone2.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone2.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone2.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.general + + HPF: + type: bool + denon_command: zone2.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - zone2 + - zone2.settings + - zone2.settings.sound + - zone2.settings.sound.general + + zone3: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.control + + power: + type: bool + denon_command: zone3.control.power + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + + mute: + type: bool + denon_command: zone3.control.mute + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + + volume: + type: num + denon_command: zone3.control.volume + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + + volumeup: + 
type: bool + denon_command: zone3.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone3.control.volumedown + denon_read: false + denon_write: true + + sleep: + type: num + denon_command: zone3.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + + standby: + type: num + denon_command: zone3.control.standby + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + + input: + type: str + denon_command: zone3.control.input + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.settings.sound.channel_level + + front_left: + type: num + denon_command: zone3.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.settings + - zone3.settings.sound + - zone3.settings.sound.channel_level + + front_right: + type: num + denon_command: zone3.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.settings + - zone3.settings.sound + - zone3.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.settings.sound.tone_control + + treble: + type: num + denon_command: zone3.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.settings + - zone3.settings.sound + - zone3.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone3.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone3.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone3.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.settings + - zone3.settings.sound + - zone3.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone3.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone3.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: zone3.settings.sound.general + + HPF: + type: bool + denon_command: zone3.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - zone3 + - zone3.settings + - zone3.settings.sound + - zone3.settings.sound.general + + ALL: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + cache: 
true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' + + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.tuner + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.tuner + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown 
+ denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - 
ALL.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange 
+ denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + - ALL.zone1.settings.sound + - ALL.zone1.settings.sound.general + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone2.settings.sound + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: ALL.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.zone2 + - ALL.zone2.settings + - ALL.zone2.settings.sound + - ALL.zone2.settings.sound.general + + AVR-X6300H: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H + + info: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.info + + fullmodel: + type: str + denon_command: info.fullmodel + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + denon_read_initial: true + + model: + type: str + denon_command: info.model + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + denon_read_initial: true + + serialnumber: + type: num + denon_command: info.serialnumber + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + main: + type: str + denon_command: info.main + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + mainfbl: + type: num + denon_command: info.mainfbl + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + dsp1: + type: num + denon_command: info.dsp1 + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + dsp2: + type: num + denon_command: info.dsp2 + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + dsp3: + type: num + denon_command: info.dsp3 + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + dsp4: + type: num + denon_command: info.dsp4 + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + apld: + type: num + denon_command: info.apld + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + vpld: + type: num + denon_command: info.vpld + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + guidat: + type: num + denon_command: info.guidat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + heosversion: + type: str + denon_command: info.heosversion + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + heosbuild: + type: num + denon_command: info.heosbuild + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + heosmod: + type: num + denon_command: info.heosmod + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + heoscnf: + type: str + 
denon_command: info.heoscnf + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + heoslanguage: + type: str + denon_command: info.heoslanguage + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + mac: + type: str + denon_command: info.mac + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + wifimac: + type: str + denon_command: info.wifimac + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + btmac: + type: str + denon_command: info.btmac + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + audyif: + type: num + denon_command: info.audyif + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + productid: + type: num + denon_command: info.productid + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + packageid: + type: num + denon_command: info.packageid + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + cmp: + type: str + denon_command: info.cmp + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + + region: + type: str + denon_command: info.region + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.info + denon_read_initial: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' 
+ + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.tuner + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + denon_read_initial: true + + hd: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.tuner.hd + + channel: + type: num + denon_command: tuner.hd.channel + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + - AVR-X6300H.tuner.hd + denon_read_initial: true + + channelup: + type: bool + denon_command: tuner.hd.channelup + denon_read: false + denon_write: true + + channeldown: + type: bool + denon_command: tuner.hd.channeldown + denon_read: false + denon_write: true + + multicastchannel: + type: num + denon_command: tuner.hd.multicastchannel + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + - AVR-X6300H.tuner.hd + + presetmemory: + type: num + denon_command: tuner.hd.presetmemory + denon_read: true + denon_write: true + + preset: + type: num + denon_command: tuner.hd.preset + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - 
AVR-X6300H.tuner + - AVR-X6300H.tuner.hd + + presetup: + type: bool + denon_command: tuner.hd.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.hd.presetdown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.hd.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + - AVR-X6300H.tuner.hd + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - 
AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + subwoofer2: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer2 + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - 
AVR-X6300H.zone1.settings.sound.general + + speakersetup: + type: str + denon_command: zone1.settings.sound.general.speakersetup + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + dialogenhance: + type: num + denon_command: zone1.settings.sound.general.dialogenhance + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.sound + - AVR-X6300H.zone1.settings.sound.general + + video: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone1.settings.video + + aspectratio: + type: str + denon_command: zone1.settings.video.aspectratio + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + hdmimonitor: + type: num + denon_command: zone1.settings.video.hdmimonitor + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + hdmiresolution: + type: str + denon_command: zone1.settings.video.hdmiresolution + 
denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + videoprocessingmode: + type: str + denon_command: zone1.settings.video.videoprocessingmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + videoresolution: + type: str + denon_command: zone1.settings.video.videoresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + pictureenhancer: + type: num + denon_command: zone1.settings.video.pictureenhancer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + videoinput: + type: str + denon_command: zone1.settings.video.videoinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone1 + - AVR-X6300H.zone1.settings + - AVR-X6300H.zone1.settings.video + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.settings.sound.channel_level + + front_left: + type: num + denon_command: zone2.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.channel_level + + front_right: + type: num + denon_command: zone2.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.settings.sound.tone_control + + treble: + type: num + denon_command: zone2.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone2.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone2.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone2.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone2.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone2.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.general + + HPF: + type: bool + denon_command: zone2.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone2 + - AVR-X6300H.zone2.settings + - AVR-X6300H.zone2.settings.sound + - AVR-X6300H.zone2.settings.sound.general + + zone3: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3 + + control: + + read: + type: bool + enforce_updates: true + 
denon_read_group_trigger: AVR-X6300H.zone3.control + + power: + type: bool + denon_command: zone3.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + + mute: + type: bool + denon_command: zone3.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + + volume: + type: num + denon_command: zone3.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + + volumeup: + type: bool + denon_command: zone3.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone3.control.volumedown + denon_read: false + denon_write: true + + sleep: + type: num + denon_command: zone3.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + + standby: + type: num + denon_command: zone3.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + + input: + type: str + denon_command: zone3.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3.settings.sound.channel_level + + front_left: + type: num + denon_command: zone3.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.settings + - AVR-X6300H.zone3.settings.sound + - AVR-X6300H.zone3.settings.sound.channel_level + + front_right: + type: num + denon_command: zone3.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.settings + - AVR-X6300H.zone3.settings.sound + - AVR-X6300H.zone3.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3.settings.sound.tone_control + + treble: + type: num + denon_command: zone3.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.settings + - AVR-X6300H.zone3.settings.sound + - AVR-X6300H.zone3.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone3.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone3.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone3.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.settings + - 
AVR-X6300H.zone3.settings.sound + - AVR-X6300H.zone3.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone3.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone3.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X6300H.zone3.settings.sound.general + + HPF: + type: bool + denon_command: zone3.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.zone3 + - AVR-X6300H.zone3.settings + - AVR-X6300H.zone3.settings.sound + - AVR-X6300H.zone3.settings.sound.general + + AVR-X4300H: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' + + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.tuner + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: 
tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.tuner + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - 
AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + subwoofer2: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer2 + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - 
AVR-X4300H.zone1.settings.sound.general + + speakersetup: + type: str + denon_command: zone1.settings.sound.general.speakersetup + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + dialogtoggle: + type: bool + denon_command: zone1.settings.sound.general.dialogtoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + dialog: + type: num + denon_command: zone1.settings.sound.general.dialog + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + dialogup: + type: bool + denon_command: zone1.settings.sound.general.dialogup + denon_read: false + denon_write: true + + dialogdown: + type: bool + denon_command: zone1.settings.sound.general.dialogdown + denon_read: false + denon_write: true + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.sound + - AVR-X4300H.zone1.settings.sound.general + + video: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone1.settings.video + + aspectratio: + type: str + 
denon_command: zone1.settings.video.aspectratio + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + hdmimonitor: + type: num + denon_command: zone1.settings.video.hdmimonitor + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + hdmiresolution: + type: str + denon_command: zone1.settings.video.hdmiresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + videoprocessingmode: + type: str + denon_command: zone1.settings.video.videoprocessingmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + videoresolution: + type: str + denon_command: zone1.settings.video.videoresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + pictureenhancer: + type: num + denon_command: zone1.settings.video.pictureenhancer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + videoinput: + type: str + denon_command: zone1.settings.video.videoinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone1 + - AVR-X4300H.zone1.settings + - AVR-X4300H.zone1.settings.video + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.settings.sound.channel_level + + front_left: + type: num + denon_command: zone2.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.channel_level + + front_right: + type: num + denon_command: zone2.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.settings.sound.tone_control + + treble: + type: num + denon_command: zone2.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone2.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone2.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone2.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone2.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone2.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.general + + HPF: + type: bool + denon_command: zone2.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone2 + - AVR-X4300H.zone2.settings + - AVR-X4300H.zone2.settings.sound + - AVR-X4300H.zone2.settings.sound.general + + zone3: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3 + + control: + + read: + type: bool + enforce_updates: true + 
denon_read_group_trigger: AVR-X4300H.zone3.control + + power: + type: bool + denon_command: zone3.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + + mute: + type: bool + denon_command: zone3.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + + volume: + type: num + denon_command: zone3.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + + volumeup: + type: bool + denon_command: zone3.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone3.control.volumedown + denon_read: false + denon_write: true + + sleep: + type: num + denon_command: zone3.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + + standby: + type: num + denon_command: zone3.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + + input: + type: str + denon_command: zone3.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3.settings.sound.channel_level + + front_left: + type: num + denon_command: zone3.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.settings + - AVR-X4300H.zone3.settings.sound + - AVR-X4300H.zone3.settings.sound.channel_level + + front_right: + type: num + denon_command: zone3.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.settings + - AVR-X4300H.zone3.settings.sound + - AVR-X4300H.zone3.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3.settings.sound.tone_control + + treble: + type: num + denon_command: zone3.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.settings + - AVR-X4300H.zone3.settings.sound + - AVR-X4300H.zone3.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone3.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone3.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone3.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.settings + - 
AVR-X4300H.zone3.settings.sound + - AVR-X4300H.zone3.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone3.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone3.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X4300H.zone3.settings.sound.general + + HPF: + type: bool + denon_command: zone3.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.zone3 + - AVR-X4300H.zone3.settings + - AVR-X4300H.zone3.settings.sound + - AVR-X4300H.zone3.settings.sound.general + + AVR-X3300W: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' + + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + + display: + type: str + denon_command: general.display + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.tuner + + title: + type: str + denon_command: tuner.title + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.tuner + denon_read_initial: true + + album: + type: str + denon_command: tuner.album + denon_read: true + denon_write: false + + artist: + type: str + denon_command: tuner.artist + denon_read: true + denon_write: false + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: 
true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.tuner + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - 
AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + subwoofer2: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer2 + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - 
AVR-X3300W.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + dialogtoggle: + type: bool + denon_command: zone1.settings.sound.general.dialogtoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + dialog: + type: num + denon_command: zone1.settings.sound.general.dialog + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + dialogup: + type: bool + denon_command: zone1.settings.sound.general.dialogup + denon_read: false + denon_write: true + + dialogdown: + type: bool + denon_command: zone1.settings.sound.general.dialogdown + denon_read: false + denon_write: true + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.sound + - AVR-X3300W.zone1.settings.sound.general + + video: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone1.settings.video + + aspectratio: + type: str + denon_command: zone1.settings.video.aspectratio + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + hdmiresolution: + type: str + denon_command: zone1.settings.video.hdmiresolution + 
denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + videoprocessingmode: + type: str + denon_command: zone1.settings.video.videoprocessingmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + videoresolution: + type: str + denon_command: zone1.settings.video.videoresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + pictureenhancer: + type: num + denon_command: zone1.settings.video.pictureenhancer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + videoinput: + type: str + denon_command: zone1.settings.video.videoinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone1 + - AVR-X3300W.zone1.settings + - AVR-X3300W.zone1.settings.video + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. 
= '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.settings.sound.channel_level + + front_left: + type: num + denon_command: zone2.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.channel_level + + front_right: + type: num + denon_command: zone2.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.settings.sound.tone_control + + treble: + type: num + denon_command: zone2.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone2.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone2.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone2.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone2.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone2.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X3300W.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.general + + HPF: + type: bool + denon_command: zone2.settings.sound.general.HPF + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.zone2 + - AVR-X3300W.zone2.settings + - AVR-X3300W.zone2.settings.sound + - AVR-X3300W.zone2.settings.sound.general + + AVR-X2300W: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W + + general: + + read: + type: bool + enforce_updates: true + 
denon_read_group_trigger: AVR-X2300W.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' + + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + + display: + type: str + denon_command: general.display + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.tuner + + title: + type: str + denon_command: tuner.title + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.tuner + denon_read_initial: true + + album: + type: str + denon_command: tuner.album + denon_read: true + denon_write: false + + artist: + type: str + denon_command: tuner.artist + denon_read: true + denon_write: false + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.tuner + denon_read_initial: true + + zone1: + + read: + type: bool + 
enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + mute: + type: bool + denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - 
AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - 
AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + dialogtoggle: + type: bool + denon_command: zone1.settings.sound.general.dialogtoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + dialog: + type: num + denon_command: zone1.settings.sound.general.dialog + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + dialogup: + type: bool + denon_command: zone1.settings.sound.general.dialogup + denon_read: false + denon_write: true + + dialogdown: + type: bool + denon_command: zone1.settings.sound.general.dialogdown + denon_read: false + denon_write: true + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: 
zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.sound + - AVR-X2300W.zone1.settings.sound.general + + video: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone1.settings.video + + aspectratio: + type: str + denon_command: zone1.settings.video.aspectratio + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + hdmimonitor: + type: num + denon_command: zone1.settings.video.hdmimonitor + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + hdmiresolution: + type: str + denon_command: zone1.settings.video.hdmiresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + videoprocessingmode: + type: str + denon_command: zone1.settings.video.videoprocessingmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + videoresolution: + type: str + denon_command: zone1.settings.video.videoresolution + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + pictureenhancer: + type: num + denon_command: zone1.settings.video.pictureenhancer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + videoinput: + type: str + denon_command: zone1.settings.video.videoinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone1 + - AVR-X2300W.zone1.settings + - AVR-X2300W.zone1.settings.video + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + + volumeup: + type: bool + denon_command: 
zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2.settings.sound.channel_level + + front_left: + type: num + denon_command: zone2.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.settings + - AVR-X2300W.zone2.settings.sound + - AVR-X2300W.zone2.settings.sound.channel_level + + front_right: + type: num + denon_command: zone2.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.settings + - AVR-X2300W.zone2.settings.sound + - AVR-X2300W.zone2.settings.sound.channel_level + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X2300W.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.zone2 + - AVR-X2300W.zone2.settings + - AVR-X2300W.zone2.settings.sound + - AVR-X2300W.zone2.settings.sound.general + + AVR-X1300W: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.general + + custom_inputnames: + type: dict + denon_command: general.custom_inputnames + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + cache: true + + reverse: + type: dict + eval: '{} if sh...() == {} else {v: k for (k, v) in sh...().items()}' + + update: + type: bool + eval: sh...timer(2, {}) + eval_trigger: '...' 
+ + power: + type: bool + denon_command: general.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + + setupmenu: + type: bool + denon_command: general.setupmenu + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + + display: + type: str + denon_command: general.display + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + + soundmode: + type: str + denon_command: general.soundmode + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + denon_read_initial: true + + inputsignal: + type: str + denon_command: general.inputsignal + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + denon_read_initial: true + + inputrate: + type: num + denon_command: general.inputrate + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + denon_read_initial: true + + inputformat: + type: str + denon_command: general.inputformat + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + denon_read_initial: true + + inputresolution: + type: str + denon_command: general.inputresolution + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + denon_read_initial: true + + outputresolution: + type: str + denon_command: general.outputresolution + denon_read: true + denon_write: false + + ecomode: + type: str + denon_command: general.ecomode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.general + + tuner: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.tuner + + title: + type: str + denon_command: tuner.title + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.tuner + denon_read_initial: true + + album: + type: str + denon_command: tuner.album + denon_read: true + denon_write: false + + artist: + type: str + denon_command: tuner.artist + denon_read: true + denon_write: false + + preset: + type: num + denon_command: tuner.preset + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.tuner + denon_read_initial: true + + presetup: + type: bool + denon_command: tuner.presetup + denon_read: false + denon_write: true + + presetdown: + type: bool + denon_command: tuner.presetdown + denon_read: false + denon_write: true + + frequency: + type: num + denon_command: tuner.frequency + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.tuner + denon_read_initial: true + + frequencyup: + type: bool + denon_command: tuner.frequencyup + denon_read: false + denon_write: true + + frequencydown: + type: bool + denon_command: tuner.frequencydown + denon_read: false + denon_write: true + + band: + type: str + denon_command: tuner.band + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.tuner + denon_read_initial: true + + zone1: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.control + + power: + type: bool + denon_command: zone1.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + mute: + type: bool 
+ denon_command: zone1.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + volume: + type: num + denon_command: zone1.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + volumeup: + type: bool + denon_command: zone1.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone1.control.volumedown + denon_read: false + denon_write: true + + volumemax: + type: num + denon_command: zone1.control.volumemax + denon_read: true + denon_write: false + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + input: + type: str + denon_command: zone1.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + listeningmode: + type: str + denon_command: zone1.control.listeningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + sleep: + type: num + denon_command: zone1.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + standby: + type: num + denon_command: zone1.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.control + denon_read_initial: true + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.settings.sound + + channel_level: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.settings.sound.channel_level + + front_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + front_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + front_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + front_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.front_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - 
AVR-X1300W.zone1.settings.sound.channel_level + + front_center: + type: num + denon_command: zone1.settings.sound.channel_level.front_center + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + surround_left: + type: num + denon_command: zone1.settings.sound.channel_level.surround_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + surround_right: + type: num + denon_command: zone1.settings.sound.channel_level.surround_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + surroundback_left: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + surroundback_right: + type: num + denon_command: zone1.settings.sound.channel_level.surroundback_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + rear_height_left: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_left + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + rear_height_right: + type: num + denon_command: zone1.settings.sound.channel_level.rear_height_right + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + subwoofer: + type: num + denon_command: zone1.settings.sound.channel_level.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.channel_level + + tone_control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.settings.sound.tone_control + + tone: + type: bool + denon_command: zone1.settings.sound.tone_control.tone + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.tone_control + + treble: + type: num + denon_command: zone1.settings.sound.tone_control.treble + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.tone_control + + trebleup: + type: bool + denon_command: zone1.settings.sound.tone_control.trebleup + denon_read: false + denon_write: true + + trebledown: + type: bool + denon_command: zone1.settings.sound.tone_control.trebledown + denon_read: false + denon_write: true + + bass: + type: num + denon_command: 
zone1.settings.sound.tone_control.bass + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.tone_control + + bassup: + type: bool + denon_command: zone1.settings.sound.tone_control.bassup + denon_read: false + denon_write: true + + bassdown: + type: bool + denon_command: zone1.settings.sound.tone_control.bassdown + denon_read: false + denon_write: true + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone1.settings.sound.general + + cinema_eq: + type: bool + denon_command: zone1.settings.sound.general.cinema_eq + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + hdmiaudioout: + type: str + denon_command: zone1.settings.sound.general.hdmiaudioout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + dynamicrange: + type: num + denon_command: zone1.settings.sound.general.dynamicrange + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + dialogtoggle: + type: bool + denon_command: zone1.settings.sound.general.dialogtoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + dialog: + type: num + denon_command: zone1.settings.sound.general.dialog + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + dialogup: + type: bool + denon_command: zone1.settings.sound.general.dialogup + denon_read: false + denon_write: true + + dialogdown: + type: bool + denon_command: zone1.settings.sound.general.dialogdown + denon_read: false + denon_write: true + + subwoofertoggle: + type: bool + denon_command: zone1.settings.sound.general.subwoofertoggle + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + subwoofer: + type: num + denon_command: zone1.settings.sound.general.subwoofer + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + subwooferup: + type: bool + denon_command: zone1.settings.sound.general.subwooferup + denon_read: false + denon_write: true + + subwooferdown: + type: bool + denon_command: zone1.settings.sound.general.subwooferdown + denon_read: false + denon_write: true + + lfe: + type: num + denon_command: zone1.settings.sound.general.lfe + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + lfeup: + type: bool + denon_command: zone1.settings.sound.general.lfeup + denon_read: false + 
denon_write: true + + lfedown: + type: bool + denon_command: zone1.settings.sound.general.lfedown + denon_read: false + denon_write: true + + audioinput: + type: str + denon_command: zone1.settings.sound.general.audioinput + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone1 + - AVR-X1300W.zone1.settings + - AVR-X1300W.zone1.settings.sound + - AVR-X1300W.zone1.settings.sound.general + + zone2: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone2 + + control: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone2.control + + power: + type: bool + denon_command: zone2.control.power + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + + mute: + type: bool + denon_command: zone2.control.mute + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + + volume: + type: num + denon_command: zone2.control.volume + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + + volumeup: + type: bool + denon_command: zone2.control.volumeup + denon_read: false + denon_write: true + + volumedown: + type: bool + denon_command: zone2.control.volumedown + denon_read: false + denon_write: true + + input: + type: str + denon_command: zone2.control.input + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + on_change: + - .custom_name = '' if sh.....general.custom_inputnames() == {} else sh.....general.custom_inputnames()[value] + + custom_name: + type: str + on_change: .. = '' if sh......general.custom_inputnames.reverse() == {} else sh......general.custom_inputnames.reverse()[value] + + sleep: + type: num + denon_command: zone2.control.sleep + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + + standby: + type: num + denon_command: zone2.control.standby + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.control + + settings: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone2.settings + + sound: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone2.settings.sound + + general: + + read: + type: bool + enforce_updates: true + denon_read_group_trigger: AVR-X1300W.zone2.settings.sound.general + + hdmiout: + type: str + denon_command: zone2.settings.sound.general.hdmiout + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.zone2 + - AVR-X1300W.zone2.settings + - AVR-X1300W.zone2.settings.sound + - AVR-X1300W.zone2.settings.sound.general +plugin_functions: NONE +logic_parameters: NONE diff --git a/denon/user_doc.rst b/denon/user_doc.rst new file mode 100755 index 000000000..eae7e2ec4 --- /dev/null +++ b/denon/user_doc.rst @@ -0,0 +1,97 @@ +.. index:: Plugins; denon +.. index:: denon + +===== +denon +===== + +.. image:: webif/static/img/plugin_logo.svg + :alt: plugin logo + :width: 300px + :height: 300px + :scale: 50 % + :align: center + +Steuerung eines Denon AV Gerätes über TCP/IP oder RS232 Schnittstelle. + +Das Plugin unterstützt eine Vielzahl von Denon Verstärkern. 
Folgende Modelle wurden
+konkret berücksichtigt, andere Modelle funktionieren aber mit hoher Wahrscheinlichkeit
+auch.
+
+- AVR-X6300H
+- AVR-X4300H
+- AVR-X3300W
+- AVR-X2300W
+- AVR-X1300W
+
+
+Konfiguration
+=============
+
+Die Plugin-Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind
+unter :doc:`/plugins_doc/config/denon` beschrieben.
+
+
+plugin.yaml
+-----------
+
+.. code-block:: yaml
+
+    # etc/plugin.yaml
+    denon:
+        plugin_name: denon
+        model: AVR-X6300H
+        timeout: 3
+        terminator: "\r"
+        binary: false
+        autoreconnect: true
+        autoconnect: true
+        connect_retries: 5
+        connect_cycle: 3
+        host: 192.168.0.111
+        port: 23
+        serialport: /dev/ttyUSB0
+        conn_type: serial_async
+        command_class: SDPCommandParseStr
+
+
+Struct Vorlagen
+===============
+
+Der Itembaum sollte jedenfalls über die structs-Funktion eingebunden werden. Hierzu gibt es vier
+Varianten, wobei die letzte die optimale Lösung darstellt:
+
+- einzelne Struct-Teile wie denon.info, denon.general, denon.tuner, denon.zone1, denon.zone2, denon.zone3 (siehe Beispiel am Ende dieser Seite)
+- denon.ALL: Hierbei werden sämtliche Kommandos eingebunden, die vom Plugin vorgesehen sind
+- denon.AVR-X6300H bzw. die anderen unterstützten Modelle, um nur die relevanten Items einzubinden
+- denon.MODEL: Es wird automatisch der Itembaum für das Modell geladen, das im plugin.yaml angegeben ist.
+
+Sollte das selbst verwendete Modell nicht im Plugin vorhanden sein, kann der Plugin-Maintainer
+angeschrieben werden, um das Modell aufzunehmen.
+
+.. code-block:: yaml
+
+    # items/my.yaml
+    Denon:
+        type: foo
+        struct: denon.MODEL
+
+
+Kommandos
+=========
+
+Die RS232- oder IP-Befehle des Geräts sind in der Datei `commands.py` hinterlegt. Etwaige
+Anpassungen und Ergänzungen sollten als Pull Request oder durch Rücksprache mit dem Maintainer
+direkt ins Plugin einfließen, damit diese auch von anderen Nutzer:innen eingesetzt werden können.
+
+Über die Datei `datatypes.py` sowie die Lookup-Tabellen in der Datei `commands.py` sind
+bereits sämtliche nötigen Konvertierungen abgedeckt. So werden
+beispielsweise Lautstärkeangaben mit Kommawerten oder boolesche Werte automatisch
+korrekt interpretiert.
+
+
+Web Interface
+=============
+
+Aktuell ist kein Web Interface integriert. In naher Zukunft soll dies über die
+SmartDevicePlugin-Bibliothek automatisch zur Verfügung gestellt werden.
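+
+
+Beispiel
+========
+
+Die folgende, nicht getestete Skizze zeigt die erste der oben genannten Struct-Varianten:
+Statt des kompletten Modell-Structs werden nur einzelne Struct-Teile eingebunden. Die
+Itemnamen (`Denon`, `Tuner`, `Hauptzone`, `Nebenzone`) und der Dateiname sind frei gewählt
+und dienen nur der Veranschaulichung.
+
+.. code-block:: yaml
+
+    # items/denon.yaml  (frei gewählter Dateiname, nicht getestetes Beispiel)
+    Denon:
+        Tuner:
+            struct: denon.tuner
+        Hauptzone:
+            struct: denon.zone1
+        Nebenzone:
+            struct: denon.zone2
+
+Auf diese Weise werden nur die Items der tatsächlich benötigten Struct-Teile angelegt; die
+verwendbaren Namen entsprechen der oben aufgeführten Liste (z.B. `denon.general`, `denon.zone3`).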
diff --git a/denon/webif/static/img/plugin_logo.svg b/denon/webif/static/img/plugin_logo.svg new file mode 100755 index 000000000..e1c8a9993 --- /dev/null +++ b/denon/webif/static/img/plugin_logo.svg @@ -0,0 +1,88 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +Multidevice + From a982bfe046bf9378cf60edaa4edb881038da17b8 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Apr 2023 10:48:05 +0200 Subject: [PATCH 004/775] denon plugin: fix init and add tuning command to structs --- denon/__init__.py | 3 ++- denon/commands.py | 2 +- denon/plugin.yaml | 62 +++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 2 deletions(-) diff --git a/denon/__init__.py b/denon/__init__.py index d39fb55a2..c505cb1cc 100755 --- a/denon/__init__.py +++ b/denon/__init__.py @@ -44,7 +44,8 @@ class SmartPluginWebIf(): from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone if not SDP_standalone: - from .webif import WebInterface + #from .webif import WebInterface + pass CUSTOM_INPUT_NAME_COMMAND = 'custom_inputnames' diff --git a/denon/commands.py b/denon/commands.py index 3f5250bc2..27afbd0fa 100755 --- a/denon/commands.py +++ b/denon/commands.py @@ -10,7 +10,7 @@ models = { 'ALL': ['general.custom_inputnames', 'general.power', 'general.setupmenu', 'general.soundmode', 'general.inputsignal', 'general.inputrate', 'general.inputformat', 'general.inputresolution', 'general.outputresolution', 'general.ecomode', - 'tuner.preset', 'tuner.presetup', 'tuner.presetdown', 'tuner.frequency', 'tuner.frequencyup', 'tuner.frequencydown', 'tuner.band', + 'tuner.preset', 'tuner.presetup', 'tuner.presetdown', 'tuner.frequency', 'tuner.frequencyup', 'tuner.frequencydown', 'tuner.band', 'tuner.tuningmode', 'zone1.control', 'zone1.settings.sound.general.audioinput', 'zone1.settings.sound.general.cinema_eq', 'zone1.settings.sound.general.hdmiaudioout', 'zone1.settings.sound.general.dynamicrange', 'zone1.settings.sound.general.subwoofertoggle', 'zone1.settings.sound.general.subwoofer', 'zone1.settings.sound.general.subwooferup', 'zone1.settings.sound.general.subwooferdown', 'zone1.settings.sound.general.lfe', 'zone1.settings.sound.general.lfeup', 'zone1.settings.sound.general.lfedown', 'zone1.settings.sound.tone_control', 'zone1.settings.sound.channel_level.front_left', 'zone1.settings.sound.channel_level.front_right', 'zone1.settings.sound.channel_level.front_height_left', 'zone1.settings.sound.channel_level.front_height_right', 'zone1.settings.sound.channel_level.front_center', 'zone1.settings.sound.channel_level.surround_left', 'zone1.settings.sound.channel_level.surround_right', 'zone1.settings.sound.channel_level.surroundback_left', 'zone1.settings.sound.channel_level.surroundback_right', 'zone1.settings.sound.channel_level.rear_height_left', 'zone1.settings.sound.channel_level.rear_height_right', 'zone1.settings.sound.channel_level.subwoofer', diff --git a/denon/plugin.yaml b/denon/plugin.yaml index 5f3cf4f3b..356bc0d35 100755 --- a/denon/plugin.yaml +++ b/denon/plugin.yaml @@ -610,6 +610,14 @@ item_structs: - tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - tuner + hd: read: @@ -1898,6 +1906,15 @@ item_structs: - ALL.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - ALL + - ALL.tuner + zone1: read: @@ -2902,6 +2919,15 @@ 
item_structs: - AVR-X6300H.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X6300H + - AVR-X6300H.tuner + hd: read: @@ -4179,6 +4205,15 @@ item_structs: - AVR-X4300H.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X4300H + - AVR-X4300H.tuner + zone1: read: @@ -5432,6 +5467,15 @@ item_structs: - AVR-X3300W.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X3300W + - AVR-X3300W.tuner + zone1: read: @@ -6451,6 +6495,15 @@ item_structs: - AVR-X2300W.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X2300W + - AVR-X2300W.tuner + zone1: read: @@ -7402,6 +7455,15 @@ item_structs: - AVR-X1300W.tuner denon_read_initial: true + tuningmode: + type: str + denon_command: tuner.tuningmode + denon_read: true + denon_write: true + denon_read_group: + - AVR-X1300W + - AVR-X1300W.tuner + zone1: read: From 63734560ef1b93d53d65af445df495c32084d6aa Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Apr 2023 11:01:15 +0200 Subject: [PATCH 005/775] rpi_info Plugin: Update and fix web interface --- rpi_info/webif/templates/index.html | 40 ++++++++++------------------- 1 file changed, 14 insertions(+), 26 deletions(-) diff --git a/rpi_info/webif/templates/index.html b/rpi_info/webif/templates/index.html index c14b14207..750c9836d 100755 --- a/rpi_info/webif/templates/index.html +++ b/rpi_info/webif/templates/index.html @@ -37,7 +37,7 @@ shngInsertText(item+'_last_update', objResponse['items'][item]['last_update'], 'maintable'); shngInsertText(item+'_last_change', objResponse['items'][item]['last_change'], 'maintable'); } - + if (objResponse['plugin_suspended'] === false) { document.getElementById('play').classList = 'btn btn-success btn-sm'; document.getElementById('play').disabled = true; @@ -56,22 +56,15 @@ - -{% endblock pluginscripts %} - -{% block headtable %} - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
{{_('Broker Host')}}{{ p.broker_config.host }}{{_('Broker Port')}}{{ p.broker_config.port }}
{{_('Benutzer')}}{{ p.broker_config.user }}{{_('Passwort')}} - {% if p.broker_config.password %} - {% for letter in p.broker_config.password %}*{% endfor %} - {% endif %} -
{{_('QoS')}}{{ p.broker_config.qos }}{{_('full_topic')}}{{ p.full_topic }}
-{% endblock headtable %} - - -{% block buttons %} -{% endblock %} - - -{% set tabcount = 6 %} - - -{% if p.tasmota_items != [] %} - {% set start_tab = 1 %} -{% endif %} - - -{% if items != [] %} - {% set tab1title = _("" ~ plugin_shortname ~ " Items") %} -{% else %} - {% set tab1title = "hidden" %} -{% endif %} -{% set tab2title = _("" ~ plugin_shortname ~ " Devices") %} -{% set tab3title = _("" ~ plugin_shortname ~ " " ~ _('Details') ~ "") %} -{% set tab4title = _("" ~ plugin_shortname ~ " " ~ _('Zigbee Devices') ~ "") %} -{% set tab5title = _("" ~ " Broker Information") %} -{% if maintenance %} - {% set tab6title = _("" ~ plugin_shortname ~ " " ~ _('Maintenance') ~ "") %} -{% else %} - {% set tab6title = "hidden" %} -{% endif %} - - -{% block bodytab1 %} -
-

Item Information

- - - - - - - - - - - - - - - {% for item in items %} - - - - - - - {% if p.get_iattr_value(item.conf, 'tasmota_relay') is in ['1', '2', '3', '4', '5', '6', '7', '8'] %} - - {% elif p.get_iattr_value(item.conf, 'tasmota_attr') == 'relay' %} - - {% else %} - - {% endif %} - - - - {% endfor %} - -
{{ _('Item') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Tasmota Topic') }}{{ _('Relais') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item._path }}{{ item._type }}{{ item() }}{{ p.get_iattr_value(item.conf, 'tasmota_topic') }}{{ p.get_iattr_value(item.conf, 'tasmota_relay') }}1-{{ item.last_update().strftime('%d.%m.%Y %H:%M:%S') }}{{ item.last_change().strftime('%d.%m.%Y %H:%M:%S') }}
-
-{% endblock %} - - -{% block bodytab2 %} -
-

Device Information

- - - - - - - - - - - - - - - - - - - {% for device in p.tasmota_devices %} - {% if 'fw_ver' in p.tasmota_devices[device] %} - - - - - - - - - - - - {% if p.tasmota_devices[device]['wifi_signal'] %} - - {% else %} - - {% endif %} - - - {% endif %} - {% endfor %} - -
{{ _('Tasmota Topic') }}{{ _('Online') }}{{ _('Friendy Name') }}{{ _('Mac Adresse') }}{{ _('IP Adresse') }}{{ _('Uptime') }}{{ _('Sensor Type') }}{{ _('Firmware') }}{{ _('Module') }}{{ _('Wifi') }}{{ _('Details') }}
{{ device }}{{ p.tasmota_devices[device].online }}{{ p.tasmota_devices[device].friendly_name }}{{ p.tasmota_devices[device].mac }}{{ p.tasmota_devices[device].ip }}{{ p.tasmota_devices[device].uptime }} - {% if p.tasmota_devices[device]['sensors'] != {} %} - {% for key in p.tasmota_devices[device]['sensors'] %} - {{ key }} - {%if not loop.last%}, {%endif%} - {% endfor %} - {% else %} - - - {% endif %} - {{ p.tasmota_devices[device].fw_ver }}{{ p.tasmota_devices[device].module }}{{ p.tasmota_devices[device].wifi_signal }} dBm - - {% for entry in p.tasmota_devices[device]['discovery_config'] %} - - - - - {% endfor %} -
{{ entry }}:{{ p.tasmota_devices[device]['discovery_config'][entry] }}
-
- -
-{% endblock %} - - -{% block bodytab3 %} -
-{% if p.has_energy_sensor %} -

ENERGY SENSORS

- - - - - - - - - - - - - - - - - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['sensors']['ENERGY'] %} - - - - - - - - - - - - - {% endif %} - {% endfor %} - -
{{ _('Tasmota Topic') }}{{ _('Spannung') }}{{ _('Strom') }}{{ _('Leistung') }}{{ _('Heute') }}{{ _('Gestern') }}{{ _('Gesamt') }}{{ _('Gesamt - Startzeit') }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['ENERGY']['voltage'] }}V.{{ p.tasmota_devices[device]['sensors']['ENERGY']['current'] }}A.{{ p.tasmota_devices[device]['sensors']['ENERGY']['power'] }}W{{ p.tasmota_devices[device]['sensors']['ENERGY']['today'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['yesterday'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total_starttime'] }}
-
-
-{% endif %} - -{% if p.has_env_sensor %} -

ENVIRONMENTAL SENSORS

- - - - - - - - - - - - - {% if p.has_ds18b20_sensor %} - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['sensors'] %} - {% if p.tasmota_devices[device]['sensors']['DS18B20'] %} - - - - - - - - - {% endif %} - {% endif %} - {% endfor %} - {% endif %} - {% if p.has_am2301_sensor or p.has_sht3x_sensor%} - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['sensors'] %} - {% if p.tasmota_devices[device]['sensors']['AM2301'] %} - - - - - - - - - {% endif %} - {% if p.tasmota_devices[device]['sensors']['SHT3X'] %} - - - - - - - - - {% endif %} - {% endif %} - {% endfor %} - {% endif %} - -
{{ _('Tasmota Topic') }}{{ _('Temperatur') }}{{ _('Luftfeuchtigkeit') }}{{ _('Taupunkt') }}{{ _('1w-ID') }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['DS18B20'].temperature }}°C.--{{ p.tasmota_devices[device]['sensors']['DS18B20'].id }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['AM2301'].temperature }}°C.{{ p.tasmota_devices[device]['sensors']['AM2301'].humidity }}%rH.{{ p.tasmota_devices[device]['sensors']['AM2301'].dewpoint }}°C.-
{{ device }}{{ p.tasmota_devices[device]['sensors']['SHT3X'].temperature }}°C.{{ p.tasmota_devices[device]['sensors']['SHT3X'].humidity }}%rH.{{ p.tasmota_devices[device]['sensors']['SHT3X'].dewpoint }}°C.-
-
-
-{% endif %} - -{% if p.has_other_sensor %} -

OTHER SENSORS

- - - - - - - - - - {% for device in p.tasmota_devices %} - {% for sensor in p.tasmota_devices[device]['sensors'] %} - {% if sensor not in p.SENSORS %} - - - - - - {% endif %} - {% endfor %} - {% endfor %} - -
{{ _('Sensor') }}{{ _('Sensor Details') }}
{{ sensor }}{{ p.tasmota_devices[device]['sensors'][sensor] }}
-{% endif %} - -{% if p.has_lights %} -

LIGHTS

- - - - - - - - - - - - - - - - - {% if p.has_lights %} - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['lights'] %} - - - - - - - - - - - - - {% endif %} - {% endfor %} - {% endif %} - -
{{ _('Tasmota Topic') }}{{ _('HSB') }}{{ _('Dimmer') }}{{ _('Color') }}{{ _('CT') }}{{ _('Scheme') }}{{ _('Fade') }}{{ _('Speed') }}{{ _('LED-Table') }}
{{ device }}{{ p.tasmota_devices[device]['lights'].hsb }}.{{ p.tasmota_devices[device]['lights'].dimmer }}.{{ p.tasmota_devices[device]['lights'].color }}.{{ p.tasmota_devices[device]['lights'].ct }}.{{ p.tasmota_devices[device]['lights'].scheme }}.{{ p.tasmota_devices[device]['lights'].fade }}.{{ p.tasmota_devices[device]['lights'].speed }}.{{ p.tasmota_devices[device]['lights'].ledtable }}.
- -
-
-{% endif %} - -{% if p.has_rf %} -

RF

- - - - - - - - - - - - {% if p.has_rf %} - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['rf'] %} - - - - - - - - {% endif %} - {% endfor %} - {% endif %} - -
{{ _('Tasmota Topic') }}{{ _('RF-Received') }}{{ _('RF-Send Result') }}{{ _('RF-Key Result') }}
{{ device }}{{ p.tasmota_devices[device]['rf'].rf_received }}{{ p.tasmota_devices[device]['rf'].rf_send_result }}{{ p.tasmota_devices[device]['rf'].rfkey_result }}
- -
-
-{% endif %} -
-{% endblock %} - - -{% block bodytab4 %} -
-

Zigbee Information

- - - - - - - - - - - - - - - - {% for device in p.tasmota_zigbee_devices %} - - - - - - - - - - - - {% endfor %} - -
{{ _('Device ID') }}{{ _('IEEEAddr') }}{{ _('Hersteller') }}{{ _('ModelId') }}{{ _('LinkQuality') }}{{ _('Battery %') }}{{ _('LastSeen') }}{{ _('Data') }}
{{ device }}{{ p.tasmota_zigbee_devices[device]['ieeeaddr'] }}{{ p.tasmota_zigbee_devices[device]['manufacturer'] }}{{ p.tasmota_zigbee_devices[device]['modelid'] }}{{ p.tasmota_zigbee_devices[device]['linkquality'] }}{{ p.tasmota_zigbee_devices[device]['batterypercentage'] }}{{ p.tasmota_zigbee_devices[device]['lastseenepoch'] }}{{ p.tasmota_zigbee_devices[device] }}
-
-{% endblock %} - - -{% block bodytab5 %} -
-

Broker Information

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - {% if p.broker_monitoring %} - - - - - {% endif %} - - - -
{{ _('Merkmal') }}{{ _('Wert') }}
{{ 'Broker Version' }}{{ p._broker.version }}
{{ 'Active Clients' }}{{ p._broker.active_clients }}
{{ 'Subscriptions' }}{{ p._broker.subscriptions }}
{{ 'Messages stored' }}{{ p._broker.stored_messages }}
{{ 'Retained Messages' }}{{ p._broker.retained_messages }}
{{ _('Laufzeit') }}{{ p.broker_uptime() }}
- {% if p.broker_monitoring %} -
-
-

Broker Monitor

- - - - - - - - - - - - - - - - - - - - - -
{{ _('Message Durchsatz') }}{{ _('letzte Minute') }}{{ _('letzte 5 Min.') }}{{ _('letzte 15 Min.') }}
{{ _('Durchschnittlich Messages je Minute empfangen') }}     {{ p._broker.msg_rcv_1min }}     {{ p._broker.msg_rcv_5min }}     {{ p._broker.msg_rcv_15min }}
{{ _('Durchschnittlich Messages je Minute gesendet') }}     {{ p._broker.msg_snt_1min }}     {{ p._broker.msg_snt_5min }}     {{ p._broker.msg_snt_15min }}
-{% endif %} -
-{% endblock %} - - -{% block bodytab6 %} - - -
- - - - - - - - - {% for device in p.tasmota_devices %} - - - - - {% endfor %} - - - - - - - - - - -
{{ _('Tasmota Device') }}{{ _('Tasmota Device Details') }}
{{ device }}{{ p.tasmota_devices[device] }}
{{ 'DEVICE_DICT_1' }}{{ p.DEVICE_DICT_1 }}
{{ 'DEVICE_DICT_2' }}{{ p.DEVICE_DICT_2 }}
-
- -
- - - - - - - - - {% for device in p.tasmota_zigbee_devices %} - - - - - {% endfor %} - -
{{ _('Zigbee Device') }}{{ _('Zigbee Device Details') }}
{{ device }}{{ p.tasmota_zigbee_devices[device] }}
-
-{% endblock %} - - - +{% extends "base_plugin.html" %} +{% set logo_frame = false %} + + +{% set update_interval = [(((10 * item_count) / 1000) | round | int) * 1000, 5000]|max %} + + +{% block pluginstyles %} + +{% endblock pluginstyles %} + + +{% block pluginscripts %} + + +{% endblock pluginscripts %} + +{% block headtable %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
{{_('Broker Host')}}{{ p.broker_config.host }}{{_('Broker Port')}}{{ p.broker_config.port }}
{{_('Benutzer')}}{{ p.broker_config.user }}{{_('Passwort')}} + {% if p.broker_config.password %} + {% for letter in p.broker_config.password %}*{% endfor %} + {% endif %} +
{{_('QoS')}}{{ p.broker_config.qos }}{{_('full_topic')}}{{ p.full_topic }}
+{% endblock headtable %} + + +{% block buttons %} +{% endblock %} + + +{% set tabcount = 6 %} + + +{% if p.tasmota_items != [] %} + {% set start_tab = 1 %} +{% endif %} + + +{% if items != [] %} + {% set tab1title = _("" ~ plugin_shortname ~ " Items") %} +{% else %} + {% set tab1title = "hidden" %} +{% endif %} +{% set tab2title = _("" ~ plugin_shortname ~ " Devices") %} +{% set tab3title = _("" ~ plugin_shortname ~ " " ~ _('Details') ~ "") %} +{% set tab4title = _("" ~ plugin_shortname ~ " " ~ _('Zigbee Devices') ~ "") %} +{% set tab5title = _("" ~ " Broker Information") %} +{% if maintenance %} + {% set tab6title = _("" ~ plugin_shortname ~ " " ~ _('Maintenance') ~ "") %} +{% else %} + {% set tab6title = "hidden" %} +{% endif %} + + +{% block bodytab1 %} +
+

Item Information

+ + + + + + + + + + + + + + + {% for item in items %} + + + + + + + {% if p.get_iattr_value(item.conf, 'tasmota_relay') is in ['1', '2', '3', '4', '5', '6', '7', '8'] %} + + {% elif p.get_iattr_value(item.conf, 'tasmota_attr') == 'relay' %} + + {% else %} + + {% endif %} + + + + {% endfor %} + +
{{ _('Item') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Tasmota Topic') }}{{ _('Relais') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item._path }}{{ item._type }}{{ item() }}{{ p.get_iattr_value(item.conf, 'tasmota_topic') }}{{ p.get_iattr_value(item.conf, 'tasmota_relay') }}1-{{ item.last_update().strftime('%d.%m.%Y %H:%M:%S') }}{{ item.last_change().strftime('%d.%m.%Y %H:%M:%S') }}
+
+{% endblock %} + + +{% block bodytab2 %} +
+

Device Information

+ + + + + + + + + + + + + + + + + + + {% for device in p.tasmota_devices %} + {% if 'fw_ver' in p.tasmota_devices[device] %} + + + + + + + + + + + + {% if p.tasmota_devices[device]['wifi_signal'] %} + + {% else %} + + {% endif %} + + + {% endif %} + {% endfor %} + +
{{ _('Tasmota Topic') }}{{ _('Online') }}{{ _('Friendly Name') }}{{ _('Mac Adresse') }}{{ _('IP Adresse') }}{{ _('Uptime') }}{{ _('Sensor Type') }}{{ _('Firmware') }}{{ _('Module') }}{{ _('Wifi') }}{{ _('Details') }}
{{ device }}{{ p.tasmota_devices[device].online }}{{ p.tasmota_devices[device].friendly_name }}{{ p.tasmota_devices[device].mac }}{{ p.tasmota_devices[device].ip }}{{ p.tasmota_devices[device].uptime }} + {% if p.tasmota_devices[device]['sensors'] != {} %} + {% for key in p.tasmota_devices[device]['sensors'] %} + {{ key }} + {%if not loop.last%}, {%endif%} + {% endfor %} + {% else %} + - + {% endif %} + {{ p.tasmota_devices[device].fw_ver }}{{ p.tasmota_devices[device].module }}{{ p.tasmota_devices[device].wifi_signal }} dBm + + {% for entry in p.tasmota_devices[device]['discovery_config'] %} + + + + + {% endfor %} +
{{ entry }}:{{ p.tasmota_devices[device]['discovery_config'][entry] }}
+
+ +
+{% endblock %} + + +{% block bodytab3 %} +
+{% if p.has_energy_sensor %} +

ENERGY SENSORS

+ + + + + + + + + + + + + + + + + {% for device in p.tasmota_devices %} + {% if p.tasmota_devices[device]['sensors']['ENERGY'] %} + + + + + + + + + + + + + {% endif %} + {% endfor %} + +
{{ _('Tasmota Topic') }}{{ _('Spannung') }}{{ _('Strom') }}{{ _('Leistung') }}{{ _('Heute') }}{{ _('Gestern') }}{{ _('Gesamt') }}{{ _('Gesamt - Startzeit') }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['ENERGY']['voltage'] }}V.{{ p.tasmota_devices[device]['sensors']['ENERGY']['current'] }}A.{{ p.tasmota_devices[device]['sensors']['ENERGY']['power'] }}W{{ p.tasmota_devices[device]['sensors']['ENERGY']['today'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['yesterday'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total_starttime'] }}
+
+
+{% endif %} + +{% if p.has_env_sensor %} +

ENVIRONMENTAL SENSORS

+ + + + + + + + + + + + + {% if p.has_ds18b20_sensor %} + {% for device in p.tasmota_devices %} + {% if p.tasmota_devices[device]['sensors'] %} + {% if p.tasmota_devices[device]['sensors']['DS18B20'] %} + + + + + + + + + {% endif %} + {% endif %} + {% endfor %} + {% endif %} + {% if p.has_am2301_sensor or p.has_sht3x_sensor%} + {% for device in p.tasmota_devices %} + {% if p.tasmota_devices[device]['sensors'] %} + {% if p.tasmota_devices[device]['sensors']['AM2301'] %} + + + + + + + + + {% endif %} + {% if p.tasmota_devices[device]['sensors']['SHT3X'] %} + + + + + + + + + {% endif %} + {% endif %} + {% endfor %} + {% endif %} + +
{{ _('Tasmota Topic') }}{{ _('Temperatur') }}{{ _('Luftfeuchtigkeit') }}{{ _('Taupunkt') }}{{ _('1w-ID') }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['DS18B20'].temperature }}°C.--{{ p.tasmota_devices[device]['sensors']['DS18B20'].id }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['AM2301'].temperature }}°C.{{ p.tasmota_devices[device]['sensors']['AM2301'].humidity }}%rH.{{ p.tasmota_devices[device]['sensors']['AM2301'].dewpoint }}°C.-
{{ device }}{{ p.tasmota_devices[device]['sensors']['SHT3X'].temperature }}°C.{{ p.tasmota_devices[device]['sensors']['SHT3X'].humidity }}%rH.{{ p.tasmota_devices[device]['sensors']['SHT3X'].dewpoint }}°C.-
+
+
+{% endif %} + +{% if p.has_other_sensor %} +

OTHER SENSORS

+ + + + + + + + + + {% for device in p.tasmota_devices %} + {% for sensor in p.tasmota_devices[device]['sensors'] %} + {% if sensor not in p.SENSORS %} + + + + + + {% endif %} + {% endfor %} + {% endfor %} + +
{{ _('Sensor') }}{{ _('Sensor Details') }}
{{ sensor }}{{ p.tasmota_devices[device]['sensors'][sensor] }}
+{% endif %} + +{% if p.has_lights %} +

LIGHTS

+ + + + + + + + + + + + + + + + + {% if p.has_lights %} + {% for device in p.tasmota_devices %} + {% if p.tasmota_devices[device]['lights'] %} + + + + + + + + + + + + + {% endif %} + {% endfor %} + {% endif %} + +
{{ _('Tasmota Topic') }}{{ _('HSB') }}{{ _('Dimmer') }}{{ _('Color') }}{{ _('CT') }}{{ _('Scheme') }}{{ _('Fade') }}{{ _('Speed') }}{{ _('LED-Table') }}
{{ device }}{{ p.tasmota_devices[device]['lights'].hsb }}.{{ p.tasmota_devices[device]['lights'].dimmer }}.{{ p.tasmota_devices[device]['lights'].color }}.{{ p.tasmota_devices[device]['lights'].ct }}.{{ p.tasmota_devices[device]['lights'].scheme }}.{{ p.tasmota_devices[device]['lights'].fade }}.{{ p.tasmota_devices[device]['lights'].speed }}.{{ p.tasmota_devices[device]['lights'].ledtable }}.
+ +
+
+{% endif %} + +{% if p.has_rf %} +

RF

+ + + + + + + + + + + + {% if p.has_rf %} + {% for device in p.tasmota_devices %} + {% if p.tasmota_devices[device]['rf'] %} + + + + + + + + {% endif %} + {% endfor %} + {% endif %} + +
{{ _('Tasmota Topic') }}{{ _('RF-Received') }}{{ _('RF-Send Result') }}{{ _('RF-Key Result') }}
{{ device }}{{ p.tasmota_devices[device]['rf'].rf_received }}{{ p.tasmota_devices[device]['rf'].rf_send_result }}{{ p.tasmota_devices[device]['rf'].rfkey_result }}
+ +
+
+{% endif %} +
+{% endblock %} + + +{% block bodytab4 %} +
+

Zigbee Information

+ + + + + + + + + + + + + + + + {% for device in p.tasmota_zigbee_devices %} + + + + + + + + + + + + {% endfor %} + +
{{ _('Device ID') }}{{ _('IEEEAddr') }}{{ _('Hersteller') }}{{ _('ModelId') }}{{ _('LinkQuality') }}{{ _('Battery %') }}{{ _('LastSeen') }}{{ _('Data') }}
{{ device }}{{ p.tasmota_zigbee_devices[device]['ieeeaddr'] }}{{ p.tasmota_zigbee_devices[device]['manufacturer'] }}{{ p.tasmota_zigbee_devices[device]['modelid'] }}{{ p.tasmota_zigbee_devices[device]['linkquality'] }}{{ p.tasmota_zigbee_devices[device]['batterypercentage'] }}{{ p.tasmota_zigbee_devices[device]['lastseenepoch'] }}{{ p.tasmota_zigbee_devices[device] }}
+
+{% endblock %} + + +{% block bodytab5 %} +
+

Broker Information

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {% if p.broker_monitoring %} + + + + + {% endif %} + + + +
{{ _('Merkmal') }}{{ _('Wert') }}
{{ 'Broker Version' }}{{ p._broker.version }}
{{ 'Active Clients' }}{{ p._broker.active_clients }}
{{ 'Subscriptions' }}{{ p._broker.subscriptions }}
{{ 'Messages stored' }}{{ p._broker.stored_messages }}
{{ 'Retained Messages' }}{{ p._broker.retained_messages }}
{{ _('Laufzeit') }}{{ p.broker_uptime() }}
+ {% if p.broker_monitoring %} +
+
+

Broker Monitor

+ + + + + + + + + + + + + + + + + + + + + +
{{ _('Message Durchsatz') }}{{ _('letzte Minute') }}{{ _('letzte 5 Min.') }}{{ _('letzte 15 Min.') }}
{{ _('Durchschnittlich Messages je Minute empfangen') }}     {{ p._broker.msg_rcv_1min }}     {{ p._broker.msg_rcv_5min }}     {{ p._broker.msg_rcv_15min }}
{{ _('Durchschnittlich Messages je Minute gesendet') }}     {{ p._broker.msg_snt_1min }}     {{ p._broker.msg_snt_5min }}     {{ p._broker.msg_snt_15min }}
+{% endif %} +
+{% endblock %} + + +{% block bodytab6 %} + + +
+ + + + + + + + + {% for device in p.tasmota_devices %} + + + + + {% endfor %} + + + + + + + + + + +
{{ _('Tasmota Device') }}{{ _('Tasmota Device Details') }}
{{ device }}{{ p.tasmota_devices[device] }}
{{ 'DEVICE_DICT_1' }}{{ p.DEVICE_DICT_1 }}
{{ 'DEVICE_DICT_2' }}{{ p.DEVICE_DICT_2 }}
+
+ +
+ + + + + + + + + {% for device in p.tasmota_zigbee_devices %} + + + + + {% endfor %} + +
{{ _('Zigbee Device') }}{{ _('Zigbee Device Details') }}
{{ device }}{{ p.tasmota_zigbee_devices[device] }}
+
+{% endblock %} + + + From d1843978f9bd5addc8a31e1cdaee33e096a0138a Mon Sep 17 00:00:00 2001 From: ivande Date: Sat, 15 Apr 2023 11:03:01 +0200 Subject: [PATCH 044/775] V1.8.0 new package, use async --- telegram/__init__.py | 572 ++++++++++++++++------------ telegram/plugin.yaml | 2 +- telegram/requirements.txt | 2 +- telegram/user_doc.rst | 17 +- telegram/webif/templates/index.html | 46 ++- 5 files changed, 373 insertions(+), 266 deletions(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index ab1848ebd..fe01657d8 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -24,27 +24,28 @@ ######################################################################### import datetime +import time import logging +import asyncio +import queue import re import requests import traceback from io import BytesIO +from queue import Queue from lib.logic import Logics from lib.model.smartplugin import SmartPlugin from .webif import WebInterface try: - import telegram - import telegram.ext + from telegram import Update + from telegram.ext import Updater, Application, CommandHandler, ContextTypes, MessageHandler, filters from telegram.error import TelegramError - from telegram.ext import Updater - from telegram.ext import CommandHandler - from telegram.ext import MessageHandler, Filters REQUIRED_PACKAGE_IMPORTED = True -except Exception: - REQUIRED_PACKAGE_IMPORTED = False +except Exception as e: + REQUIRED_PACKAGE_IMPORTED = e ITEM_ATTR_MESSAGE = 'telegram_message' # Send message on item change ITEM_ATTR_CONDITION = 'telegram_condition' # when to send the message, if not given send any time, @@ -66,7 +67,7 @@ class Telegram(SmartPlugin): - PLUGIN_VERSION = "1.7.1" + PLUGIN_VERSION = "1.8.0" _items = [] # all items using attribute ``telegram_message`` _items_info = {} # dict used whith the info-command: key = attribute_value, val= item_list telegram_info @@ -74,7 +75,7 @@ class Telegram(SmartPlugin): _items_control = {} # dict used whith the control-command: _chat_ids_item = {} # an item with a dict of chat_id and write access _waitAnswer = None # wait a specific answer Yes/No - or num (change_item) - + _queue = None def __init__(self, sh): """ @@ -83,21 +84,24 @@ def __init__(self, sh): """ self.logger.info('Init telegram plugin') - + # Call init code of parent class (SmartPlugin or MqttPlugin) super().__init__() if not self._init_complete: return - - if self.logger.isEnabledFor(logging.DEBUG): + self.debug_enabled = self.logger.isEnabledFor(logging.DEBUG) + + if self.debug_enabled: self.logger.debug(f"init {__name__}") self._init_complete = False # Exit if the required package(s) could not be imported - if not REQUIRED_PACKAGE_IMPORTED: - self.logger.error(f"{self.get_fullname()}: Unable to import Python package 'python-telegram-bot'") + if REQUIRED_PACKAGE_IMPORTED is not True: + self.logger.error(f"{self.get_fullname()}: Unable to import Python package 'python-telegram-bot' [{REQUIRED_PACKAGE_IMPORTED}]") return + self._loop = asyncio.get_event_loop() + self.alive = False self._name = self.get_parameter_value('name') self._token = self.get_parameter_value('token') @@ -108,45 +112,38 @@ def __init__(self, sh): self._no_write_access_msg = self.get_parameter_value('no_write_access_msg') self._long_polling_timeout = self.get_parameter_value('long_polling_timeout') self._pretty_thread_names = self.get_parameter_value('pretty_thread_names') - - # the Updater class continuously fetches new updates from telegram and passes them on to the Dispatcher class. 
- try: - self._updater = Updater(token=self._token, use_context=True) - self._bot = self._updater.bot - self.logger.info(f"Telegram bot is listening: {self._bot.getMe()}") - except TelegramError as e: - # catch Unauthorized errors due to an invalid token - self.logger.error(f"Unable to start up Telegram conversation. Maybe an invalid token? {e}") + + self._application = None + self._bot = None + self._queue = Queue() + + self._application = Application.builder().token(self._token).build() + + if self.debug_enabled: + self.logger.debug("adding command handlers to application") + + self._application.add_error_handler(self.eHandler) + self._application.add_handler(CommandHandler('time', self.cHandler_time)) + self._application.add_handler(CommandHandler('help', self.cHandler_help)) + self._application.add_handler(CommandHandler('hide', self.cHandler_hide)) + self._application.add_handler(CommandHandler('list', self.cHandler_list)) + self._application.add_handler(CommandHandler('info', self.cHandler_info)) + self._application.add_handler(CommandHandler('start', self.cHandler_start)) + self._application.add_handler(CommandHandler('lo', self.cHandler_lo)) + self._application.add_handler(CommandHandler('tr', self.cHandler_tr)) + self._application.add_handler(CommandHandler('control', self.cHandler_control)) + # Filters.text includes also commands, starting with ``/`` so it is needed to exclude them. + self._application.add_handler(MessageHandler(filters.TEXT & (~filters.COMMAND), self.mHandler)) + + self.init_webinterface() + if not self.init_webinterface(WebInterface): + self.logger.error("Unable to start Webinterface") + self._init_complete = False else: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug("adding command handlers to dispatcher") - - # Dispatcher that handles the updates and dispatches them to the handlers. - dispatcher = self._updater.dispatcher - dispatcher.add_error_handler(self.eHandler) - dispatcher.add_handler(CommandHandler('time', self.cHandler_time)) - dispatcher.add_handler(CommandHandler('help', self.cHandler_help)) - dispatcher.add_handler(CommandHandler('hide', self.cHandler_hide)) - dispatcher.add_handler(CommandHandler('list', self.cHandler_list)) - dispatcher.add_handler(CommandHandler('info', self.cHandler_info)) - dispatcher.add_handler(CommandHandler('start', self.cHandler_start)) - dispatcher.add_handler(CommandHandler('lo', self.cHandler_lo)) - dispatcher.add_handler(CommandHandler('tr', self.cHandler_tr, pass_args=True)) - dispatcher.add_handler(CommandHandler('control', self.cHandler_control)) - - # Filters.text includes also commands, starting with ``/`` so it is needed to exclude them. 
- # This came with lib version 12.4 - dispatcher.add_handler(MessageHandler(Filters.text & (~Filters.command), self.mHandler)) - self.init_webinterface() - - if not self.init_webinterface(WebInterface): - self.logger.error("Unable to start Webinterface") - self._init_complete = False - else: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug("Init complete") - - self._init_complete = True + if self.debug_enabled: + self.logger.debug("Init complete") + + self._init_complete = True def __call__(self, msg, chat_id=None): """ @@ -162,61 +159,126 @@ def run(self): """ This is called when the plugins thread is about to run """ - self.alive = True + if self.debug_enabled: + self.logger.debug("Run method called") + self.logics = Logics.get_instance() # Returns the instance of the Logics class, to be used to access the logics-api - q = self._updater.start_polling(timeout=self._long_polling_timeout) # (poll_interval=0.0, timeout=10, network_delay=None, clean=False, bootstrap_retries=0, read_latency=2.0, allowed_updates=None) - if self._pretty_thread_names: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug("Changing Telegrams thread names to pretty thread names") - try: - for t in self._updater._Updater__threads: - if 'dispatcher' in t.name: - t.name = 'Telegram Dispatcher' - if 'updater' in t.name: - t.name = 'Telegram Updater' - - for t in self._updater.dispatcher._Dispatcher__async_threads: - *_, num = t.name.split('_') - t.name = f'Telegram Worker {num}' if num.isnumeric() else num - - # from telegram.jobqueue.py @ line 301 thread is named - # name=f"Bot:{self._dispatcher.bot.id}:job_queue" - if hasattr(self._updater.job_queue, '_JobQueue__thread'): - t = self._updater.job_queue._JobQueue__thread - if t.name.startswith('Bot'): - _, id, _ = t.name.split(':') - self._updater.job_queue._JobQueue__thread.name = f"Telegram JobQueue for id {id}" - else: - # model in telegram.ext.jobqueue.py might be changed now - pass - except Exception as e: - self.logger.warning(f"Error '{e}' occurred. Could not assign pretty names to Telegrams threads, maybe object model of python-telegram-bot module has changed? 
Please inform the author of plugin!") - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"started polling the updater, Queue is {q}") - if self._welcome_msg: - self.msg_broadcast(self._welcome_msg) - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"sent welcome message {self._welcome_msg}") - + + self.alive = True + + self._loop.run_until_complete(self.run_coros()) + if self.debug_enabled: + self.logger.debug(f"Run method ended") + def stop(self): """ This is called when the plugins thread is about to stop """ - self.alive = False - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug("stop telegram plugin") + try: if self._bye_msg: - self.msg_broadcast(self._bye_msg) - if self.logger.isEnabledFor(logging.DEBUG): + cids = [key for key, value in self._chat_ids_item().items() if value == 1] + self.msg_broadcast(self._bye_msg, chat_id=cids) + if self.debug_enabled: self.logger.debug("sent bye message") - except Exception: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug("could not send bye message") - self._updater.stop() - if self.logger.isEnabledFor(logging.DEBUG): + except Exception as e: + self.logger.error(f"could not send bye message [{e}]") + + time.sleep(1) + + try: + self._taskConn.cancel() + self._taskQueue.cancel() + + asyncio.gather(self._taskConn, self._taskQueue) + self.disconnect() + + while self._loop.is_running(): + asyncio.sleep(0.1) + self._loop.close() + self.alive = False + except Exception as e: + self.logger.error(f"An error occurred while stopping the plugin [{e}]") + + if self.debug_enabled: self.logger.debug("stop telegram plugin finished") + async def run_coros(self): + """ + This method run multiple coroutines concurrently using asyncio + """ + self._taskConn = asyncio.create_task(self.connect()) + self._taskQueue = asyncio.create_task(self.startSendQueue()) + await asyncio.gather(self._taskConn, self._taskQueue) + + async def connect(self): + """ + Connects + """ + if self.debug_enabled: + self.logger.debug("connect method called") + try: + await self._application.initialize() + await self._application.start() + self._updater = self._application.updater + + q = await self._updater.start_polling(timeout=self._long_polling_timeout) + + if self.debug_enabled: + self.logger.debug(f"started polling the updater, Queue is {q}") + + self._bot = self._updater.bot + self.logger.info(f"Telegram bot is listening: {await self._updater.bot.getMe()}") + if self._welcome_msg: + if self.debug_enabled: + self.logger.debug(f"sent welcome message {self._welcome_msg}") + cids = [key for key, value in self._chat_ids_item().items() if value == 1] + await self.async_msg_broadcast(self._welcome_msg, chat_id=cids) + + except TelegramError as e: + # catch Unauthorized errors due to an invalid token + self.logger.error(f"Unable to start up Telegram conversation. Maybe an invalid token? {e}") + return False + if self.debug_enabled: + self.logger.debug("connect method end") + + async def startSendQueue(self): + """ + Waiting for messages to be sent in the queue and sending them to Telegram. 
+ The queue expects a dictionary with various parameters + """ + if self.debug_enabled: + self.logger.debug(f"startSendQueue called - queue: [{self._queue}]") + while True: + try: + message = self._queue.get_nowait() + except queue.Empty: # Keine Nachricht in der Queue + await asyncio.sleep(1) + except Exception as e: + self.logger.debug(f"messageQueue Exception [{e}]") + else: + if self.debug_enabled: + self.logger.debug(f"message queue {message}") + if message["msgType"] == "Text": + await self.async_msg_broadcast(message["msg"], message["chat_id"], message["reply_markup"], message["parse_mode"]) + if message["msgType"] == "Photo": + await self.async_photo_broadcast(message["photofile_or_url"], message["caption"], message["chat_id"], message["local_prepare"]) + + async def disconnect(self): + """ + Stop listening to push updates and logout of this istances Apple TV + """ + self.logger.info(f"disconnecting") + + await self._application.updater.stop() + await self._application.stop() + await self._application.shutdown() + + if self.debug_enabled: + self.logger.debug(f"disconnect end") + def parse_item(self, item): """ Default plugin parse_item method. Is called when the plugin is initialized. @@ -229,7 +291,7 @@ def parse_item(self, item): self._chat_ids_item = item if self.has_iattr(item.conf, ITEM_ATTR_MESSAGE): - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"parse item: {item}") self._items.append(item) return self.update_item @@ -241,24 +303,24 @@ def parse_item(self, item): if self.has_iattr(item.conf, ITEM_ATTR_INFO): key = self.get_iattr_value(item.conf, ITEM_ATTR_INFO) if self.is_valid_command(key): - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"parse item: {item} with command: {key}") if key in self._items_info: self._items_info[key].append(item) - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Append a new item '{item}' to command '{key}'") else: self._items_info[key] = [item] # dem dict neue Liste hinzufuegen - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Register new command '{key}', add item '{item}' and register a handler") # add a handler for each info-attribute - self._updater.dispatcher.add_handler(CommandHandler(key, self.cHandler_info_attr)) + self._application.add_handler(CommandHandler(key, self.cHandler_info_attr)) return self.update_item else: self.logger.error(f"Command '{key}' chosen for item '{item}' is invalid for telegram botfather") if self.has_iattr(item.conf, ITEM_ATTR_TEXT): - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"parse item: {item.id()}") value = self.get_iattr_value(item.conf, ITEM_ATTR_TEXT) if value in ['true', 'True', '1']: @@ -291,19 +353,19 @@ def parse_item(self, item): if 'max' in k: max = v - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"parse control-item: {item} with command: {key}") dicCtl = {'name': key, 'type': changeType, 'item': item, 'question': question, 'timeout': timeout, 'min': min, 'max': max} if key not in self._items_control: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Append a new control-item '{item}' to command '{key}'") self._items_control[key] = dicCtl # add to dict - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Register new command '{key}', add item '{item}' and register a 
handler") # add a handler for each control-attribute - self._updater.dispatcher.add_handler(CommandHandler(key, self.cHandler_control_attr)) + self._application.add_handler(CommandHandler(key, self.cHandler_control_attr)) return self.update_item return None @@ -386,29 +448,25 @@ def update_item(self, item, caller=None, source=None, dest=None): cond = self.get_iattr_value(item.conf, ITEM_ATTR_CONDITION).lower() if cond == "on_change": if item.property.value != item.property.last_value and item.property.last_update <= item.property.last_change: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"condition {cond} met: {item.property.value}!={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") else: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"condition {cond} not met: {item.property.value}=={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") return elif cond == "on_update": # this is standard behaviour pass else: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"ignoring unknown condition {cond}") # sending the message - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"send Message: {msg_txt} to Chat_ID {msg_chat_id_txt}") + # if self.debug_enabled: + # self.logger.debug(f"send Message: {msg_txt} to Chat_ID {msg_chat_id_txt}") self.msg_broadcast(msg_txt, msg_chat_id) - def _msg_broadcast(self, msg, chat_id=None): - self.logger.warning("deprecated, please use msg_broadcast instead") - self.msg_broadcast(msg, chat_id) - - def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): + async def async_msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): """ Send a message to the given chat_id @@ -417,16 +475,34 @@ def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): :param reply_markup: :param parse_mode: """ + if self.debug_enabled: + self.logger.debug(f"async msg_broadcast called") + for cid in self.get_chat_id_list(chat_id): try: - self._bot.send_message(chat_id=cid, text=msg, reply_markup=reply_markup, parse_mode=parse_mode) + response = await self._bot.send_message(chat_id=cid, text=msg, reply_markup=reply_markup, parse_mode=parse_mode) + if response: + if self.debug_enabled: + self.logger.debug(f"Message sent:[{msg}] to Chat_ID:[{cid}] Bot:[{self._bot.bot}] response:[{response}]") + else: + self.logger.error(f"could not broadcast to chat id [{cid}] response: {response}") except TelegramError as e: self.logger.error(f"could not broadcast to chat id [{cid}] due to error {e}") except Exception as e: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") - def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_prepare=True): + def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): + if self.alive: + if self.debug_enabled: + self.logger.debug(f"msg_broadcast called") + q_msg= {"msgType":"Text", "msg":msg, "chat_id":chat_id, "reply_markup":reply_markup, "parse_mode":parse_mode } + try: + self._queue.put(q_msg) + except Exception as e: + self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") + + async def async_photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, 
local_prepare=True): """ Send an image to the given chat @@ -441,14 +517,32 @@ def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_pr if local_prepare: photo_raw = requests.get(photofile_or_url) photo_data = BytesIO(photo_raw.content) - self._bot.send_photo(chat_id=cid, photo=photo_data, caption=caption) + await self._bot.send_photo(chat_id=cid, photo=photo_data, caption=caption) else: - self._bot.send_photo(chat_id=cid, photo=photofile_or_url, caption=caption) + await self._bot.send_photo(chat_id=cid, photo=photofile_or_url, caption=caption) else: - self._bot.send_photo(chat_id=cid, photo=open(str(photofile_or_url), 'rb'), caption=caption) + await self._bot.send_photo(chat_id=cid, photo=open(str(photofile_or_url), 'rb'), caption=caption) except Exception as e: self.logger.error(f"Error '{e}' could not send image {photofile_or_url} to chat id {cid}") + def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_prepare=True): + """ + Send an image to the given chat + + :param photofile_or_url: either a local file or a URL with a link to an image resource + :param local_prepare: Image will be prepared locally instead of passing a link to Telegram. Needed if an image e.g. of a local network webcam is to be sent. + :param caption: caption of image to send + :param chat_id: a chat id or a list of chat ids to identificate the chat(s) + """ + if self.alive: + if self.debug_enabled: + self.logger.debug(f"photo_broadcast called") + q_msg= {"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":chat_id, "caption":caption, "local_prepare":local_prepare } + try: + self._queue.put(q_msg) + except Exception as e: + self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") + def get_chat_id_list(self, att_chat_id): chat_ids_to_send = [] # new list if att_chat_id is None: # no attribute specified @@ -541,7 +635,7 @@ def has_write_access_right(self, user_id): user_data """ - def eHandler(self, update, context): + async def eHandler(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ Just logs an error in case of a problem """ @@ -550,30 +644,30 @@ def eHandler(self, update, context): except Exception: pass - def mHandler(self, update, context): + async def mHandler(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ write the content (text) of the message in an SH-item """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"write the content (text) of the message in an SH-item for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_write_access_right(update.message.chat.id): try: if self._waitAnswer is None: # keine Antwort erwartet (control-Item/question) - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"update.message.from_user.name={update.message.from_user.name}") text = update.message.from_user.name + ": " text += str(update.message.chat.id) + ": " # add the message.chat.id text += update.message.text # add the message.text for item in self._items_text_message: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"write item: {item.id()} value: {text}") item(text, caller=self.get_fullname()) # write text to SH-item else: # Antwort von control-Item/question wird erwartet text = update.message.text dicCtl = self._waitAnswer # _waitAnswer enthält dict mit weiteren Parametern valid = True # für Prüfung des Wertebereiches bei num - if 
self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"update.message.from_user.name={update.message.from_user.name} answer={text} name={dicCtl['name']}") if text == 'On': if dicCtl['type'] == 'onoff': @@ -601,97 +695,98 @@ def mHandler(self, update, context): self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) elif dicCtl['type'] == 'num': if type(text) == int or float: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: answer is num ") item = dicCtl['item'] newValue = text if dicCtl['min'] is not None: if float(newValue) < float(dicCtl['min']): valid = False - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: value:{newValue} to low:{dicCtl['min']}") if dicCtl['max'] is not None: if float(newValue) > float(dicCtl['max']): valid = False - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: value:{newValue} to high:{dicCtl['max']}") if valid: msg = f"{dicCtl['name']} \n change from:{item()} to:{newValue}" - self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=msg) item(newValue) if self.scheduler_get('telegram_change_item_timeout'): self.scheduler_remove('telegram_change_item_timeout') self._waitAnswer = None else: msg = f"{dicCtl['name']} \n out off range" - self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=msg) else: - self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) self._waitAnswer = None except Exception as e: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Exception '{e}' occurred, traceback '{traceback.format_exc()}' Please inform plugin maintainer!") - def cHandler_time(self, update, context): + async def cHandler_time(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /time: return server time """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/time: return server time for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_access_right(update.message.chat.id): - context.bot.send_message(chat_id=update.message.chat.id, text=str(datetime.datetime.now())) + await context.bot.send_message(chat_id=update.message.chat.id, text=str(datetime.datetime.now())) - def cHandler_help(self, update, context): + async def cHandler_help(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /help: show available commands as keyboard """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/help: show available commands as keyboard for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_access_right(update.message.chat.id): - context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("choose"), reply_markup={"keyboard": [["/hide","/start"], ["/time","/list"], ["/lo","/info"], ["/control", 
"/tr "]]}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("choose"), reply_markup={"keyboard": [["/hide","/start"], ["/time","/list"], ["/lo","/info"], ["/control", "/tr "]]}) - def cHandler_hide(self, update, context): + async def cHandler_hide(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /hide: hide keyboard """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/hide: hide keyboard for bot={context.bot} and chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): hide_keyboard = {'hide_keyboard': True} - context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("I'll hide the keyboard"), reply_markup=hide_keyboard) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("I'll hide the keyboard"), reply_markup=hide_keyboard) - def cHandler_list(self, update, context): + async def cHandler_list(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /list: show registered items and value """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/list: show registered items and value for chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): - self.list_items(update.message.chat.id) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.list_items()) + #self.list_items(update.message.chat.id) - def cHandler_info(self, update, context): + async def cHandler_info(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /info: show item-menu with registered items with specific attribute """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/info: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): if len(self._items_info) > 0: - context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Infos from the items:"), reply_markup={"keyboard": self.create_info_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Infos from the items:"), reply_markup={"keyboard": self.create_info_reply_markup()}) else: - context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_info!"), reply_markup={"keyboard": self.create_info_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_info!"), reply_markup={"keyboard": self.create_info_reply_markup()}) - def cHandler_start(self, update, context): + async def cHandler_start(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /start: show a welcome together with asking to add chat id to trusted chat ids """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/start: show a welcome together with asking to add chat id to trusted chat ids for chat_id={update.message.chat.id}") text = "" if self._chat_ids_item: ids = self._chat_ids_item() text = self.translate(f"Your chat id is: {update.message.chat.id}") - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f'update.message.chat.id={update.message.chat.id} with type={type(update.message.chat.id)}') self.logger.debug(f'ids dict={ids}') if update.message.chat.id in ids: @@ -704,32 +799,33 @@ def 
cHandler_start(self, update, context): else: self.logger.warning('No chat_ids defined') - context.bot.send_message(chat_id=update.message.chat.id, text=text) + await context.bot.send_message(chat_id=update.message.chat.id, text=text) - def cHandler_info_attr(self, update, context): + async def cHandler_info_attr(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /command show registered items and value with specific attribute/key where ``command`` is the value from an item with ``telegram_info`` attribute """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug("Enter cHandler_info_attr") if self.has_access_right(update.message.chat.id): - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Gathering items to fulfill command {update.message.text}") c_key = update.message.text.replace("/", "", 1) if c_key in self._items_info: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"info-command: {c_key}") - self.list_items_info(update.message.chat.id, c_key) + #self.list_items_info(update.message.chat.id, c_key) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.list_items_info(c_key)) else: - self._bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown command %s") % c_key) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown command %s") % c_key) else: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Chat with id {update.message.chat.id} has no right to use command {update.message.text}") - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug("Leave cHandler_info_attr") - def cHandler_lo(self, update, context): + async def cHandler_lo(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /lo: show all logics with next scheduled execution time """ @@ -748,85 +844,81 @@ def cHandler_lo(self, update, context): tmp_msg += f" ({', '.join(data)})" tmp_msg += "\n" self.logger.info(f"send Message: {tmp_msg}") - self._bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) - def cHandler_tr(self, update, context): + async def cHandler_tr(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ Trigger a logic with command ``/tr xx`` where xx is the name of the logic to trigger """ if self.has_access_right(update.message.chat.id): logicname = context.args[0] try: - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"trigger_logic: {context.args}") self.logics.trigger_logic(logicname, by=self.get_shortname()) # Trigger a logic except Exception as e: tmp_msg = f"could not trigger logic {logicname} due to error {e}" self.logger.warning(tmp_msg) - self._bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) - def cHandler_control(self, update, context): + async def cHandler_control(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /control: Change values of items with specific attribute """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"/control: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") if self.has_write_access_right(update.message.chat.id): if 
len(self._items_control) > 0: - self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) - self.list_items_control(update.message.chat.id) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.list_items_control()) + #self.list_items_control(update.message.chat.id) else: - context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_control!"), reply_markup={"keyboard": self.create_control_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_control!"), reply_markup={"keyboard": self.create_control_reply_markup()}) - def cHandler_control_attr(self, update, context): + async def cHandler_control_attr(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /xx change value from registered items """ - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug("Enter cHandler_control_attr") if self.has_write_access_right(update.message.chat.id): c_key = update.message.text.replace("/", "", 1) if c_key in self._items_control: dicCtl = self._items_control[c_key] #{'type':type,'item':item} - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-command: name:{c_key} dictCtl:{dicCtl}") - self.change_item(update=update, context=context, name=c_key, dicCtl=dicCtl) + await self.change_item(update=update, context=context, name=c_key, dicCtl=dicCtl) else: - self._bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown control-command %s") % (c_key)) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown control-command %s") % (c_key)) # helper functions - def list_items(self, chat_id): + def list_items(self): """ Send a message with all items that are marked with an attribute ``telegram_message`` """ - if self.has_access_right(chat_id): - text = "" - for item in self._items: - if item.type(): - text += f"{item.id()} = {item()}\n" - else: - text += f"{item.id()}\n" - if not text: - text = "no items found with the attribute:" + ITEM_ATTR_MESSAGE - self._bot.sendMessage(chat_id=chat_id, text=text) + text = "" + for item in self._items: + if item.type(): + text += f"{item.id()} = {item()}\n" + else: + text += f"{item.id()}\n" + if not text: + text = "no items found with the attribute:" + ITEM_ATTR_MESSAGE + return text - def list_items_info(self, chat_id, key): + def list_items_info(self, key): """ Show registered items and value with specific attribute/key """ - if self.has_access_right(chat_id): - text = "" - for item in self._items_info[key]: - if item.type(): - text += f"{item.id()} = {item()}\n" - else: - text += f"{item.id()}\n" - if not text: - text = self.translate("no items found with the attribute %s") % ITEM_ATTR_INFO - self._bot.sendMessage(chat_id=chat_id, text=text) - else: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"Chat with id {chat_id} has no right to list items with key {key}") + text = "" + for item in self._items_info[key]: + if item.type(): + text += f"{item.id()} = {item()}\n" + else: + text += f"{item.id()}\n" + if not text: + text = self.translate("no items found with the 
attribute %s") % ITEM_ATTR_INFO + return text def create_info_reply_markup(self): """ @@ -851,13 +943,13 @@ def create_control_reply_markup(self): for key, value in sorted(self._items_control.items()): button_list.append("/"+key) - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"button_list: {button_list}") header = ["/help"] - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"header: {header}") keyboard = self.build_menu(button_list, n_cols=3, header_buttons=header) - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"keyboard: {keyboard}") return keyboard @@ -872,26 +964,22 @@ def build_menu(self, buttons, n_cols, header_buttons=None, footer_buttons=None): menu.append(footer_buttons) return menu - def list_items_control(self, chat_id): + def list_items_control(self): """ Show registered items and value with specific attribute ITEM_ATTR_CONTROL """ - if self.has_access_right(chat_id): - text = "" - for key, value in sorted(self._items_control.items()): # {'type':type,'item':item} - item = value['item'] - if item.type(): - text += f"{key} = {item()}\n" - else: - text += f"{key}\n" - if not text: - text = self.translate("no items found with the attribute %s") % ITEM_ATTR_CONTROL - self._bot.sendMessage(chat_id=chat_id, text=text) - else: - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"Chat with id {chat_id} has no right to list items with attribute {ITEM_ATTR_CONTROL}") + for key, value in sorted(self._items_control.items()): # {'type':type,'item':item} + item = value['item'] + if item.type(): + text += f"{key} = {item()}\n" + else: + text += f"{key}\n" + if not text: + text = self.translate("no items found with the attribute %s") % ITEM_ATTR_CONTROL + #self._bot.sendMessage(chat_id=chat_id, text=text) + return text - def change_item(self, update, context, name, dicCtl): + async def change_item(self, update, context, name, dicCtl): """ util to change a item-value name:bla, type:toggle/on/off/onoff/trigger/num question:'wirklich einschalten?' 
@@ -903,16 +991,16 @@ def change_item(self, update, context, name, dicCtl): timeout = dicCtl['timeout'] text = "" if changeType == 'toggle': - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: type:toggle") if question != '': nd = (datetime.datetime.now()+ datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question - self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) else: value = item() if item.type() == "bool": @@ -924,71 +1012,73 @@ def change_item(self, update, context, name, dicCtl): self._bot.sendMessage(chat_id=chat_id, text=text) item(newValue) text = f"{name}: {item()}\n" - self._bot.sendMessage(chat_id=chat_id, text=text) + await context.bot.sendMessage(chat_id=chat_id, text=text) if changeType == 'on': - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: type:on") if question != '': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question - self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) else: if item.type() == "bool": item(True) text = f"{name}: {item()}\n" self._bot.sendMessage(chat_id=chat_id, text=text) if changeType == 'off': - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: type:off") if question != '': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question - self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) else: if item.type() == "bool": item(False) text = f"{name}: {item()}\n" - self._bot.sendMessage(chat_id=chat_id, text=text) + await context.bot.sendMessage(chat_id=chat_id, text=text) if changeType == 'onoff': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', 
self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) if question == '': text = self.translate("choose") else: text = question - self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['On', 'Off']]}) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['On', 'Off']]}) if changeType == 'num': text = self.translate("insert a value") nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) - self._bot.sendMessage(chat_id=chat_id, text=text) + await context.bot.sendMessage(chat_id=chat_id, text=text) if not text: text = self.translate("no items found with the attribute %s") % ITEM_ATTR_CONTROL - self._bot.sendMessage(chat_id=chat_id, text=text) + await context.bot.sendMessage(chat_id=chat_id, text=text) - def telegram_change_item_timeout(self, **kwargs): + async def telegram_change_item_timeout(self, **kwargs): update = None context = None if 'update' in kwargs: update = kwargs['update'] if 'context' in kwargs: context = kwargs['context'] - if self.logger.isEnabledFor(logging.DEBUG): + if self.debug_enabled: self.logger.debug(f"Answer control_item timeout update:{update} context:{context}") if self._waitAnswer is not None: self._waitAnswer = None - self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) + # self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) + await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) + diff --git a/telegram/plugin.yaml b/telegram/plugin.yaml index a85382e8f..6feb27fef 100755 --- a/telegram/plugin.yaml +++ b/telegram/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/telegram/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1548691-support-thread-für-das-telegram-plugin - version: 1.7.1 # Plugin version + version: 1.8.0 # Plugin version sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.6 # minimum Python version to use for this plugin diff --git a/telegram/requirements.txt b/telegram/requirements.txt index 76dc58511..d3e3b3a18 100755 --- a/telegram/requirements.txt +++ b/telegram/requirements.txt @@ -1,2 +1,2 @@ -python-telegram-bot>=12.8.0, <14.0 +python-telegram-bot>=20.2 requests \ No newline at end of file diff --git a/telegram/user_doc.rst b/telegram/user_doc.rst index d98fbdbd5..3afa7dfda 100755 --- a/telegram/user_doc.rst +++ b/telegram/user_doc.rst @@ -69,7 +69,10 @@ Im Dictionary sind Paare von Chat-ID und Berechtigung gespeichert. 
# es wird dann der letzte Wert geladen cache: 'True' # Beispiel value: '{ 3234123342: 1, 9234123341: 0 }' - # Ein Dictionary mit chat id und 1 für Lese und Schreibzugriff oder 0 für einen nur Lese-Zugriff + # Ein Dictionary mit chat id und: + # 2 für Lese und Schreibzugriff ohne Willkommens- und Ende Nachricht + # 1 für Lese und Schreibzugriff + # 0 für einen nur Lese-Zugriff # Nachfolgend ein Chat dem Lese- und Schreibrechte gewährt werden value: '{ 3234123342: 1 }' @@ -376,23 +379,25 @@ Die folgende Beispiellogik zeigt einige Nutzungsmöglichkeiten für die Funktion .. code:: python + telegram_plugin = sh.plugins.return_plugin('telegram') + # Eine Nachricht `Hello world!` wird an alle vertrauten Chat Ids gesendet msg = "Hello world!" - sh.telegram.msg_broadcast(msg) + telegram_plugin.msg_broadcast(msg) # Ein Bild von einem externen Server soll gesendet werden. # Nur die URL wird an Telegram gesendet und keine Daten lokal aufbereitet - sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "A dog", None, False) + telegram_plugin.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "A dog", None, False) # Bild auf lokalem Server mit aktueller Zeit an Telegram senden my_webcam_url = "http:// .... bitte lokale URL hier einfügen zum Test ..." - sh.telegram.photo_broadcast(my_webcam_url, "My webcam at {:%Y-%m-%d %H:%M:%S}".format(sh.shtime.now())) + telegram_plugin.photo_broadcast(my_webcam_url, "My webcam at {:%Y-%m-%d %H:%M:%S}".format(sh.shtime.now())) # Bild senden aber den Inhalt lokal vorbereiten - sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "The dog again (data locally prepared)") + telegram_plugin.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "The dog again (data locally prepared)") local_file = "/usr/local/smarthome/var/ ... bitte eine lokal gespeicherte Datei angeben ..." - sh.telegram.photo_broadcast(local_file, local_file) + telegram_plugin.photo_broadcast(local_file, local_file) Anwendungen diff --git a/telegram/webif/templates/index.html b/telegram/webif/templates/index.html index d80a32c57..290518780 100755 --- a/telegram/webif/templates/index.html +++ b/telegram/webif/templates/index.html @@ -62,7 +62,7 @@ -{% set tabcount = 4 %} +{% set tabcount = 5 %} {% set tab2title = "Input Items"%} {% block bodytab2 %}
@@ -130,6 +133,10 @@
{% endblock bodytab2 %} + + {% set tab3title = "Telegram Control"%} {% block bodytab3 %}
@@ -159,47 +166,52 @@
{% endblock bodytab3 %} - {% set tab4title = "Telegram Infos" %} {% block bodytab4 %} -Chat-IDs
- +
- - + + - {% for key in p._chat_ids_item() %} + {% for key in p._items_info %} - + {% endfor %}
{{ _('Registrierte Chat-ID') }}{{ _('Zugriff') }}{{ _('Info') }}{{ _('Info-Items') }}
{{ key }}{{ p._chat_ids_item()[key] }}{{ p._items_info[key] }}
+
+{% endblock bodytab4 %} -Telegram Infos - + +{% set tab5title = "Chat-ID's" %} +{% block bodytab5 %} +
+
- - + + - {% for key in p._items_info %} + {% for key in p._chat_ids_item() %} - + {% endfor %}
{{ _('Info') }}{{ _('Info-Items') }}{{ _('Registrierte Chat-ID') }}{{ _('Zugriff') }}
{{ key }}{{ p._items_info[key] }}{{ p._chat_ids_item()[key] }}
- -{% endblock bodytab4 %} + +{% endblock bodytab5 %} \ No newline at end of file From 8c3035d3ec85cb2451e830204f905b2b105be389 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Apr 2023 11:53:53 +0200 Subject: [PATCH 045/775] Tasmota Plugin: Improve RF-Bridge - Allow send of learned keys - improve of using ZigbeeDevice Shortname --- tasmota/__init__.py | 32 +++++++++++++++++++++++--------- tasmota/locale.yaml | 0 tasmota/plugin.yaml | 3 +-- tasmota/user_doc.rst | 0 4 files changed, 24 insertions(+), 11 deletions(-) mode change 100755 => 100644 tasmota/locale.yaml mode change 100755 => 100644 tasmota/user_doc.rst diff --git a/tasmota/__init__.py b/tasmota/__init__.py index b6925cedc..78b927f18 100644 --- a/tasmota/__init__.py +++ b/tasmota/__init__.py @@ -174,7 +174,7 @@ def parse_item(self, item): tasmota_attr = self.get_iattr_value(item.conf, 'tasmota_attr') tasmota_relay = self.get_iattr_value(item.conf, 'tasmota_relay') - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') tasmota_zb_device = self.get_iattr_value(item.conf, 'tasmota_zb_device') tasmota_zb_group = self.get_iattr_value(item.conf, 'tasmota_zb_group') tasmota_zb_attr = self.get_iattr_value(item.conf, 'tasmota_zb_attr') @@ -211,11 +211,12 @@ def parse_item(self, item): # check if zigbee device short name has been used without parentheses; if so this will be normally parsed to a number and therefore mismatch with definition try: tasmota_zb_device = int(tasmota_zb_device) + except ValueError: + pass + else: self.logger.warning(f"Probably for item {item.path()} the device short name as been used for attribute 'tasmota_zb_device'. Trying to make that work but it will cause exceptions. To prevent this, the short name need to be defined as string by using parentheses") tasmota_zb_device = str(hex(tasmota_zb_device)) tasmota_zb_device = tasmota_zb_device[0:2] + tasmota_zb_device[2:len(tasmota_zb_device)].upper() - except Exception as e: - pass # define item_config item_mapping = f'{tasmota_topic}.{tasmota_zb_device}.{tasmota_zb_attr}' @@ -354,8 +355,15 @@ def update_item(self, item, caller: str = None, source: str = None, dest: str = return elif tasmota_attr == 'rf_key_send': - detail = f"{detail}{value}" - value = 1 + # send learned RF data per defined / selected Key + detail = f"{detail}{value}" # RfKey; number of key in range 1:17 + # 1 = send default RF data for RfKey using RfSync, RfLow, RfHigh and RfHost parameters + # 2 = learn RF data for RfKey + # 3 = unlearn RF data for RfKey + # 4 = save RF data using RfSync, RfLow, RfHigh and last RfCode parameters + # 5 = show default or learned RF data + # 6 = send learned RF data + value = 6 elif tasmota_attr == 'rf_key': if not tasmota_rf_details: @@ -366,6 +374,8 @@ def update_item(self, item, caller: str = None, source: str = None, dest: str = var = tasmota_rf_details.split('=') if len(var) == 2: tasmota_rf_details, tasmota_rf_key_param = var + else: + return detail = f"{detail}{tasmota_rf_details}" value = 1 @@ -1469,9 +1479,13 @@ def _set_item_value(self, tasmota_topic: str, item_type: str, value, info_topic: self.logger.info(f"{tasmota_topic}: More than one item for item_type '{item_type}' found to be set to '{value}' provided by '{src}'. 
First one will be used.") item = item_list[0] - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') if tasmota_rf_details and '=' in tasmota_rf_details: - tasmota_rf_key, tasmota_rf_key_param = tasmota_rf_details.split('=') + var = tasmota_rf_details.split('=') + if len(var) == 2: + tasmota_rf_details, tasmota_rf_key_param = var + else: + return if tasmota_rf_key_param.lower() == 'true': value = True @@ -1480,7 +1494,7 @@ def _set_item_value(self, tasmota_topic: str, item_type: str, value, info_topic: elif tasmota_rf_key_param.lower() == 'toggle': value = not(item()) else: - self.logger.warning(f"Parameter of tasmota_rf_key unknown, Need to be True, False, Toggle") + self.logger.warning(f"Parameter of tasmota_rf_key unknown, Need to be 'True', 'False', 'Toggle'") return # set item value @@ -1583,7 +1597,7 @@ def _configure_zigbee_bridge_settings(self, device: str) -> None: """ Configures Zigbee Bridge settings - :param device: Zigbee bridge to be set to get MQTT Messages in right format") + :param device: Zigbee bridge to be set to get MQTT Messages in right format """ self.logger.info(f"_configure_zigbee_bridge_settings: Do settings of ZigbeeBridge {device}") diff --git a/tasmota/locale.yaml b/tasmota/locale.yaml old mode 100755 new mode 100644 diff --git a/tasmota/plugin.yaml b/tasmota/plugin.yaml index 56c8411d7..0b68e2c56 100644 --- a/tasmota/plugin.yaml +++ b/tasmota/plugin.yaml @@ -129,9 +129,8 @@ item_attributes: tasmota_rf_details: type: str - default: 1 description: - de: "Nummer des auszulösenden RF Keys im Tasmota Device=Aktion bei Empfang" + de: "Nummer des auszulösenden RF Keys im Tasmota Device und Aktion bei Empfang als String; bspw: '16=true' - Bei Empfang des Key16, setze Item auf True" en: "Number of rf keys to be used for sending command" tasmota_zb_device: diff --git a/tasmota/user_doc.rst b/tasmota/user_doc.rst old mode 100755 new mode 100644 From de23ff65521dcf87cedc178a5e7ba04d0f93a9b7 Mon Sep 17 00:00:00 2001 From: SmarthomeNG User Date: Sun, 16 Apr 2023 12:01:23 +0200 Subject: [PATCH 046/775] update tasmota --- tasmota/locale.yaml | 0 tasmota/user_doc.rst | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 tasmota/locale.yaml mode change 100644 => 100755 tasmota/user_doc.rst diff --git a/tasmota/locale.yaml b/tasmota/locale.yaml old mode 100644 new mode 100755 diff --git a/tasmota/user_doc.rst b/tasmota/user_doc.rst old mode 100644 new mode 100755 From eb04b7e6069e37556c6eb7d40ed90ed6616a4369 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Apr 2023 12:24:31 +0200 Subject: [PATCH 047/775] AVM Plugin: Improve stop - improve stop of plugin --- avm/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index ecedbbb7c..f42291bd0 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -148,7 +148,7 @@ def __init__(self, sh): self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via TR064-Interface. 
Not connected.") self.fritz_device = None else: - self.logger.debug("Connection to FritzDevice established.") + self.logger.debug("Connection to FritzDevice via TR064-Interface established.") # init FritzHome try: @@ -183,7 +183,7 @@ def run(self): self.create_cyclic_scheduler(target='tr064', items=self.fritz_device.items, fct=self.fritz_device.cyclic_item_update, offset=2) self.fritz_device.cyclic_item_update(read_all=True) - if self._aha_http_interface and self.fritz_device is not None and self.fritz_device.is_fritzbox(): + if self._aha_http_interface and self.fritz_device and self.fritz_device.is_fritzbox() and self.fritz_home: # add scheduler for updating items self.create_cyclic_scheduler(target='aha', items=self.fritz_home.items, fct=self.fritz_home.cyclic_item_update, offset=4) self.fritz_home.cyclic_item_update(read_all=True) @@ -201,7 +201,7 @@ def stop(self): """ self.logger.debug("Stop method called") self.scheduler_remove('poll_tr064') - if self._aha_http_interface: + if self.fritz_home: self.scheduler_remove('poll_aha') self.scheduler_remove('check_sid') self.fritz_home.logout() From 88d51e201f1651fbb9a9701c401c695eacea6b00 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Apr 2023 19:16:06 +0200 Subject: [PATCH 048/775] TASMOTA Plugin: Improve handling of incoming messages - improve handling of received messages and taking info from dicts - bugfix update item with relay - improve exception handling - improve clearing retained messages --- tasmota/__init__.py | 650 ++++++++++++++++++++++++-------------------- 1 file changed, 350 insertions(+), 300 deletions(-) diff --git a/tasmota/__init__.py b/tasmota/__init__.py index 78b927f18..16c092a26 100644 --- a/tasmota/__init__.py +++ b/tasmota/__init__.py @@ -24,7 +24,7 @@ from datetime import datetime, timedelta from lib.model.mqttplugin import * -from lib.item import Items +# from lib.item import Items from lib.item.item import Item from .webif import WebInterface @@ -92,8 +92,6 @@ def __init__(self, sh): # Call init code of parent class (MqttPlugin) super().__init__() - if not self._init_complete: - return # get the parameters for the plugin (as defined in metadata plugin.yaml): self.telemetry_period = self.get_parameter_value('telemetry_period') @@ -115,15 +113,14 @@ def __init__(self, sh): # Add subscription to get device discovery self.add_subscription('tasmota/discovery/#', 'dict', callback=self.on_mqtt_discovery_message) # Add subscription to get device LWT - self.add_tasmota_subscription('tele', '+', 'LWT', 'bool', bool_values=['Offline', 'Online'], callback=self.on_mqtt_lwt_message) + self.add_tasmota_subscription('tele', '+', 'LWT', 'bool', bool_values=['Offline', 'Online'], callback=self.on_mqtt_lwt_message) # Add subscription to get device status - self.add_tasmota_subscription('stat', '+', 'STATUS0', 'dict', callback=self.on_mqtt_status0_message) + self.add_tasmota_subscription('stat', '+', 'STATUS0', 'dict', callback=self.on_mqtt_status0_message) # Add subscription to get device actions result - self.add_tasmota_subscription('stat', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) + self.add_tasmota_subscription('stat', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) # Init WebIF self.init_webinterface(WebInterface) - return def run(self): """ @@ -336,7 +333,7 @@ def update_item(self, item, caller: str = None, source: str = None, dest: str = # do tasmota_attr specific checks and adaptations if tasmota_attr == 'relay': - if tasmota_relay > 1: + if tasmota_relay: detail = f"{detail}{tasmota_relay}" elif 
tasmota_attr == 'hsb': @@ -453,8 +450,6 @@ def update_item(self, item, caller: str = None, source: str = None, dest: str = # Callbacks ############################################################ - # ToDo: 2023-01-20 17:21:04 ERROR modules.mqtt _on_log: Caught exception in on_message: 'ip' - def on_mqtt_discovery_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: """ Callback function to handle received discovery messages @@ -468,106 +463,114 @@ def on_mqtt_discovery_message(self, topic: str, payload: dict, qos: int = None, self._handle_retained_message(topic, retain) + if not isinstance(payload, dict): + return + try: (tasmota, discovery, device_id, msg_type) = topic.split('/') self.logger.info(f"on_mqtt_discovery_message: device_id={device_id}, type={msg_type}, payload={payload}") - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - if msg_type == 'config': - """ - device_id = 2CF432CC2FC5 - - payload = - { - 'ip': '192.168.2.33', // IP address - 'dn': 'NXSM200_01', // Device name - 'fn': ['NXSM200_01', None, None, None, None, None, None, None], // List of friendly names - 'hn': 'NXSM200-01-4037', // Hostname - 'mac': '2CF432CC2FC5', // MAC Adresse ohne : - 'md': 'NXSM200', // Module - 'ty': 0, // Tuya - 'if': 0, // ifan - 'ofln': 'Offline', // LWT-offline - 'onln': 'Online', // LWT-online - 'state': ['OFF', 'ON', 'TOGGLE', 'HOLD'], // StateText[0..3] - 'sw': '12.1.1', // Firmware Version - 't': 'NXSM200_01', // Topic - 'ft': '%prefix%/%topic%/', // Full Topic - 'tp': ['cmnd', 'stat', 'tele'], // Topic [SUB_PREFIX, PUB_PREFIX, PUB_PREFIX2] - 'rl': [1, 0, 0, 0, 0, 0, 0, 0], // Relays, 0: disabled, 1: relay, 2.. future extension (fan, shutter?) 
- 'swc': [-1, -1, -1, -1, -1, -1, -1, -1], // SwitchMode - 'swn': [None, None, None, None, None, None, None, None], // SwitchName - 'btn': [0, 0, 0, 0, 0, 0, 0, 0], // Buttons - 'so': {'4': 0, '11': 0, '13': 0, '17': 0, '20': 0, '30': 0, '68': 0, '73': 0, '82': 0, '114': 0, '117': 0}, // SetOption needed by HA to map Tasmota devices to HA entities and triggers - 'lk': 0, // ctrgb - 'lt_st': 0, // Light subtype - 'sho': [0, 0, 0, 0], - 'sht': [[0, 0, 48], [0, 0, 46], [0, 0, 110], [0, 0, 108]], - 'ver': 1 // Discovery protocol version - } - """ - - tasmota_topic = payload['t'] - if tasmota_topic: - - device_name = payload['dn'] - self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and device_name={device_name}") - - # if device is unknown, add it to dict - if tasmota_topic not in self.tasmota_devices: - self.logger.info(f"New device based on Discovery Message found.") - self._add_new_device_to_tasmota_devices(tasmota_topic) - - # process decoding message and set device to status 'discovered' - self.tasmota_devices[tasmota_topic]['ip'] = payload['ip'] - self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['fn'][0] - self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['sw'] - self.tasmota_devices[tasmota_topic]['device_id'] = device_id - self.tasmota_devices[tasmota_topic]['module'] = payload['md'] - self.tasmota_devices[tasmota_topic]['mac'] = ':'.join(device_id[i:i + 2] for i in range(0, 12, 2)) - self.tasmota_devices[tasmota_topic]['discovery_config'] = self._rename_discovery_keys(payload) - self.tasmota_devices[tasmota_topic]['status'] = 'discovered' - - # start device interview - self._interview_device(tasmota_topic) - - if payload['ft'] != self.full_topic: - self.logger.warning(f"Device {device_name} discovered, but FullTopic of device does not match plugin setting!") - - # if zigbee bridge, process those - if 'zigbee_bridge' in device_name.lower(): - self.logger.info(f"Zigbee_Bridge discovered") - self.tasmota_devices[tasmota_topic]['zigbee']['status'] = 'discovered' - self._configure_zigbee_bridge_settings(tasmota_topic) - self._discover_zigbee_bridge_devices(tasmota_topic) - - elif msg_type == 'sensors': - """ - device_id = 2CF432CC2FC5 - - payload = {'sn': {'Time': '2022-11-19T13:35:59', - 'ENERGY': {'TotalStartTime': '2019-12-23T17:02:03', 'Total': 85.314, 'Yesterday': 0.0, - 'Today': 0.0, 'Power': 0, 'ApparentPower': 0, 'ReactivePower': 0, 'Factor': 0.0, - 'Voltage': 0, 'Current': 0.0}}, 'ver': 1} - """ - - # get payload with Sensor information - sensor_payload = payload['sn'] - if 'Time' in sensor_payload: - sensor_payload.pop('Time') - - # find matching tasmota_topic - tasmota_topic = None - for entry in self.tasmota_devices: - if self.tasmota_devices[entry].get('device_id') == device_id: - tasmota_topic = entry - break - - # hand over sensor information payload for parsing - if sensor_payload and tasmota_topic: - self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and SensorInformation") - self._handle_sensor(tasmota_topic, '', sensor_payload) + return + + if msg_type == 'config': + """ + device_id = 2CF432CC2FC5 + + payload = + { + 'ip': '192.168.2.33', // IP address + 'dn': 'NXSM200_01', // Device name + 'fn': ['NXSM200_01', None, None, None, None, None, None, None], // List of friendly names + 'hn': 'NXSM200-01-4037', // Hostname + 'mac': '2CF432CC2FC5', // MAC Adresse ohne : + 'md': 'NXSM200', // Module + 'ty': 0, // Tuya + 'if': 0, // ifan + 'ofln': 'Offline', // LWT-offline + 'onln': 'Online', // LWT-online + 'state': 
['OFF', 'ON', 'TOGGLE', 'HOLD'], // StateText[0..3] + 'sw': '12.1.1', // Firmware Version + 't': 'NXSM200_01', // Topic + 'ft': '%prefix%/%topic%/', // Full Topic + 'tp': ['cmnd', 'stat', 'tele'], // Topic [SUB_PREFIX, PUB_PREFIX, PUB_PREFIX2] + 'rl': [1, 0, 0, 0, 0, 0, 0, 0], // Relays, 0: disabled, 1: relay, 2.. future extension (fan, shutter?) + 'swc': [-1, -1, -1, -1, -1, -1, -1, -1], // SwitchMode + 'swn': [None, None, None, None, None, None, None, None], // SwitchName + 'btn': [0, 0, 0, 0, 0, 0, 0, 0], // Buttons + 'so': {'4': 0, '11': 0, '13': 0, '17': 0, '20': 0, '30': 0, '68': 0, '73': 0, '82': 0, '114': 0, '117': 0}, // SetOption needed by HA to map Tasmota devices to HA entities and triggers + 'lk': 0, // ctrgb + 'lt_st': 0, // Light subtype + 'sho': [0, 0, 0, 0], + 'sht': [[0, 0, 48], [0, 0, 46], [0, 0, 110], [0, 0, 108]], + 'ver': 1 // Discovery protocol version + } + """ + + tasmota_topic = payload.get('t') + if tasmota_topic: + + device_name = payload.get('dn') + self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and device_name={device_name}") + + # if device is unknown, add it to dict + if tasmota_topic not in self.tasmota_devices: + self.logger.info(f"New device based on Discovery Message found.") + self._add_new_device_to_tasmota_devices(tasmota_topic) + + # process decoding message and set device to status 'discovered' + self.tasmota_devices[tasmota_topic]['ip'] = payload.get('ip') + self.tasmota_devices[tasmota_topic]['friendly_name'] = payload.get('fn', ['None'])[0] + self.tasmota_devices[tasmota_topic]['fw_ver'] = payload.get('sw') + self.tasmota_devices[tasmota_topic]['device_id'] = device_id + self.tasmota_devices[tasmota_topic]['module'] = payload.get('md') + self.tasmota_devices[tasmota_topic]['mac'] = ':'.join(device_id[i:i + 2] for i in range(0, 12, 2)) + self.tasmota_devices[tasmota_topic]['discovery_config'] = self._rename_discovery_keys(payload) + self.tasmota_devices[tasmota_topic]['status'] = 'discovered' + + # start device interview + self._interview_device(tasmota_topic) + + if payload['ft'] != self.full_topic: + self.logger.warning(f"Device {device_name} discovered, but FullTopic of device does not match plugin setting!") + + # if zigbee bridge, process those + if 'zigbee_bridge' in device_name.lower(): + self.logger.info(f"Zigbee_Bridge discovered") + self.tasmota_devices[tasmota_topic]['zigbee']['status'] = 'discovered' + self._configure_zigbee_bridge_settings(tasmota_topic) + self._discover_zigbee_bridge_devices(tasmota_topic) + + elif msg_type == 'sensors': + """ + device_id = 2CF432CC2FC5 + + payload = {'sn': {'Time': '2022-11-19T13:35:59', + 'ENERGY': {'TotalStartTime': '2019-12-23T17:02:03', 'Total': 85.314, 'Yesterday': 0.0, + 'Today': 0.0, 'Power': 0, 'ApparentPower': 0, 'ReactivePower': 0, 'Factor': 0.0, + 'Voltage': 0, 'Current': 0.0}}, 'ver': 1} + """ + + # get payload with Sensor information + sensor_payload = payload.get('sn') + + if not sensor_payload or not isinstance(sensor_payload, dict): + return + + if 'Time' in sensor_payload: + sensor_payload.pop('Time') + + # find matching tasmota_topic + tasmota_topic = None + for entry in self.tasmota_devices: + if self.tasmota_devices[entry].get('device_id') == device_id: + tasmota_topic = entry + break + + # hand over sensor information payload for parsing + if tasmota_topic: + self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and SensorInformation") + self._handle_sensor(tasmota_topic, '', sensor_payload) def on_mqtt_lwt_message(self, topic: str, 
payload: bool, qos: int = None, retain: bool = None) -> None: """ @@ -581,22 +584,26 @@ def on_mqtt_lwt_message(self, topic: str, payload: bool, qos: int = None, retain """ self._handle_retained_message(topic, retain) + if not isinstance(payload, bool): + return + try: (topic_type, tasmota_topic, info_topic) = topic.split('/') - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - self.logger.info(f"Received LWT Message for {tasmota_topic} with value={payload} and retain={retain}") + return - if payload: - if tasmota_topic not in self.tasmota_devices: - self.logger.debug(f"New online device based on LWT Message discovered.") - self._handle_new_discovered_device(tasmota_topic) - self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) - if tasmota_topic in self.tasmota_devices: - self.tasmota_devices[tasmota_topic]['online'] = payload - self._set_item_value(tasmota_topic, 'online', payload, info_topic) + self.logger.info(f"Received LWT Message for {tasmota_topic} with value={payload} and retain={retain}") + if payload: + if tasmota_topic not in self.tasmota_devices: + self.logger.debug(f"New online device based on LWT Message discovered.") + self._handle_new_discovered_device(tasmota_topic) + self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) + + if tasmota_topic in self.tasmota_devices: + self.tasmota_devices[tasmota_topic]['online'] = payload + self._set_item_value(tasmota_topic, 'online', payload, info_topic) def on_mqtt_status0_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: """ @@ -655,66 +662,98 @@ def on_mqtt_status0_message(self, topic: str, payload: dict, qos: int = None, re self._handle_retained_message(topic, retain) + if not isinstance(payload, dict): + return + try: (topic_type, tasmota_topic, info_topic) = topic.split('/') self.logger.info(f"on_mqtt_status0_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. 
Error was: {e}") + return - else: - self.logger.info(f"Received Status0 Message for {tasmota_topic} with value={payload} and retain={retain}") - self.tasmota_devices[tasmota_topic]['status'] = 'interviewed' + self.logger.info(f"Received Status0 Message for {tasmota_topic} with value={payload} and retain={retain}") + self.tasmota_devices[tasmota_topic]['status'] = 'interviewed' - # handle teleperiod - self._handle_teleperiod(tasmota_topic, payload['StatusLOG']) + # handle teleperiod + status_log = payload.get('StatusLOG') + if not status_log and not isinstance(status_log, dict): + return + self._handle_teleperiod(tasmota_topic, status_log) - if self.tasmota_devices[tasmota_topic]['status'] != 'interviewed': - if self.tasmota_devices[tasmota_topic]['status'] != 'discovered': - # friendly name - self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['Status']['FriendlyName'][0] + # get status information + status = self.tasmota_devices[tasmota_topic]['status'] + if status != 'interviewed' and status != 'discovered': + # friendly name + try: + self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['Status']['FriendlyName'][0] + except KeyError: + pass - # IP Address - ip = payload['StatusNET']['IPAddress'] - ip_eth = payload['StatusNET'].get('Ethernet', {}).get('IPAddress') - ip = ip_eth if ip == '0.0.0.0' else None - self.tasmota_devices[tasmota_topic]['ip'] = ip + # IP Address + ip = None + ip_eth = None + try: + ip = payload['StatusNET']['IPAddress'] + except KeyError: + pass + try: + ip_eth = payload['StatusNET'].get('Ethernet', {}).get('IPAddress') + except KeyError: + pass - # Firmware - self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['StatusFWR']['Version'].split('(')[0] + if ip: + if ip_eth and ip == '0.0.0.0': + ip = ip_eth + self.tasmota_devices[tasmota_topic]['ip'] = ip - # MAC - self.tasmota_devices[tasmota_topic]['mac'] = payload['StatusNET']['Mac'] + # Firmware + try: + self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['StatusFWR']['Version'].split('(')[0] + except KeyError: + pass + + # MAC + try: + self.tasmota_devices[tasmota_topic]['mac'] = payload['StatusNET']['Mac'] + except KeyError: + pass - # Module No + # Module No + try: self.tasmota_devices[tasmota_topic]['template'] = payload['Status']['Module'] + except KeyError: + pass - # get detailed status using payload['StatusSTS'] - status_sts = payload['StatusSTS'] + # get detailed status using payload['StatusSTS'] + status_sts = payload.get('StatusSTS') + if not status_sts and not isinstance(status_sts, dict): + return - # Handling Lights and Dimmer - if any([i in status_sts for i in self.LIGHT_MSG]): - self._handle_lights(tasmota_topic, info_topic, status_sts) + # Handling Lights and Dimmer + if any([i in status_sts for i in self.LIGHT_MSG]): + self._handle_lights(tasmota_topic, info_topic, status_sts) - # Handling of Power - if any(item.startswith("POWER") for item in status_sts.keys()): - self._handle_power(tasmota_topic, info_topic, status_sts) + # Handling of Power + if any(item.startswith("POWER") for item in status_sts.keys()): + self._handle_power(tasmota_topic, info_topic, status_sts) - # Handling of RF messages - if any(item.startswith("Rf") for item in status_sts.keys()): - self._handle_rf(tasmota_topic, info_topic, status_sts) + # Handling of RF messages + if any(item.startswith("Rf") for item in status_sts.keys()): + self._handle_rf(tasmota_topic, info_topic, status_sts) - # Handling of Wi-Fi - if 'Wifi' in status_sts: - self._handle_wifi(tasmota_topic, status_sts['Wifi']) + # 
Handling of Wi-Fi + if 'Wifi' in status_sts: + self._handle_wifi(tasmota_topic, status_sts['Wifi']) - # Handling of Uptime - if 'Uptime' in status_sts: - self._handle_uptime(tasmota_topic, status_sts['Uptime']) + # Handling of Uptime + if 'Uptime' in status_sts: + self._handle_uptime(tasmota_topic, status_sts['Uptime']) - # Handling of UptimeSec - if 'UptimeSec' in status_sts: - self.logger.info(f"Received Message contains UptimeSec information.") - self._handle_uptime_sec(tasmota_topic, status_sts['UptimeSec']) + # Handling of UptimeSec + if 'UptimeSec' in status_sts: + self.logger.info(f"Received Message contains UptimeSec information.") + self._handle_uptime_sec(tasmota_topic, status_sts['UptimeSec']) def on_mqtt_info_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: """ @@ -729,28 +768,44 @@ def on_mqtt_info_message(self, topic: str, payload: dict, qos: int = None, retai self._handle_retained_message(topic, retain) + if not isinstance(payload, dict): + return + try: (topic_type, tasmota_topic, info_topic) = topic.split('/') self.logger.debug(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - if info_topic == 'INFO1': - # payload={'Info1': {'Module': 'Sonoff Basic', 'Version': '11.0.0(tasmota)', 'FallbackTopic': 'cmnd/DVES_2EB8AE_fb/', 'GroupTopic': 'cmnd/tasmotas/'}} - self.logger.debug(f"Received Message decoded as INFO1 message.") + return + + if info_topic == 'INFO1': + # payload={'Info1': {'Module': 'Sonoff Basic', 'Version': '11.0.0(tasmota)', 'FallbackTopic': 'cmnd/DVES_2EB8AE_fb/', 'GroupTopic': 'cmnd/tasmotas/'}} + self.logger.debug(f"Received Message decoded as INFO1 message.") + try: self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['Info1']['Version'].split('(')[0] + except KeyError: + pass + try: self.tasmota_devices[tasmota_topic]['module_no'] = payload['Info1']['Module'] + except KeyError: + pass - elif info_topic == 'INFO2': - # payload={'Info2': {'WebServerMode': 'Admin', 'Hostname': 'SONOFF-B1-6318', 'IPAddress': '192.168.2.25'}} - self.logger.debug(f"Received Message decoded as INFO2 message.") + elif info_topic == 'INFO2': + # payload={'Info2': {'WebServerMode': 'Admin', 'Hostname': 'SONOFF-B1-6318', 'IPAddress': '192.168.2.25'}} + self.logger.debug(f"Received Message decoded as INFO2 message.") + try: self.tasmota_devices[tasmota_topic]['ip'] = payload['Info2']['IPAddress'] + except KeyError: + pass - elif info_topic == 'INFO3': - # payload={'Info3': {'RestartReason': 'Software/System restart', 'BootCount': 1395}} - self.logger.debug(f"Received Message decoded as INFO3 message.") + elif info_topic == 'INFO3': + # payload={'Info3': {'RestartReason': 'Software/System restart', 'BootCount': 1395}} + self.logger.debug(f"Received Message decoded as INFO3 message.") + try: restart_reason = payload['Info3']['RestartReason'] - self.logger.warning(f"Device {tasmota_topic} (IP={self.tasmota_devices[tasmota_topic]['ip']}) just startet. Reason={restart_reason}") + self.logger.info(f"Device {tasmota_topic} (IP={self.tasmota_devices[tasmota_topic]['ip']}) just startet. 
Reason={restart_reason}") + except KeyError: + pass def on_mqtt_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: """ @@ -765,91 +820,94 @@ def on_mqtt_message(self, topic: str, payload: dict, qos: int = None, retain: bo self._handle_retained_message(topic, retain) + if not isinstance(payload, dict): + return + try: (topic_type, tasmota_topic, info_topic) = topic.split('/') self.logger.info(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: + return - # handle unknown device - if tasmota_topic not in self.tasmota_devices: - self._handle_new_discovered_device(tasmota_topic) + # handle unknown device + if tasmota_topic not in self.tasmota_devices: + self._handle_new_discovered_device(tasmota_topic) + + # handle message + if info_topic in ['STATE', 'RESULT']: + + # Handling of TelePeriod + if 'TelePeriod' in payload: + self.logger.info(f"Received Message decoded as teleperiod message.") + self._handle_teleperiod(tasmota_topic, payload['TelePeriod']) + + elif 'Module' in payload: + self.logger.info(f"Received Message decoded as Module message.") + self._handle_module(tasmota_topic, payload['Module']) + + # Handling of Light messages + elif any([i in payload for i in self.LIGHT_MSG]): + self.logger.info(f"Received Message decoded as light message.") + self._handle_lights(tasmota_topic, info_topic, payload) + + # Handling of Power messages + elif any(item.startswith("POWER") for item in payload.keys()): + self.logger.info(f"Received Message decoded as power message.") + self._handle_power(tasmota_topic, info_topic, payload) + + # Handling of RF messages payload={'Time': '2022-11-21T11:22:55', 'RfReceived': {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'}} + elif 'RfReceived' in payload: + self.logger.info(f"Received Message decoded as RF message.") + self._handle_rf(tasmota_topic, info_topic, payload['RfReceived']) + + # Handling of Setting messages + elif next(iter(payload)).startswith("SetOption"): + # elif any(item.startswith("SetOption") for item in payload.keys()): + self.logger.info(f"Received Message decoded as Tasmota Setting message.") + self._handle_setting(tasmota_topic, payload) + + # Handling of Zigbee Bridge Config messages + elif 'ZbConfig' in payload: + self.logger.info(f"Received Message decoded as Zigbee Config message.") + self._handle_zbconfig(tasmota_topic, payload['ZbConfig']) + + # Handling of Zigbee Bridge Status messages + elif any(item.startswith("ZbStatus") for item in payload.keys()): + self.logger.info(f"Received Message decoded as Zigbee ZbStatus message.") + self._handle_zbstatus(tasmota_topic, payload) - # handle message - if isinstance(payload, dict) and info_topic in ['STATE', 'RESULT']: - - # Handling of TelePeriod - if 'TelePeriod' in payload: - self.logger.info(f"Received Message decoded as teleperiod message.") - self._handle_teleperiod(tasmota_topic, payload['TelePeriod']) - - elif 'Module' in payload: - self.logger.info(f"Received Message decoded as Module message.") - self._handle_module(tasmota_topic, payload['Module']) - - # Handling of Light messages - elif any([i in payload for i in self.LIGHT_MSG]): - self.logger.info(f"Received Message decoded as light message.") - self._handle_lights(tasmota_topic, info_topic, payload) - - # Handling of Power messages - elif 
any(item.startswith("POWER") for item in payload.keys()): - self.logger.info(f"Received Message decoded as power message.") - self._handle_power(tasmota_topic, info_topic, payload) - - # Handling of RF messages payload={'Time': '2022-11-21T11:22:55', 'RfReceived': {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'}} - elif 'RfReceived' in payload: - self.logger.info(f"Received Message decoded as RF message.") - self._handle_rf(tasmota_topic, info_topic, payload['RfReceived']) - - # Handling of Setting messages - elif next(iter(payload)).startswith("SetOption"): - # elif any(item.startswith("SetOption") for item in payload.keys()): - self.logger.info(f"Received Message decoded as Tasmota Setting message.") - self._handle_setting(tasmota_topic, payload) - - # Handling of Zigbee Bridge Config messages - elif 'ZbConfig' in payload: - self.logger.info(f"Received Message decoded as Zigbee Config message.") - self._handle_zbconfig(tasmota_topic, payload['ZbConfig']) - - # Handling of Zigbee Bridge Status messages - elif any(item.startswith("ZbStatus") for item in payload.keys()): - self.logger.info(f"Received Message decoded as Zigbee ZbStatus message.") - self._handle_zbstatus(tasmota_topic, payload) - - # Handling of Wi-Fi - if 'Wifi' in payload: - self.logger.info(f"Received Message contains Wifi information.") - self._handle_wifi(tasmota_topic, payload['Wifi']) - - # Handling of Uptime - if 'Uptime' in payload: - self.logger.info(f"Received Message contains Uptime information.") - self._handle_uptime(tasmota_topic, payload['Uptime']) - - # Handling of UptimeSec - if 'UptimeSec' in payload: - self.logger.info(f"Received Message contains UptimeSec information.") - self._handle_uptime_sec(tasmota_topic, payload['UptimeSec']) - - elif isinstance(payload, dict) and info_topic == 'SENSOR': - self.logger.info(f"Received Message contains sensor information.") - self._handle_sensor(tasmota_topic, info_topic, payload) + # Handling of Wi-Fi + if 'Wifi' in payload: + self.logger.info(f"Received Message contains Wifi information.") + self._handle_wifi(tasmota_topic, payload['Wifi']) - else: - self.logger.warning(f"Received Message '{payload}' not handled within plugin.") + # Handling of Uptime + if 'Uptime' in payload: + self.logger.info(f"Received Message contains Uptime information.") + self._handle_uptime(tasmota_topic, payload['Uptime']) - # setting new online-timeout - self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) + # Handling of UptimeSec + if 'UptimeSec' in payload: + self.logger.info(f"Received Message contains UptimeSec information.") + self._handle_uptime_sec(tasmota_topic, payload['UptimeSec']) - # setting online_item to True - self._set_item_value(tasmota_topic, 'online', True, info_topic) + elif info_topic == 'SENSOR': + self.logger.info(f"Received Message contains sensor information.") + self._handle_sensor(tasmota_topic, info_topic, payload) + + else: + self.logger.warning(f"Received Message '{payload}' not handled within plugin.") + + # setting new online-timeout + self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) + + # setting online_item to True + self._set_item_value(tasmota_topic, 'online', True, info_topic) def on_mqtt_power_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: """ - Callback function to handle received messages + Callback function to handle received power messages :param 
topic: MQTT topic :param payload: MQTT message payload @@ -860,31 +918,26 @@ def on_mqtt_power_message(self, topic: str, payload: dict, qos: int = None, ret self._handle_retained_message(topic, retain) - # check for retained message and handle it - if bool(retain): - if topic not in self.topics_of_retained_messages: - self.topics_of_retained_messages.append(topic) - else: - if topic in self.topics_of_retained_messages: - self.topics_of_retained_messages.remove(topic) + if not isinstance(payload, dict): + return - # handle incoming message try: (topic_type, tasmota_topic, info_topic) = topic.split('/') self.logger.info(f"on_mqtt_power_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: + except ValueError as e: self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - device = self.tasmota_devices.get(tasmota_topic, None) - if not device: - return + return + + device = self.tasmota_devices.get(tasmota_topic) + if not device: + return - if info_topic.startswith('POWER'): - tasmota_relay = str(info_topic[5:]) - if not tasmota_relay: - tasmota_relay = 1 - self._set_item_value(tasmota_topic, f'relay.{tasmota_relay}', payload == 'ON', info_topic) - self.tasmota_devices[tasmota_topic]['relais'][info_topic] = payload + if info_topic.startswith('POWER'): + tasmota_relay = str(info_topic[5:]) + if not tasmota_relay: + tasmota_relay = 1 + self._set_item_value(tasmota_topic, f'relay.{tasmota_relay}', payload == 'ON', info_topic) + self.tasmota_devices[tasmota_topic]['relais'][info_topic] = payload ############################################################ # Parse detailed messages @@ -1145,7 +1198,7 @@ def _handle_power(self, device: str, function: str, payload: dict) -> None: self.tasmota_devices[device]['relais'].update(power_dict) for power in power_dict: relay_index = 1 if len(power) == 5 else str(power[5:]) - item_relay = f'relay{relay_index}' + item_relay = f'relay.{relay_index}' self._set_item_value(device, item_relay, power_dict[power], function) def _handle_module(self, device: str, payload: dict) -> None: @@ -1357,7 +1410,7 @@ def check_online_status(self): else: self.logger.debug(f'check_online_status: Checking online status of {tasmota_topic} successful') - def add_tasmota_subscription(self, prefix: str, topic: str, detail: str, payload_type: str, bool_values: list = None, item=None, callback=None) -> None: + def add_tasmota_subscription(self, prefix: str, topic: str, detail: str, payload_type: str, bool_values: list = None, item: Item=None, callback=None) -> None: """ build the topic in Tasmota style and add the subscription to mqtt @@ -1376,7 +1429,7 @@ def add_tasmota_subscription(self, prefix: str, topic: str, detail: str, payload tpc += detail self.add_subscription(tpc, payload_type, bool_values=bool_values, callback=callback) - def publish_tasmota_topic(self, prefix: str, topic: str, detail: str, payload, item=None, qos: int = None, retain: bool = False, bool_values: list = None) -> None: + def publish_tasmota_topic(self, prefix: str, topic: str, detail: str, payload, item: Item=None, qos: int = None, retain: bool = False, bool_values: list = None) -> None: """ build the topic in Tasmota style and publish to mqtt @@ -1415,16 +1468,15 @@ def clear_retained_messages(self, retained_msg=None): Method to clear all retained messages """ - if not retained_msg: - retained_msg = self.topics_of_retained_messages + if retained_msg: + retained_msg_list = list() + 
retained_msg_list.append(retained_msg) + else: + retained_msg_list = self.topics_of_retained_messages - for topic in retained_msg: - try: - self.logger.warning(f"Clearing retained message for topic={topic}") - self.publish_topic(topic=topic, payload="", retain=True) - except Exception as e: - self.logger.warning(f"Clearing retained message for topic={topic}, caused error {e}") - pass + for topic in retained_msg_list: + self.logger.info(f"Clearing retained message for topic={topic}") + self.publish_topic(topic=topic, payload="", retain=True) def _interview_device(self, topic: str) -> None: """ @@ -1475,43 +1527,40 @@ def _set_item_value(self, tasmota_topic: str, item_type: str, value, info_topic: self.logger.debug(f"{tasmota_topic}: No item for item_type '{item_type}' defined to set to '{value}' provided by '{src}'.") return - if len(item_list) > 1: - self.logger.info(f"{tasmota_topic}: More than one item for item_type '{item_type}' found to be set to '{value}' provided by '{src}'. First one will be used.") - - item = item_list[0] - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') - if tasmota_rf_details and '=' in tasmota_rf_details: - var = tasmota_rf_details.split('=') - if len(var) == 2: - tasmota_rf_details, tasmota_rf_key_param = var - else: - return + for item in item_list: + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') + if tasmota_rf_details and '=' in tasmota_rf_details: + var = tasmota_rf_details.split('=') + if len(var) == 2: + tasmota_rf_details, tasmota_rf_key_param = var + else: + return - if tasmota_rf_key_param.lower() == 'true': - value = True - elif tasmota_rf_key_param.lower() == 'false': - value = True - elif tasmota_rf_key_param.lower() == 'toggle': - value = not(item()) - else: - self.logger.warning(f"Parameter of tasmota_rf_key unknown, Need to be 'True', 'False', 'Toggle'") - return + if tasmota_rf_key_param.lower() == 'true': + value = True + elif tasmota_rf_key_param.lower() == 'false': + value = True + elif tasmota_rf_key_param.lower() == 'toggle': + value = not(item()) + else: + self.logger.warning(f"Parameter of tasmota_rf_key unknown, Need to be 'True', 'False', 'Toggle'") + return - # set item value - self.logger.info(f"Item '{item.path()}' via item_type '{item_type}' set to value '{value}' provided by '{src}'.") - item(value, self.get_shortname(), src) + # set item value + self.logger.info(f"Item '{item.path()}' via item_type '{item_type}' set to value '{value}' provided by '{src}'.") + item(value, self.get_shortname(), src) - def _handle_new_discovered_device(self, tasmota_topic): + def _handle_new_discovered_device(self, tasmota_topic: str): self._add_new_device_to_tasmota_devices(tasmota_topic) self.tasmota_devices[tasmota_topic]['status'] = 'discovered' self._interview_device(tasmota_topic) - def _add_new_device_to_tasmota_devices(self, tasmota_topic): + def _add_new_device_to_tasmota_devices(self, tasmota_topic: str): self.tasmota_devices[tasmota_topic] = self._get_device_dict_1_template() self.tasmota_devices[tasmota_topic].update(self._get_device_dict_2_template()) - def _set_device_offline(self, tasmota_topic): + def _set_device_offline(self, tasmota_topic: str): self.tasmota_devices[tasmota_topic]['online'] = False self._set_item_value(tasmota_topic, 'online', False, 'check_online_status') @@ -1640,6 +1689,7 @@ def _handle_retained_message(self, topic: str, retain: bool) -> None: if topic in self.topics_of_retained_messages: self.topics_of_retained_messages.remove(topic) + 
############################################################ # Plugin Properties ############################################################ From e5052bac086133ab39f15164d1cab81a2379d7c5 Mon Sep 17 00:00:00 2001 From: ivande Date: Tue, 18 Apr 2023 22:11:54 +0200 Subject: [PATCH 049/775] V1.8.0 revised stop method --- telegram/__init__.py | 46 ++++++++++++++++++++++++++------------------ 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index fe01657d8..88f65499f 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -53,7 +53,7 @@ ITEM_ATTR_INFO = 'telegram_info' # read items with specific item-values ITEM_ATTR_TEXT = 'telegram_text' # write message-text into the item ITEM_ATTR_MATCHREGEX = 'telegram_value_match_regex' # check a value against a condition before sending a message -ITEM_ATTR_CHAT_IDS = 'telegram_chat_ids' +ITEM_ATTR_CHAT_IDS = 'telegram_chat_ids' # specifying chat IDs and write access ITEM_ATTR_MSG_ID = 'telegram_message_chat_id' # chat_id the message should be sent to ITEM_ATTR_CONTROL = 'telegram_control' # control(=change) item-values (bool/num) @@ -72,10 +72,10 @@ class Telegram(SmartPlugin): _items = [] # all items using attribute ``telegram_message`` _items_info = {} # dict used whith the info-command: key = attribute_value, val= item_list telegram_info _items_text_message = [] # items in which the text message is written ITEM_ATTR_TEXT - _items_control = {} # dict used whith the control-command: + _items_control = {} # dict used whith the control-command: _chat_ids_item = {} # an item with a dict of chat_id and write access _waitAnswer = None # wait a specific answer Yes/No - or num (change_item) - _queue = None + _queue = None # queue for the messages to be sent def __init__(self, sh): """ @@ -101,7 +101,7 @@ def __init__(self, sh): return self._loop = asyncio.get_event_loop() - + self.alive = False self._name = self.get_parameter_value('name') self._token = self.get_parameter_value('token') @@ -113,7 +113,6 @@ def __init__(self, sh): self._long_polling_timeout = self.get_parameter_value('long_polling_timeout') self._pretty_thread_names = self.get_parameter_value('pretty_thread_names') - self._application = None self._bot = None self._queue = Queue() @@ -187,18 +186,25 @@ def stop(self): self.logger.error(f"could not send bye message [{e}]") time.sleep(1) - + self.alive = False # Clears the infiniti loop in sendQueue try: - self._taskConn.cancel() - self._taskQueue.cancel() - + # if not self._taskConn.done(): + # if self.debug_enabled: + # self.logger.debug("taskConn not done") + # self._taskConn.cancel() + # if not self._taskQueue.done(): + # if self.debug_enabled: + # self.logger.debug("taskQueue not done") + # self._taskQueue.cancel() asyncio.gather(self._taskConn, self._taskQueue) self.disconnect() - while self._loop.is_running(): - asyncio.sleep(0.1) + if self._loop.is_running(): + if self.debug_enabled: + self.logger.debug("stop telegram _loop.is_running") + while self._loop.is_running(): + asyncio.sleep(0.1) self._loop.close() - self.alive = False except Exception as e: self.logger.error(f"An error occurred while stopping the plugin [{e}]") @@ -210,7 +216,7 @@ async def run_coros(self): This method run multiple coroutines concurrently using asyncio """ self._taskConn = asyncio.create_task(self.connect()) - self._taskQueue = asyncio.create_task(self.startSendQueue()) + self._taskQueue = asyncio.create_task(self.sendQueue()) await asyncio.gather(self._taskConn, self._taskQueue) async def 
connect(self): @@ -223,7 +229,7 @@ async def connect(self): await self._application.initialize() await self._application.start() self._updater = self._application.updater - + q = await self._updater.start_polling(timeout=self._long_polling_timeout) if self.debug_enabled: @@ -244,27 +250,29 @@ async def connect(self): if self.debug_enabled: self.logger.debug("connect method end") - async def startSendQueue(self): + async def sendQueue(self): """ Waiting for messages to be sent in the queue and sending them to Telegram. The queue expects a dictionary with various parameters """ if self.debug_enabled: - self.logger.debug(f"startSendQueue called - queue: [{self._queue}]") - while True: + self.logger.debug(f"sendQueue called - queue: [{self._queue}]") + while self.alive: # infinite loop until self.alive = False try: message = self._queue.get_nowait() - except queue.Empty: # Keine Nachricht in der Queue + except queue.Empty: # no message to send in the queue await asyncio.sleep(1) except Exception as e: self.logger.debug(f"messageQueue Exception [{e}]") - else: + else: # message to be sent in the queue if self.debug_enabled: self.logger.debug(f"message queue {message}") if message["msgType"] == "Text": await self.async_msg_broadcast(message["msg"], message["chat_id"], message["reply_markup"], message["parse_mode"]) if message["msgType"] == "Photo": await self.async_photo_broadcast(message["photofile_or_url"], message["caption"], message["chat_id"], message["local_prepare"]) + if self.debug_enabled: + self.logger.debug("sendQueue method end") async def disconnect(self): """ From 6b231df06adb63d958deafe8674641e4c82a3cc9 Mon Sep 17 00:00:00 2001 From: gruberth Date: Wed, 19 Apr 2023 14:20:41 +0200 Subject: [PATCH 050/775] modbus_tcp: Fixed bug in __write_Registers() on sending float32 because of missing else in if --- modbus_tcp/__init__.py | 106 ++++++++++++++++++++++------------------- 1 file changed, 58 insertions(+), 48 deletions(-) diff --git a/modbus_tcp/__init__.py b/modbus_tcp/__init__.py index 53858f52a..51eb74935 100755 --- a/modbus_tcp/__init__.py +++ b/modbus_tcp/__init__.py @@ -37,6 +37,7 @@ # pymodbus library from https://github.com/riptideio/pymodbus from pymodbus.version import version + pymodbus_baseversion = int(version.short().split('.')[0]) if pymodbus_baseversion > 2: @@ -55,6 +56,7 @@ AttrObjectType = 'modBusObjectType' AttrDirection = 'modBusDirection' + class modbus_tcp(SmartPlugin): ALLOW_MULTIINSTANCE = True PLUGIN_VERSION = '1.0.8' @@ -87,7 +89,6 @@ def __init__(self, sh, *args, **kwargs): self.init_webinterface(WebInterface) - return def run(self): @@ -137,7 +138,7 @@ def parse_item(self, item): if self.has_iattr(item.conf, AttrObjectType): objectType = self.get_iattr_value(item.conf, AttrObjectType) - reg = str(objectType) # dictionary key: objectType.regAddr.slaveUnit // HoldingRegister.528.1 + reg = str(objectType) # dictionary key: objectType.regAddr.slaveUnit // HoldingRegister.528.1 reg += '.' reg += str(regAddr) reg += '.' 
@@ -151,14 +152,14 @@ def parse_item(self, item): byteOrder = self.get_iattr_value(item.conf, AttrByteOrder) if self.has_iattr(item.conf, AttrWordOrder): wordOrder = self.get_iattr_value(item.conf, AttrWordOrder) - if byteOrder == 'Endian.Big': # Von String in Endian-Konstante "umwandeln" + if byteOrder == 'Endian.Big': # Von String in Endian-Konstante "umwandeln" byteOrder = Endian.Big elif byteOrder == 'Endian.Little': byteOrder = Endian.Little else: byteOrder = Endian.Big self.logger.warning("Invalid byte order -> default(Endian.Big) is used") - if wordOrder == 'Endian.Big': # Von String in Endian-Konstante "umwandeln" + if wordOrder == 'Endian.Big': # Von String in Endian-Konstante "umwandeln" wordOrder = Endian.Big elif wordOrder == 'Endian.Little': wordOrder = Endian.Little @@ -166,8 +167,10 @@ def parse_item(self, item): wordOrder = Endian.Big self.logger.warning("Invalid byte order -> default(Endian.Big) is used") - regPara = {'regAddr': regAddr, 'slaveUnit': slaveUnit, 'dataType': dataType, 'factor': factor, 'byteOrder': byteOrder, - 'wordOrder': wordOrder, 'item': item, 'value': value, 'objectType': objectType, 'dataDir': dataDirection } + regPara = {'regAddr': regAddr, 'slaveUnit': slaveUnit, 'dataType': dataType, 'factor': factor, + 'byteOrder': byteOrder, + 'wordOrder': wordOrder, 'item': item, 'value': value, 'objectType': objectType, + 'dataDir': dataDirection} if dataDirection == 'read': self._regToRead.update({reg: regPara}) self.logger.info("parse item: {0} Attributes {1}".format(item, regPara)) @@ -177,8 +180,8 @@ def parse_item(self, item): self.logger.info("parse item: {0} Attributes {1}".format(item, regPara)) return self.update_item else: - self.logger.warning("Invalid data direction -> default(read) is used") - self._regToRead.update({reg: regPara}) + self.logger.warning("Invalid data direction -> default(read) is used") + self._regToRead.update({reg: regPara}) def poll_device(self): """ @@ -204,7 +207,6 @@ def poll_device(self): self.connected = False return - startTime = datetime.now() regCount = 0 try: @@ -212,14 +214,14 @@ def poll_device(self): with self.lock: regAddr = regPara['regAddr'] value = self.__read_Registers(regPara) - #self.logger.debug("value readed: {0} type: {1}".format(value, type(value))) + # self.logger.debug("value readed: {0} type: {1}".format(value, type(value))) if value is not None: item = regPara['item'] if regPara['factor'] != 1: value = value * regPara['factor'] - #self.logger.debug("value {0} multiply by: {1}".format(value, regPara['factor'])) + # self.logger.debug("value {0} multiply by: {1}".format(value, regPara['factor'])) item(value, self.get_fullname()) - regCount+=1 + regCount += 1 if 'read_dt' in regPara: regPara['last_read_dt'] = regPara['read_dt'] @@ -232,13 +234,13 @@ def poll_device(self): endTime = datetime.now() duration = endTime - startTime if regCount > 0: - self._pollStatus['last_dt']=datetime.now() - self._pollStatus['regCount']=regCount + self._pollStatus['last_dt'] = datetime.now() + self._pollStatus['regCount'] = regCount self.logger.debug("poll_device: {0} register readed requed-time: {1}".format(regCount, duration)) except Exception as e: self.logger.error("something went wrong in the poll_device function: {0}".format(e)) - # called each time an item changes. + # called each time an item changes. 
def update_item(self, item, caller=None, source=None, dest=None): """ Item has been updated @@ -256,17 +258,17 @@ def update_item(self, item, caller=None, source=None, dest=None): slaveUnit = self._slaveUnit dataDirection = 'read' - if caller == self.get_fullname(): - #self.logger.debug('item was changed by the plugin itself - caller:{0} source:{1} dest:{2} '.format(caller, source, dest)) + # self.logger.debug('item was changed by the plugin itself - caller:{0} source:{1} dest:{2} '.format(caller, source, dest)) return if self.has_iattr(item.conf, AttrDirection): dataDirection = self.get_iattr_value(item.conf, AttrDirection) if not dataDirection == 'read_write': - self.logger.debug('update_item:{0} Writing is not allowed - selected dataDirection:{1}'.format(item, dataDirection)) + self.logger.debug( + 'update_item:{0} Writing is not allowed - selected dataDirection:{1}'.format(item, dataDirection)) return - #else: + # else: # self.logger.debug('update_item:{0} dataDirection:{1}'.format(item, dataDirection)) if self.has_iattr(item.conf, AttrAddress): regAddr = int(self.get_iattr_value(item.conf, AttrAddress)) @@ -282,7 +284,7 @@ def update_item(self, item, caller=None, source=None, dest=None): else: return - reg = str(objectType) # Dict-key: HoldingRegister.528.1 *** objectType.regAddr.slaveUnit *** + reg = str(objectType) # Dict-key: HoldingRegister.528.1 *** objectType.regAddr.slaveUnit *** reg += '.' reg += str(regAddr) reg += '.' @@ -319,8 +321,8 @@ def __write_Registers(self, regPara, value): bo = regPara['byteOrder'] wo = regPara['wordOrder'] dataTypeStr = regPara['dataType'] - dataType = ''.join(filter(str.isalpha, dataTypeStr)) # vom dataType die Ziffen entfernen z.B. uint16 = uint - registerCount = 0 # Anzahl der zu schreibenden Register (Words) + dataType = ''.join(filter(str.isalpha, dataTypeStr)) # vom dataType die Ziffen entfernen z.B. uint16 = uint + registerCount = 0 # Anzahl der zu schreibenden Register (Words) try: bits = int(''.join(filter(str.isdigit, dataTypeStr))) # bit-Zahl aus aus dataType z.B. uint16 = 16 @@ -328,15 +330,17 @@ def __write_Registers(self, regPara, value): bits = 16 if dataType.lower() == 'string': - registerCount = int(bits/2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount + registerCount = int(bits / 2) # bei string: bits = bytes !! 
string16 -> 16Byte - 8 registerCount else: - registerCount = int(bits/16) + registerCount = int(bits / 16) if regPara['factor'] != 1: - #self.logger.debug("value {0} divided by: {1}".format(value, regPara['factor'])) - value = value * (1/regPara['factor']) + # self.logger.debug("value {0} divided by: {1}".format(value, regPara['factor'])) + value = value * (1 / regPara['factor']) - self.logger.debug("write {0} to {1}.{2}.{3} (address.slaveUnit) dataType:{4}".format(value, objectType, address, slaveUnit, dataTypeStr)) + self.logger.debug( + "write {0} to {1}.{2}.{3} (address.slaveUnit) dataType:{4}".format(value, objectType, address, slaveUnit, + dataTypeStr)) builder = BinaryPayloadBuilder(byteorder=bo, wordorder=wo) if dataType.lower() == 'uint': @@ -359,20 +363,20 @@ def __write_Registers(self, regPara, value): self.logger.error("Number of bits or datatype not supported : {0}".format(dataTypeStr)) elif dataType.lower() == 'float': if bits == 32: - builder.add_32bit_float(value) - if bits == 64: - builder.add_64bit_float(value) + builder.add_32bit_float(value) + elif bits == 64: + builder.add_64bit_float(value) else: self.logger.error("Number of bits or datatype not supported : {0}".format(dataTypeStr)) elif dataType.lower() == 'string': builder.add_string(value) elif dataType.lower() == 'bit': if objectType == 'Coil' or objectType == 'DiscreteInput': - if not type(value) == type(True): # test is boolean + if not isinstance(value, bool): # test is boolean self.logger.error("Value is not boolean: {0}".format(value)) return else: - if set(value).issubset({'0', '1'}) and bool(value): # test is bit-string '00110101' + if set(value).issubset({'0', '1'}) and bool(value): # test is bit-string '00110101' builder.add_bits(value) else: self.logger.error("Value is not a bitstring: {0}".format(value)) @@ -386,15 +390,18 @@ def __write_Registers(self, regPara, value): registers = builder.to_registers() result = self._Mclient.write_registers(address, registers, unit=slaveUnit) elif objectType == 'DiscreteInput': - self.logger.warning("this object type cannot be written {0}:{1} slaveUnit:{2}".format(objectType, address, slaveUnit)) + self.logger.warning( + "this object type cannot be written {0}:{1} slaveUnit:{2}".format(objectType, address, slaveUnit)) return elif objectType == 'InputRegister': - self.logger.warning("this object type cannot be written {0}:{1} slaveUnit:{2}".format(objectType, address, slaveUnit)) + self.logger.warning( + "this object type cannot be written {0}:{1} slaveUnit:{2}".format(objectType, address, slaveUnit)) return else: return if result.isError(): - self.logger.error("write error: {0} {1}.{2}.{3} (address.slaveUnit)".format(result, objectType, address, slaveUnit)) + self.logger.error( + "write error: {0} {1}.{2}.{3} (address.slaveUnit)".format(result, objectType, address, slaveUnit)) return None if 'write_dt' in regPara: @@ -409,9 +416,8 @@ def __write_Registers(self, regPara, value): else: regPara.update({'write_value': value}) - #regPara['write_dt'] = datetime.now() - #regPara['write_value'] = value - + # regPara['write_dt'] = datetime.now() + # regPara['write_value'] = value def __read_Registers(self, regPara): objectType = regPara['objectType'] @@ -430,15 +436,15 @@ def __read_Registers(self, regPara): bits = 16 if dataType.lower() == 'string': - registerCount = int(bits/2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount + registerCount = int(bits / 2) # bei string: bits = bytes !! 
string16 -> 16Byte - 8 registerCount else: - registerCount = int(bits/16) + registerCount = int(bits / 16) if self.connected == False: self.logger.error(" not connect {0}:{1}".format(self._host, self._port)) return None - #self.logger.debug("read {0}.{1}.{2} (address.slaveUnit) regCount:{3}".format(objectType, address, slaveUnit, registerCount)) + # self.logger.debug("read {0}.{1}.{2} (address.slaveUnit) regCount:{3}".format(objectType, address, slaveUnit, registerCount)) if objectType == 'Coil': if pymodbus_baseversion > 2: result = self._Mclient.read_coils(address, registerCount, slave=slaveUnit) @@ -464,7 +470,9 @@ def __read_Registers(self, regPara): return None if result.isError(): - self.logger.error("read error: {0} {1}.{2}.{3} (address.slaveUnit) regCount:{4}".format(result, objectType, address, slaveUnit, registerCount)) + self.logger.error( + "read error: {0} {1}.{2}.{3} (address.slaveUnit) regCount:{4}".format(result, objectType, address, + slaveUnit, registerCount)) return None if objectType == 'Coil': @@ -472,11 +480,13 @@ def __read_Registers(self, regPara): elif objectType == 'DiscreteInput': value = result.bits[0] elif objectType == 'InputRegister': - decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo,wordorder=wo) + decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo, wordorder=wo) else: - decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo,wordorder=wo) + decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo, wordorder=wo) - self.logger.debug("read {0}.{1}.{2} (address.slaveUnit) regCount:{3} result:{4}".format(objectType, address, slaveUnit, registerCount, result)) + self.logger.debug( + "read {0}.{1}.{2} (address.slaveUnit) regCount:{3} result:{4}".format(objectType, address, slaveUnit, + registerCount, result)) if dataType.lower() == 'uint': if bits == 16: @@ -499,17 +509,17 @@ def __read_Registers(self, regPara): elif dataType.lower() == 'float': if bits == 32: return decoder.decode_32bit_float() - if bits == 64: + elif bits == 64: return decoder.decode_64bit_float() else: self.logger.error("Number of bits or datatype not supported : {0}".format(dataTypeStr)) elif dataType.lower() == 'string': # bei string: bits = bytes !! 
string16 -> 16Byte
             ret = decoder.decode_string(bits)
-            return str( ret, 'ASCII')
+            return str(ret, 'ASCII')
         elif dataType.lower() == 'bit':
             if objectType == 'Coil' or objectType == 'DiscreteInput':
-                #self.logger.debug("readed bit value: {0}".format(value))
+                # self.logger.debug("readed bit value: {0}".format(value))
                 return value
             else:
                 self.logger.debug("readed bits values: {0}".format(value.decode_bits()))

From e98e339c2f3f25a4e937b7d7a5f8114360ab801b Mon Sep 17 00:00:00 2001
From: gruberth
Date: Thu, 20 Apr 2023 10:40:07 +0200
Subject: [PATCH 051/775] modbus_tcp: Changed scheduler call

---
 modbus_tcp/__init__.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modbus_tcp/__init__.py b/modbus_tcp/__init__.py
index 51eb74935..44b46110a 100755
--- a/modbus_tcp/__init__.py
+++ b/modbus_tcp/__init__.py
@@ -95,7 +95,7 @@ def run(self):
         """
         Run method for the plugin
         """
-        self._sh.scheduler.add('modbusTCP_poll_device', self.poll_device, cycle=self._cycle)
+        self.scheduler_add('poll_device_' + self._host, self.poll_device, cycle=self._cycle, prio=5)
         self.alive = True

     def stop(self):
@@ -104,7 +104,7 @@ def stop(self):
         """
         self.alive = False
         self.logger.debug("stop modbus_tcp plugin")
-        self.scheduler_remove('modbusTCP_poll_device')
+        self.scheduler_remove('poll_device_' + self._host)
         self._Mclient.close()
         self.connected = False

From f5b5af33eb707eda9f9effe8672cc42c6bf63507 Mon Sep 17 00:00:00 2001
From: gruberth
Date: Thu, 20 Apr 2023 13:14:44 +0200
Subject: [PATCH 052/775] husky2: Added a poll function in addition to the websocket connection, to trigger state updates more regularly

---
 husky2/__init__.py | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)

diff --git a/husky2/__init__.py b/husky2/__init__.py
index 1809dafe9..507196512 100755
--- a/husky2/__init__.py
+++ b/husky2/__init__.py
@@ -27,7 +27,7 @@
 import asyncio
 import threading
 from concurrent.futures import CancelledError
-from datetime import datetime
+from datetime import datetime, timedelta
 import time
 import json

@@ -233,6 +233,9 @@ def __init__(self, sh):
         self.historylength = int(self.get_parameter_value('historylength'))
         self.maxgpspoints = int(self.get_parameter_value('maxgpspoints'))

+        # poll is only additional, because normal state updates are recieved by the websocket connection of the api
+        self.poll_cycle = 600 # call every 10 min to make sure the monthly api call limit of 10000 gets not exceeded
+
         self.token = None
         self.tokenExp = 0

@@ -280,7 +283,7 @@ def run(self):
         """
         Run method for the plugin
         """
         # if you need to create child threads, do not make them daemon = True!
-        # They will not shutdown properly. (It's a python bug)
+        # They will not shut down properly. (It's a python bug)
         self.logger.debug("Run method called")
         try:
@@ -328,11 +331,16 @@ def startFinished(self, args):
         self.alive = True
         self.logger.debug("Init finished, husky2 plugin is running")

+        dt = self.shtime.now() + timedelta(seconds=self.poll_cycle)
+        self.scheduler_add('poll_husky_device_' + self.instance,
+                           self.poll_device, cycle=self.poll_cycle, prio=5, next=dt)
+
     def stop(self):
         """
         Stop method for the plugin
         """
         self.logger.debug("Stop method called. Shutting down Thread...")
+        self.scheduler_remove('poll_husky_device_' + self.instance)
         self.asyncLoop.call_soon_threadsafe(self.asyncLoop.stop)
         time.sleep(2)
         try:
@@ -463,6 +471,10 @@ def writeToStatusItem(self, txt):
         for item in self._items_state['message']:
             item(txt, self.get_shortname())

+    def poll_device(self):
+        self.logger.debug("Poll new status")
+        asyncio.run_coroutine_threadsafe(self.update_worker(), self.asyncLoop)
+
     def data_callback(self, status):
         """
         Callback for data updates of the device
@@ -491,7 +503,8 @@ def data_callback(self, status):

         posindex = -1
         for gpsindex, gpspoint in enumerate(data['attributes']['positions']):
-            if (gpspoint['longitude'] == self.mowerGpspoints.get_last()[0]) and (gpspoint['latitude'] == self.mowerGpspoints.get_last()[1]):
+            if (gpspoint['longitude'] == self.mowerGpspoints.get_last()[0]) and (
+                    gpspoint['latitude'] == self.mowerGpspoints.get_last()[1]):
                 posindex = gpsindex - 1
                 break
             elif gpsindex >= self.maxgpspoints:
@@ -663,6 +676,12 @@ async def send_worker(self, cmd, value):
             self.logger.error("'{0}' not in available commands: {1}".format(cmd, commands.keys()))
             return

+    async def update_worker(self):
+        newstatus = await self.apiSession.get_status()
+        self.apiSession.action
+        self.data_callback(newstatus)
+        return
+
     # ------------------------------------------
     # Webinterface methods of the plugin
     # ------------------------------------------

From 64e4e44140cc2ff35d47e7c2332e93c53bcd169d Mon Sep 17 00:00:00 2001
From: gruberth
Date: Thu, 20 Apr 2023 15:42:33 +0200
Subject: [PATCH 053/775] workflow: Fix unittest for forked repos

---
 .github/workflows/unittests.yml | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/unittests.yml b/.github/workflows/unittests.yml
index 2b2406663..c1a4864ba 100755
--- a/.github/workflows/unittests.yml
+++ b/.github/workflows/unittests.yml
@@ -42,15 +42,19 @@ jobs:
           echo github.pull_request.base.ref '${{ github.pull_request.base.ref }}'
           echo steps.extract_branch.outputs.branch '${{ steps.extract_branch.outputs.branch }}'

-      - name: Checkout core from branch '${{ steps.extract_branch.outputs.branch }}' (for push)
-        if: github.event_name != 'pull_request'
+      - name: Check if branch '${{ steps.extract_branch.outputs.branch }}' exists in smarthomeNG/smarthome
+        run: echo "code=$(git ls-remote --exit-code --heads https://github.com/smarthomeNG/smarthome ${{ steps.extract_branch.outputs.branch }} > /dev/null; echo $?
)" >>$GITHUB_OUTPUT + id: shng_branch_check + + - name: Checkout core from branch '${{ steps.extract_branch.outputs.branch }}' (for push on known smarthomeNG/smarthome branch) + if: github.event_name != 'pull_request' && steps.shng_branch_check.outputs.code == '0' uses: actions/checkout@v3 with: repository: smarthomeNG/smarthome ref: ${{ steps.extract_branch.outputs.branch }} - - name: Checkout core from branch 'develop' (for pull request) - if: github.event_name == 'pull_request' + - name: Checkout core from branch 'develop' (for pull request or push on unknown smarthomeNG/smarthome branch) + if: github.event_name == 'pull_request' || steps.shng_branch_check.outputs.code == '2' uses: actions/checkout@v3 with: repository: smarthomeNG/smarthome @@ -59,7 +63,7 @@ jobs: - name: Checkout plugins from branch '${{steps.extract_branch.outputs.branch}}' uses: actions/checkout@v3 with: - repository: smarthomeNG/plugins + repository: ${{ github.repository_owner }}/plugins ref: ${{steps.extract_branch.outputs.branch}} path: plugins From 6d81bd036d00d66fe64570e24df58eeccac56da0 Mon Sep 17 00:00:00 2001 From: gruberth Date: Thu, 20 Apr 2023 18:40:23 +0200 Subject: [PATCH 054/775] husky2: Changed script loading in maps widget --- husky2/sv_widgets/husky2.js | 97 ++++++++++++++++--------------------- 1 file changed, 41 insertions(+), 56 deletions(-) diff --git a/husky2/sv_widgets/husky2.js b/husky2/sv_widgets/husky2.js index 640c1760c..6997ad434 100755 --- a/husky2/sv_widgets/husky2.js +++ b/husky2/sv_widgets/husky2.js @@ -1,88 +1,73 @@ - $.widget("sv.husky2", $.sv.widget, { initSelector: 'div[data-widget="husky2.map"]', options: { - mapskey: '', + mapskey: '', zoomlevel: 19, pathcolor: '#3afd02', - }, + }, _create: function () { this._super(); - this._create_map(); + + const scriptPromise = new Promise((resolve, reject) => { + const script = document.createElement('script'); + document.head.appendChild(script); + script.onload = resolve; + script.onerror = reject; + script.async = true; + script.src = 'https://maps.googleapis.com/maps/api/js?key=' + this.options.mapskey + '&callback=Function.prototype'; + }); + scriptPromise.then(() => { + this._create_map() + }); }, _create_map: function () { - try { - this.map = new google.maps.Map(this.element[0], { - zoom: this.options.zoomlevel, - mapTypeId: 'hybrid', - center: new google.maps.LatLng(0.0, 0.0), - }); - } - catch (e) { - if (e.name == "ReferenceError") { // google maps script not loaded yet - var that = this; - // google maps script is already loading in another widget - if (window.google_maps_loading) { - window.setTimeout(function () { - that._create_map() - }, 100) - return; - } - // google maps script is not loading - window.google_maps_loading = true; - $.ajax({ - url: 'https://maps.googleapis.com/maps/api/js?key=' + this.options.mapskey + '&language=de', - dataType: "script", - complete: function () { - window.google_maps_loading = false; - that._create_map() - } - }); - return; - } - else // other exceptions should be thrown - throw e; - } - this.marker_myself = new google.maps.Marker({ - map: this.map, - position: new google.maps.LatLng(0.0, 0.0), - icon: '', - title:'', - zIndex:99999999 - }); + this.map = new google.maps.Map(this.element[0], { + zoom: this.options.zoomlevel, + mapTypeId: 'hybrid', + center: new google.maps.LatLng(0.0, 0.0), + }); - this.linePath = new google.maps.Polyline({ - path: [], - strokeColor: this.options.pathcolor, - strokeOpacity: 0.6, - strokeWeight: 2, - map: this.map - }); + this.marker_myself = new 
google.maps.Marker({ + map: this.map, + position: new google.maps.LatLng(0.0, 0.0), + icon: '', + title: '', + zIndex: 99999999 + }); + this.linePath = new google.maps.Polyline({ + path: [], + strokeColor: this.options.pathcolor, + strokeOpacity: 0.6, + strokeWeight: 2, + map: this.map + }); }, - _update: function(response) { - if(!this.map) { + _update: function (response) { + if (!this.map) { var that = this; - window.setTimeout(function() { that._update(response) }, 100) + window.setTimeout(function () { + that._update(response) + }, 500) return; } this.marker_myself.setTitle(response[3]); - var pos = new google.maps.LatLng(parseFloat(response[0]),parseFloat(response[1])); + var pos = new google.maps.LatLng(parseFloat(response[0]), parseFloat(response[1])); this.map.setCenter(pos); this.marker_myself.setPosition(pos); var coord = []; - for (const point of response[2]){ - coord.push(new google.maps.LatLng(parseFloat(point[0]),parseFloat(point[1]))); + for (const point of response[2]) { + coord.push(new google.maps.LatLng(parseFloat(point[0]), parseFloat(point[1]))); } this.linePath.setPath(coord); From 82ba1a3175fffc32b24738e412582f502e772cff Mon Sep 17 00:00:00 2001 From: gruberth Date: Thu, 20 Apr 2023 19:25:37 +0200 Subject: [PATCH 055/775] husky2: Check first if widget script already loaded --- husky2/sv_widgets/husky2.js | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/husky2/sv_widgets/husky2.js b/husky2/sv_widgets/husky2.js index 6997ad434..c2ef89edb 100755 --- a/husky2/sv_widgets/husky2.js +++ b/husky2/sv_widgets/husky2.js @@ -10,17 +10,21 @@ $.widget("sv.husky2", $.sv.widget, { _create: function () { this._super(); - const scriptPromise = new Promise((resolve, reject) => { - const script = document.createElement('script'); - document.head.appendChild(script); - script.onload = resolve; - script.onerror = reject; - script.async = true; - script.src = 'https://maps.googleapis.com/maps/api/js?key=' + this.options.mapskey + '&callback=Function.prototype'; - }); - scriptPromise.then(() => { - this._create_map() - }); + // First check if the script already exists on the dom by searching for an id + if (document.getElementById('googleMapsScript') === null) { + const scriptPromise = new Promise((resolve, reject) => { + const script = document.createElement('script'); + script.id = 'googleMapsScript'; + script.onload = resolve; + script.onerror = reject; + script.async = true; + script.src = 'https://maps.googleapis.com/maps/api/js?key=' + this.options.mapskey + '&callback=Function.prototype'; + document.body.appendChild(script); + }); + scriptPromise.then(() => { + this._create_map(); + }); + } }, _create_map: function () { From a1c05b1f016c7ce0680a5533036309373f3ce74a Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 20 Apr 2023 21:14:29 +0200 Subject: [PATCH 056/775] AVM Plugin: - Bugfix setting Fritzdevice like start_call' --- avm/__init__.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index f42291bd0..e6a229e2d 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -1208,7 +1208,8 @@ def wol(self, mac_address: str): :param mac_address: MAC address of the device to wake up """ # self.client.LanDevice.Hosts.X_AVM_DE_GetAutoWakeOnLANByMACAddress(NewMACAddress=mac_address) - return self._set_fritz_device('wol', f"NewMACAddress='{mac_address}'") + args = {'NewMACAddress': mac_address} + return self._set_fritz_device('wol', args) # 
---------------------------------- # caller methods @@ -1252,7 +1253,8 @@ def set_call_origin(self, phone_name: str): :param phone_name: full phone identifier, could be e.g. '**610' for an internal device """ # self.client.InternetGatewayDevice.X_VoIP.X_AVM_DE_DialSetConfig(NewX_AVM_DE_PhoneName=phone_name.strip()) - return self._set_fritz_device('set_call_origin', f"NewX_AVM_DE_PhoneName='{phone_name.strip()}'") + args = {'NewX_AVM_DE_PhoneName': phone_name.strip()} + return self._set_fritz_device('set_call_origin', args) def start_call(self, phone_number: str): """ @@ -1263,7 +1265,8 @@ def start_call(self, phone_number: str): :param phone_number: full phone number to call """ # self.client.InternetGatewayDevice.X_VoIP.X_AVM_DE_DialNumber(NewX_AVM_DE_PhoneNumber=phone_number.strip()) - return self._set_fritz_device('start_call', f"NewX_AVM_DE_PhoneNumber='{phone_number.strip()}'") + args = {'NewX_AVM_DE_PhoneNumber': phone_number.strip()} + return self._set_fritz_device('start_call', args) def cancel_call(self): """ @@ -1406,7 +1409,8 @@ def set_wlan(self, wlan_index: int, new_enable: bool = False): self.logger.debug(f"set_wlan called: wlan_index={wlan_index}, new_enable={new_enable}") # self.client.LANDevice.WLANConfiguration[wlan_index].SetEnable(NewEnable=int(new_enable)) - response = self._set_fritz_device('set_wlan', f"NewEnable='{int(new_enable)}'", wlan_index) + args = {'NewEnable': int(new_enable)} + response = self._set_fritz_device('set_wlan', args, wlan_index) # check if remaining time is set as item self.set_wlan_time_remaining(wlan_index) @@ -1457,7 +1461,8 @@ def set_wps(self, wlan_index: int, wps_enable: bool = False): self.logger.debug(f"set_wps called: wlan_index={wlan_index}, wps_enable={wps_enable}") # self.client.LANDevice.WLANConfiguration[wlan_index].X_AVM_DE_SetWPSEnable(NewX_AVM_DE_WPSEnable=int(wps_enable)) - return self._set_fritz_device('set_wps', f"NewX_AVM_DE_WPSEnable='{int(wps_enable)}'", wlan_index) + args = {'NewX_AVM_DE_WPSEnable': int(wps_enable)} + return self._set_fritz_device('set_wps', args, wlan_index) def get_wps(self, wlan_index: int): """ @@ -1482,7 +1487,8 @@ def set_tam(self, tam_index: int = 0, new_enable: bool = False): uses: https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/x_tam.pdf """ # self.client.InternetGatewayDevice.X_AVM_DE_TAM.SetEnable(NewIndex=tam_index, NewEnable=int(new_enable)) - return self._set_fritz_device('set_tam', f"NewIndex={tam_index}, NewEnable='{int(new_enable)}'") + args = {'NewIndex': tam_index, 'NewEnable': int(new_enable)} + return self._set_fritz_device('set_tam', args) def get_tam(self, tam_index: int = 0): """ @@ -1540,7 +1546,8 @@ def set_aha_device(self, ain: str = '', set_switch: bool = False): switch_state = "ON" # self.client.InternetGatewayDevice.X_AVM_DE_Homeauto.SetSwitch(NewAIN=ain, NewSwitchState=switch_state) - return self._set_fritz_device('set_aha_device', f"NewAIN={ain}, NewSwitchState='{switch_state}'") + args = {'NewAIN': ain, 'NewSwitchState': switch_state} + return self._set_fritz_device('set_aha_device', args) # ---------------------------------- # deflection @@ -1555,7 +1562,8 @@ def set_deflection(self, deflection_id: int = 0, new_enable: bool = False): :param new_enable: new enable (default: False) """ # self.client.InternetGatewayDevice.X_AVM_DE_OnTel.SetDeflectionEnable(NewDeflectionId=deflection_id, NewEnable=int(new_enable)) - return self._set_fritz_device('set_deflection', f"NewDeflectionId='{deflection_id}', NewEnable='{int(new_enable)}'") + args = 
{'NewDeflectionId': deflection_id, 'NewEnable': int(new_enable)} + return self._set_fritz_device('set_deflection', args) def get_deflection(self, deflection_id: int = 0): """Get Deflection state of deflection_id""" From 2d03543e63779fdf5c5f477b499cc83f8ec57803 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 20 Apr 2023 21:26:11 +0200 Subject: [PATCH 057/775] DB_ADDON Plugin: - Bugfix in parse_item --- db_addon/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index cd8ac2a5d..e40575ee0 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -263,8 +263,8 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: # get/create list of comparison operators and check it db_addon_ignore_value_list = self.get_iattr_value(item.conf, 'db_addon_ignore_value_list') - if self.has_iattr(item.conf, 'database_ignore_value'): - db_addon_ignore_value = self.get_iattr_value(item.conf, 'database_ignore_value') + if self.has_iattr(item.conf, 'db_addon_ignore_value'): + db_addon_ignore_value = self.get_iattr_value(item.conf, 'db_addon_ignore_value') if not db_addon_ignore_value_list: db_addon_ignore_value_list = [] db_addon_ignore_value_list.append(f"!= {db_addon_ignore_value}") From 5a75ec5065d043d3747b26f6ef15c6aacd64137b Mon Sep 17 00:00:00 2001 From: gruberth Date: Fri, 21 Apr 2023 17:36:38 +0200 Subject: [PATCH 058/775] husky2: Changed script import to speed up sv page containing the map --- husky2/sv_widgets/husky2.html | 42 +++++++++++++++++++---------------- husky2/sv_widgets/husky2.js | 37 ++++++++++-------------------- 2 files changed, 35 insertions(+), 44 deletions(-) diff --git a/husky2/sv_widgets/husky2.html b/husky2/sv_widgets/husky2.html index 1207b1338..84b35ff67 100755 --- a/husky2/sv_widgets/husky2.html +++ b/husky2/sv_widgets/husky2.html @@ -8,24 +8,28 @@ */ /** - * Displays a google maps (from https://www.smarthomeng.de/google-maps-widget-fuer-smartvisu-2-9 ) map with the position and the path of the mower - * - * @param {id=''} unique id for this widget - * @param {item(txt)=''} a gad/item with the name of the mower - * @param {item(num)=0.0} a gad/item for latitude - * @param {item(num)=0.0} a gad/item for longitude - * @param {item(list)=[]} a gad/item for gps points - * @param {text=''} the google maps key - * @param {num=19} zoom level for map - * @param {text='#3afd02'} color of mower path - */ +* Displays a google maps (from https://www.smarthomeng.de/google-maps-widget-fuer-smartvisu-2-9 ) map with the position and the path of the mower +* +* @param {id=''} unique id for this widget +* @param {item(txt)=''} a gad/item with the name of the mower +* @param {item(num)=0.0} a gad/item for latitude +* @param {item(num)=0.0} a gad/item for longitude +* @param {item(list)=[]} a gad/item for gps points +* @param {text=''} the google maps key +* @param {num=19} zoom level for map +* @param {text='#3afd02'} color of mower path +*/ {% macro map(id, gad_name, gad_lat, gad_lon, gad_points, mapskey, zoomlevel, pathcolor) %} -
-
+
+
+
+ +
{% endmacro %} \ No newline at end of file diff --git a/husky2/sv_widgets/husky2.js b/husky2/sv_widgets/husky2.js index c2ef89edb..f09cb5ef8 100755 --- a/husky2/sv_widgets/husky2.js +++ b/husky2/sv_widgets/husky2.js @@ -1,34 +1,18 @@ $.widget("sv.husky2", $.sv.widget, { initSelector: 'div[data-widget="husky2.map"]', + map: null, + options: { - mapskey: '', zoomlevel: 19, pathcolor: '#3afd02', }, _create: function () { this._super(); - - // First check if the script already exists on the dom by searching for an id - if (document.getElementById('googleMapsScript') === null) { - const scriptPromise = new Promise((resolve, reject) => { - const script = document.createElement('script'); - script.id = 'googleMapsScript'; - script.onload = resolve; - script.onerror = reject; - script.async = true; - script.src = 'https://maps.googleapis.com/maps/api/js?key=' + this.options.mapskey + '&callback=Function.prototype'; - document.body.appendChild(script); - }); - scriptPromise.then(() => { - this._create_map(); - }); - } }, _create_map: function () { - this.map = new google.maps.Map(this.element[0], { zoom: this.options.zoomlevel, mapTypeId: 'hybrid', @@ -50,16 +34,19 @@ $.widget("sv.husky2", $.sv.widget, { strokeWeight: 2, map: this.map }); - }, _update: function (response) { - if (!this.map) { - var that = this; - window.setTimeout(function () { - that._update(response) - }, 500) - return; + if (this.map === null) { + if (typeof google == 'undefined') { + var that = this; + window.setTimeout(function () { + that._update(response) + }, 500) + return; + } else { + this._create_map(); + } } this.marker_myself.setTitle(response[3]); From 9eb7537b5ea4907a6f954e42af90474076928ff1 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 21 Apr 2023 19:43:53 +0200 Subject: [PATCH 059/775] DB_ADDON Plugin: - allow to define database_item_path to be defined up to 3 level above db_addon_item --- db_addon/__init__.py | 145 ++++++++++++++++++++++++++++++++----------- 1 file changed, 108 insertions(+), 37 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index e40575ee0..c7e48f69e 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -90,7 +90,7 @@ def __init__(self, sh): self.active_queue_item: str = '-' # String holding item path of currently executed item # define debug logs - self.parse_debug = False # Enable / Disable debug logging for method 'parse item' + self.parse_debug = True # Enable / Disable debug logging for method 'parse item' self.execute_debug = False # Enable / Disable debug logging for method 'execute items' self.sql_debug = False # Enable / Disable debug logging for sql stuff self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' @@ -189,6 +189,27 @@ def parse_item(self, item: Item): can be sent to the knx with a knx write function within the knx plugin. 
""" + def get_database_item_path() -> Item: + """ + Returns item from shNG config which is an item with database attribut valid for current db_addon item + """ + + _lookup_item = item + + self.logger.debug(f"get_database_item_path called with item = {item.path()}") + + for i in range(3): + if self.has_iattr(_lookup_item.conf, 'db_addon_database_item'): + self.logger.debug(f"Attribut 'db_addon_database_item' has been found for item={item.path()} {i + 1} level above item.") + _database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') + _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) + self.logger.debug(f"get_database_item_path: {_database_item_path=}, {_startup=}") + + return _database_item_path, _startup + else: + _lookup_item = _lookup_item.return_parent() + return None, None + def get_database_item() -> Item: """ Returns item from shNG config which is an item with database attribut valid for current db_addon item @@ -199,9 +220,11 @@ def get_database_item() -> Item: for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has been found for item={item.path()} {i + 1} level above item.") - return _lookup_item + _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) + return _lookup_item, _startup else: _lookup_item = _lookup_item.return_parent() + return None, None def has_db_addon_item() -> bool: """Returns item from shNG config which is item with db_addon attribut valid for database item""" @@ -249,6 +272,70 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: return return db_addon_ignore_value_list_formatted + def optimize_db_addon_ignore_value_list(comp_list: list) -> list: + value_l = None + value_h = None + value_le = None + value_he = None + values_ue = [] + low_end = None, None + high_end = None, None + + # find low, low_e, high, high_e + for comp in comp_list: + op, value = comp.split(' ') + value = int(value) + if op == '<': + if value_l is None or (value < value_l): + value_l = value + elif op == '<=': + if value_le is None or (value < value_le): + value_le = value + elif op == '>': + if value_h is None or (value > value_h): + value_h = value + elif op == '>=': + if value_he is None or (value > value_he): + value_he = value + elif op == '!=': + values_ue.append(value) + elif op == '==': + self.logger.debug(f"Comparison to '{comp}' will be ignored.") + + self.logger.debug(f"optimize_db_addon_ignore_value_list: {value_l=}, {value_le=}, {value_h=}, {value_he=}, {values_ue=}") + + # find low and high + if value_l and not value_le: + low_end = ('<', value_l) + elif not value_l and value_le: + low_end = ('<=', value_le) + elif value_l and value_le: + low_end = ('<=', value_le) if value_le < value_l else ('<', value_l) + self.logger.debug(f"low_end={low_end[0]} {low_end[1]}") + + if value_h and not value_he: + high_end = ('<', value_h) + elif not value_h and value_he: + high_end = ('<=', value_he) + elif value_h and value_he: + high_end = ('>=', value_he) if value_he > value_h else ('>', value_h) + self.logger.debug(f"high_end={high_end[0]} {high_end[1]}") + + # generate comp_list + db_addon_ignore_value_list_optimized = [] + if low_end != (None, None): + db_addon_ignore_value_list_optimized.append(f"{low_end[0]} {low_end[1]}") + if high_end != (None, None): + db_addon_ignore_value_list_optimized.append(f"{high_end[0]} {high_end[1]}") + if values_ue: + for v in values_ue: + if low_end[1] and v > low_end[1]: 
+ db_addon_ignore_value_list_optimized.append(f'!= {v}') + elif high_end[1] and v < high_end[1]: + db_addon_ignore_value_list_optimized.append(f'!= {v}') + + return db_addon_ignore_value_list_optimized + # handle all items with db_addon_fct if self.has_iattr(item.conf, 'db_addon_fct'): @@ -258,8 +345,13 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: # get db_addon_fct attribute value db_addon_fct = self.get_iattr_value(item.conf, 'db_addon_fct').lower() - # get attribute value if item should be calculated at plugin startup - db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) + # get database item (and attribute value if item should be calculated at plugin startup) and return if not available + database_item, db_addon_startup = get_database_item_path() + if database_item is None: + database_item, db_addon_startup = get_database_item() + if database_item is None: + self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") + return # get/create list of comparison operators and check it db_addon_ignore_value_list = self.get_iattr_value(item.conf, 'db_addon_ignore_value_list') @@ -279,58 +371,37 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: if db_addon_ignore_value_list: db_addon_ignore_value_list = format_db_addon_ignore_value_list() - # get database item and return if not available - database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') - if database_item_path is not None: - database_item = database_item_path - else: - database_item = get_database_item() - if database_item: - db_addon_startup = bool(self.get_iattr_value(database_item.conf, 'db_addon_startup')) - if database_item is None: - self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") - return - - # return if mandatory params for ad_addon_fct not given. + # check if mandatory params for ad_addon_fct are given if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES and not self.has_iattr(item.conf, 'db_addon_params'): self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored.") return # create standard items config item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list} - if database_item_path is not None: + if isinstance(database_item, str): item_config_data_dict.update({'database_item_path': True}) else: - database_item_path = database_item.path() + database_item = database_item.path() + # do logging if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item_path}") + self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item}") - # handle daily items + # add cycle for item groups if db_addon_fct in ALL_DAILY_ATTRIBUTES: item_config_data_dict.update({'cycle': 'daily'}) - - # handle weekly items elif db_addon_fct in ALL_WEEKLY_ATTRIBUTES: item_config_data_dict.update({'cycle': 'weekly'}) - - # handle monthly items elif db_addon_fct in ALL_MONTHLY_ATTRIBUTES: item_config_data_dict.update({'cycle': 'monthly'}) - - # handle yearly items elif db_addon_fct in ALL_YEARLY_ATTRIBUTES: item_config_data_dict.update({'cycle': 'yearly'}) - - # handle static items elif db_addon_fct in ALL_GEN_ATTRIBUTES: item_config_data_dict.update({'cycle': 'static'}) - - # handle on-change items elif db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: item_config_data_dict.update({'cycle': 'on-change'}) - # handle all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme + # create item config for all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme if 'summe' in db_addon_fct: db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) if db_addon_params is None or 'year' not in db_addon_params: @@ -338,7 +409,7 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: db_addon_params = {'year': 'current'} item_config_data_dict.update({'params': db_addon_params}) - # handle wachstumsgradtage function + # create item config for wachstumsgradtage function elif db_addon_fct == 'wachstumsgradtage': DEFAULT_THRESHOLD = 10 db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) @@ -353,7 +424,7 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: db_addon_params['threshold'] = DEFAULT_THRESHOLD if threshold is None else threshold item_config_data_dict.update({'params': db_addon_params}) - # handle tagesmitteltemperatur + # create item config for tagesmitteltemperatur elif db_addon_fct == 'tagesmitteltemperatur': if not self.has_iattr(item.conf, 'db_addon_params'): self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") @@ -365,7 +436,7 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: return item_config_data_dict.update({'params': db_addon_params}) - # handle db_request + # create item config for db_request elif db_addon_fct == 'db_request': if not self.has_iattr(item.conf, 'db_addon_params'): self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored") @@ -398,11 +469,11 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: item_config_data_dict.update({'params': db_addon_params, 'cycle': update_cycle}) - # debug log item cycle + # do logging if self.parse_debug: self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") - # handle item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) + # create item config for item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) if (db_addon_startup and db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES) or db_addon_fct in ALL_GEN_ATTRIBUTES: if self.parse_debug: self.logger.debug(f"Item '{item.path()}' added to be run on startup") From 797ebbdeffb325aa02bfb08b6d54b8fea8a9d5db Mon Sep 17 00:00:00 2001 From: Hasenradball Date: Fri, 21 Apr 2023 20:18:17 +0200 Subject: [PATCH 060/775] restructure send method and improve try except clause --- rcs1000n/__init__.py | 6 ++-- rcs1000n/cRcSocketSwitch/cRcSocketSwitch.py | 35 +++++++++++++++------ 2 files changed, 29 insertions(+), 12 deletions(-) diff --git a/rcs1000n/__init__.py b/rcs1000n/__init__.py index 0ea66c936..b024384b7 100755 --- a/rcs1000n/__init__.py +++ b/rcs1000n/__init__.py @@ -117,11 +117,11 @@ def update_item(self, item, caller=None, source=None, dest=None): try: # create Brennenstuhl RCS1000N object obj = cRcSocketSwitch.RCS1000N(self._gpio) - # prepare and send values - obj.send(*values) except Exception as err: - self.logger.error('Error: during instantiation of object or during send to device: {}'.format(err)) + self.logger.error('Error: during instantiation of object: {}'.format(err)) else: + # prepare and send values + obj.send(*values) self.logger.info('Info: setting Device {} with SystemCode {} to {}'.format(ButtonCode, SystemCode, value)) finally: # give the transmitter time to complete sending of the command (but not more than 10s) diff --git a/rcs1000n/cRcSocketSwitch/cRcSocketSwitch.py b/rcs1000n/cRcSocketSwitch/cRcSocketSwitch.py index 33d3d6161..ff8b82607 100755 --- a/rcs1000n/cRcSocketSwitch/cRcSocketSwitch.py +++ b/rcs1000n/cRcSocketSwitch/cRcSocketSwitch.py @@ -204,19 +204,36 @@ def calc_DecimalCode_python_style(self, SystemCode, ButtonCode, status): logging.info("binary string: {}\n".format(binstr)) return int(binstr, 2) + def calculateDecimalCode(self, systemCode, buttonCode, status): + ''' + Calculate the Decimal/Binary Code which to send to actuator + ''' + values = self.prepareCodes(systemCode, buttonCode, status) + self.config['code'] = self.calc_DecimalCode_python_style(*values) + return None - def send(self, systemCode, btn_code, status): + + def sendData(self, device): + ''' + send data to device + ''' + device.enable_tx() + device.tx_repeat = 10 + device.tx_code(**self.config) + return None + + + def send(self, systemCode, buttonCode, status): ''' Method to prepare the codes and send it to the actuator ''' try: rfdevice = RFDevice(self.gpio) - rfdevice.enable_tx() - rfdevice.tx_repeat = 10 - values = self.prepareCodes(systemCode, btn_code, status) - send_code = self.calc_DecimalCode_python_style(*values) - self.config['code'] = send_code - rfdevice.tx_code(**self.config) - + except Exception as err: + logging.error('Error: during instantiation of object: {}'.format(err)) + else: + self.calculateDecimalCode(systemCode, 
buttonCode, status) + self.sendData(rfdevice) + rfdevice.cleanup() finally: - rfdevice.cleanup() \ No newline at end of file + pass From 1a31dfd62e9a4cb5fa21da2f3dee1d1e57b01a4f Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sat, 22 Apr 2023 12:14:59 +0200 Subject: [PATCH 061/775] DB_ADDON Plugin: - allow to define database_item_path to be defined up to 3 level above db_addon_item (bugfix) --- db_addon/__init__.py | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index c7e48f69e..fcbd7ddc1 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -196,19 +196,19 @@ def get_database_item_path() -> Item: _lookup_item = item - self.logger.debug(f"get_database_item_path called with item = {item.path()}") + self.logger.debug(f"get_database_item_path called for item = {item.path()}") for i in range(3): if self.has_iattr(_lookup_item.conf, 'db_addon_database_item'): - self.logger.debug(f"Attribut 'db_addon_database_item' has been found for item={item.path()} {i + 1} level above item.") - _database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') + self.logger.debug(f"Attribut 'db_addon_database_item' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") + _database_item_path = self.get_iattr_value(_lookup_item.conf, 'db_addon_database_item') _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) self.logger.debug(f"get_database_item_path: {_database_item_path=}, {_startup=}") - return _database_item_path, _startup else: _lookup_item = _lookup_item.return_parent() - return None, None + + return None, None def get_database_item() -> Item: """ @@ -217,14 +217,17 @@ def get_database_item() -> Item: _lookup_item = item.return_parent() + self.logger.debug(f"get_database_item called for item = {item.path()}") + for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): - self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has been found for item={item.path()} {i + 1} level above item.") + self.logger.debug(f"Attribut '{self.item_attribute_search_str}' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) return _lookup_item, _startup else: _lookup_item = _lookup_item.return_parent() - return None, None + + return None, None def has_db_addon_item() -> bool: """Returns item from shNG config which is item with db_addon attribut valid for database item""" @@ -256,7 +259,7 @@ def check_db_addon_fct(check_item) -> bool: def format_db_addon_ignore_value_list() -> Union[None, list]: """ Check of list of comparison operators is formally valid """ - OPERATOR_LIST = ['<=', '<>', '!=', '>=', '<=', '=', '>', '<'] + OPERATOR_LIST = ['!=', '>=', '<=', '>', '<'] db_addon_ignore_value_list_formatted = [] for _entry in db_addon_ignore_value_list: @@ -270,7 +273,7 @@ def format_db_addon_ignore_value_list() -> Union[None, list]: break if not db_addon_ignore_value_list_formatted: return - return db_addon_ignore_value_list_formatted + return optimize_db_addon_ignore_value_list(db_addon_ignore_value_list_formatted) def optimize_db_addon_ignore_value_list(comp_list: list) -> list: value_l = None @@ -278,8 +281,6 @@ def optimize_db_addon_ignore_value_list(comp_list: list) -> list: value_le = None value_he = None values_ue = [] - low_end = None, None - high_end = None, None # find low, 
low_e, high, high_e for comp in comp_list: @@ -304,22 +305,25 @@ def optimize_db_addon_ignore_value_list(comp_list: list) -> list: self.logger.debug(f"optimize_db_addon_ignore_value_list: {value_l=}, {value_le=}, {value_h=}, {value_he=}, {values_ue=}") - # find low and high + # find low end if value_l and not value_le: low_end = ('<', value_l) elif not value_l and value_le: low_end = ('<=', value_le) elif value_l and value_le: low_end = ('<=', value_le) if value_le < value_l else ('<', value_l) - self.logger.debug(f"low_end={low_end[0]} {low_end[1]}") + else: + low_end = (None, None) + # find high end if value_h and not value_he: high_end = ('<', value_h) elif not value_h and value_he: high_end = ('<=', value_he) elif value_h and value_he: high_end = ('>=', value_he) if value_he > value_h else ('>', value_h) - self.logger.debug(f"high_end={high_end[0]} {high_end[1]}") + else: + high_end = (None, None) # generate comp_list db_addon_ignore_value_list_optimized = [] From aadaa6d83da1a21575eb23afcb840958c0b2988e Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Apr 2023 07:50:21 +0200 Subject: [PATCH 062/775] lms plugin: add playlist id/name functionality --- lms/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lms/__init__.py b/lms/__init__.py index fa6e06a84..7d5007fcc 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -96,9 +96,12 @@ def trigger_read(command): if command == 'player.playlist.load': self.logger.debug(f"Got command load {command} data {data} value {value} custom {custom} by {by}") trigger_read('player.playlist.id') - trigger_read('player.playlist.name') trigger_read('player.control.playmode') + if command == 'player.playlist.id': + self.logger.debug(f"Got command id {command} data {data} value {value} custom {custom} by {by}") + trigger_read('player.playlist.name') + # update on new song if command == 'player.info.title': # trigger_read('player.control.playmode') From 6fa0f2f2d909cc78670125d758cfb565c402d41b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Apr 2023 07:50:58 +0200 Subject: [PATCH 063/775] lms plugin: re-introduce standalone code to be able to create structs again --- lms/__init__.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/lms/__init__.py b/lms/__init__.py index 7d5007fcc..675258eac 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -22,14 +22,25 @@ ######################################################################### import builtins -import urllib.parse +import os +import sys + +if __name__ == '__main__': + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass -from lib.model.sdp.globals import (CUSTOM_SEP, PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_RECURSIVE, PLUGIN_ATTR_CONN_TERMINATOR) -from lib.model.smartdeviceplugin import SmartDevicePlugin + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) -# from .webif import WebInterface -builtins.SDP_standalone = False +from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_NET_TCP_CLI, CONN_SER_ASYNC) +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone + +import urllib.parse class lms(SmartDevicePlugin): @@ -152,3 +163,7 @@ def trigger_read(command): if command == 'player.control.stop' or (command == 'player.control.playpause' and not value): self.logger.debug(f"Got command stop or pause {command} data {data} value {value} custom {custom} 
by {by}") trigger_read('player.control.playmode') + + +if __name__ == '__main__': + s = Standalone(lms, sys.argv[0]) From 67b56e0f3a3953ed47e613a25e4f256fe873915b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Apr 2023 07:51:18 +0200 Subject: [PATCH 064/775] lms plugin: improve commands --- lms/commands.py | 31 ++++++++++---------- lms/plugin.yaml | 77 +++++++++++++++++++++++++------------------------ 2 files changed, 56 insertions(+), 52 deletions(-) diff --git a/lms/commands.py b/lms/commands.py index 18df717ee..f36865b25 100755 --- a/lms/commands.py +++ b/lms/commands.py @@ -28,12 +28,12 @@ }, 'player': { 'control': { - 'power': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} power ?', 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} power {RAW_VALUE:01}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:prefset server\s)?power (\d)', 'item_attrs': {'initial': True, 'enforce': True}}, - 'playmode': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mode ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} mode {VALUE}', 'dev_datatype': 'LMSPlayMode', 'cmd_settings': {'valid_list_ci': ['PLAY', 'PAUSE', 'STOP']}, 'reply_pattern': [r'{CUSTOM_PATTERN1} mode {VALID_LIST_CI}', r'{CUSTOM_PATTERN1} playlist (pause \d|stop)'], 'item_attrs': {'initial': True, 'enforce': True}}, + 'power': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} power ?', 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} power {RAW_VALUE:01}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:prefset server\s)?power (\d)', '{CUSTOM_PATTERN1} status(?:.*)power:([^\s]+)'], 'item_attrs': {'enforce': True}}, + 'playmode': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mode ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} mode {VALUE}', 'dev_datatype': 'LMSPlayMode', 'cmd_settings': {'valid_list_ci': ['PLAY', 'PAUSE', 'STOP']}, 'reply_pattern': [r'{CUSTOM_PATTERN1} mode {VALID_LIST_CI}', r'{CUSTOM_PATTERN1} playlist (pause \d|stop)', '{CUSTOM_PATTERN1} status(?:.*)mode:([^\s]+)'], 'item_attrs': {'enforce': True}}, 'playpause': {'read': True, 'write': True, 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'LMSPlay', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:playlist\s)?(play|pause)(?:\s3)?$', 'item_attrs': {'enforce': True}}, 'stop': {'read': True, 'write': True, 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'LMSStop', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:playlist\s)?(stop)$', 'item_attrs': {'enforce': True}}, 'mute': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mixer muting ?', 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} mixer muting {RAW_VALUE:01}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:mixer muting|prefset server mute) (\d)', 'item_attrs': {'initial': True, 'enforce': True}}, - 'volume': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mixer volume ?', 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:mixer volume \-?|prefset server volume \-?)(\d{1,3})', 'item_attrs': {'initial': True}}, + 'volume': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mixer volume ?', 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:mixer volume \-?|prefset server volume \-?)(\d{1,3})', '{CUSTOM_PATTERN1} status(?:.*)mixer volume:([^\s]+)']}, 'volume_fading': {'read': False, 
'write': True, 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'item_attrs': {'item_template': 'volume_fading'}}, 'volume_low': {'read': False, 'write': True, 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'item_attrs': {'attributes': {'cache': True, 'enforce_updates': True, 'initial_value': 60}}}, 'volume_high': {'read': False, 'write': True, 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'item_attrs': {'attributes': {'cache': True, 'enforce_updates': True, 'initial_value': 80}}}, @@ -46,21 +46,20 @@ 'display': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} display ? ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} display {VALUE}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} display\s?(.*)', 'item_attrs': {'initial': True}}, 'connect': {'read': True, 'write': True, 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} connect {VALUE}', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} connect (.*)', 'item_attrs': {'attributes': {'remark': 'ip|www.mysqueezebox.com|www.test.mysqueezebox.com'}}}, 'disconnect': {'read': True, 'write': True, 'item_type': 'str', 'write_cmd': 'disconnect {CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'str', 'reply_pattern': 'disconnect {CUSTOM_PATTERN1} (.*)', 'item_attrs': {'attributes': {'remark': 'ip|www.mysqueezebox.com|www.test.mysqueezebox.com'}}}, - 'time': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} time ?', 'write_cmd': '{CUSTOM_ATTR1} time {VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} time (\d+(?:\.\d{2})?)', 'item_attrs': {'item_template': 'time', 'enforce': True, 'read_groups': [{'name': 'player.control.time_poll', 'trigger': 'poll'}]}}, + 'time': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} time ?', 'write_cmd': '{CUSTOM_ATTR1} time {VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} time (\d+(?:\.\d{2})?)', '{CUSTOM_PATTERN1} status(?:.*)time:([^\s]+)'], 'item_attrs': {'item_template': 'time', 'enforce': True, 'read_groups': [{'name': 'player.control.time_poll', 'trigger': 'poll'}]}}, 'forward': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} time +{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} time \+(\d+(?:\.\d{2})?)', 'item_attrs': {'enforce': True, 'attributes': {'initial_value': 10}}}, 'rewind': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} time -{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} time \-(\d+(?:\.\d{2})?)', 'item_attrs': {'enforce': True, 'attributes': {'initial_value': 10}}}, 'playsong': {'read': False, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist play {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'item_attrs': {'attributes': {'remark': 'song URL, playlist or directory'}}}, 'sleep': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} sleep ?', 'write_cmd': '{CUSTOM_ATTR1} sleep {VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} sleep (.*[^?])', 'item_attrs': {'initial': True}} }, 'playlist': { - 'repeat': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist repeat ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist repeat {VALUE}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} playlist repeat {LOOKUP}', 'lookup': 'REPEAT', 'item_attrs': {'initial': 
True, 'attributes': {'remark': '0 = Off, 1 = Song, 2 = Playlist'}, 'lookup_item': True}}, - 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}', 'lookup': 'SHUFFLE', 'item_attrs': {'initial': True, 'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, - 'count': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} playlistcontrol ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} playlistcontrol cmd:load .* count:(.*)'}, - 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', 'item_attrs': {'initial': True}}, + 'repeat': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist repeat ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist repeat {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist repeat {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist repeat:{LOOKUP}'], 'lookup': 'REPEAT', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Playlist'}, 'lookup_item': True}}, + 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}''{CUSTOM_PATTERN1} status(?:.*)playlist shuffle:{LOOKUP}'], 'lookup': 'SHUFFLE', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, + 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', '{CUSTOM_PATTERN1} status(?:.*)playlist index:(\d*[^\s]+)', '{CUSTOM_PATTERN1} prefset server currentSong (\d+)$', '{CUSTOM_PATTERN1} playlist jump (\d*)'], 'item_attrs': {'initial': True}}, 'name': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist name ?', 'write_cmd': '{CUSTOM_ATTR1} playlist name {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist name (.*[^?])', 'item_attrs': {'initial': True}}, - 'id': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist playlistsinfo', 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_id:{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:status - 1 .*|playlist playlistsinfo |playlistcontrol cmd:load playlist_)id:(\d+)'}, + 'id': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist playlistsinfo', 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_id:{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:status - 1 .*|playlist playlistsinfo |playlistcontrol cmd:load playlist_)id:(\d*)', '{CUSTOM_PATTERN1} playlist loadtracks playlist.id=(\d*)\s']}, 'save': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist save {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist save (.*)', 'item_attrs': {'enforce': True}}, - 'load': {'read': 
True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist resume {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist resume (.*)', 'item_attrs': {'enforce': True}}, + 'load': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_name:{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} playlist resume (.*)', '{CUSTOM_PATTERN1} playlist loadtracks playlist.name:(.*)\s'], 'item_attrs': {'enforce': True}}, 'loadalbum': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist loadalbum {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist loadalbum (.*)', 'item_attrs': {'enforce': True}}, 'loadtracks': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist loadtracks {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist loadtracks (.*)', 'item_attrs': {'enforce': True}}, 'add': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist add {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist add (.*)', 'item_attrs': {'enforce': True}}, @@ -68,7 +67,7 @@ 'addtracks': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist addtracks {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist addtracks (.*)', 'item_attrs': {'enforce': True}}, 'insertalbum': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist insertalbum {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist insertalbum (.*)', 'item_attrs': {'enforce': True}}, 'inserttracks': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist insert {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist insert (.*)', 'item_attrs': {'enforce': True}}, - 'tracks': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} playlist tracks ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist tracks (.*[^?])', 'item_attrs': {'initial': True}}, + 'tracks': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} playlist tracks ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlistcontrol cmd:load .* count:(\d*)', '{CUSTOM_PATTERN1} playlist_tracks (\d*[^?])', '{CUSTOM_PATTERN1} status(?:.*)playlist tracks:(\d*[^\s]+)']}, 'clear': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist clear', 'item_type': 'bool', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist clear$', 'item_attrs': {'enforce': True, 'attributes': {'eval': 'True if value else None'}}}, 'delete': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist delete {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist delete (.*)', 'item_attrs': {'enforce': True}}, 'deleteitem': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist deleteitem {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist deleteitem (.*)', 'item_attrs': {'enforce': True}}, @@ -79,15 +78,17 @@ 'customskip': {'read': False, 'write': True, 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} customskip setfilter filter{VALUE}.cs.xml', 'dev_datatype': 'str', 'item_attrs': {'attributes': {'cache': True}}} }, 'info': { - 'connected': {'read': True, 'write': 
False, 'read_cmd': '{CUSTOM_ATTR1} connected ?', 'item_type': 'bool', 'dev_datatype': 'LMSConnection', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:connected|client) (\d|disconnect|reconnect)', 'item_attrs': {'initial': True}}, - 'name': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} name ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} name (.*)', 'item_attrs': {'initial': True}}, + 'status': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} status', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} status\s+(.*)', 'item_attrs': {'initial': True}}, + 'connected': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} connected ?', 'item_type': 'bool', 'dev_datatype': 'LMSConnection', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:connected|client) (\d|disconnect|reconnect)', '{CUSTOM_PATTERN1} status(?:.*)player_connected:([^\s]+)']}, + 'ip': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} ip ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} ip (.*)', '{CUSTOM_PATTERN1} status(?:.*)player_ip:([^:\s]+)']}, + 'name': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} name ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} name (.*)', '{CUSTOM_PATTERN1} status(?:.*)player_name:([^\s]+)']}, 'syncgroups': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} syncgroups ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} syncgroups (\d+)', 'item_attrs': {'initial': True}}, - 'signalstrength': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} signalstrength ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} signalstrength (\d+)', 'item_attrs': {'initial': True}}, + 'signalstrength': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} signalstrength ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} signalstrength (\d+)', '{CUSTOM_PATTERN1} status(?:.*)signalstrength:([^\s]+)']}, 'genre': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} genre ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} genre (.*)'}, 'artist': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} artist ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} artist (.*)'}, 'album': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} album ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} album (.*)', 'item_attrs': {'initial': True}}, 'title': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} current_title ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:current_title|playlist newsong) (.*?)(?:\s\d+)?$', 'item_attrs': {'initial': True}}, - 'path': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} path ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} path (.*)'}, + 'path': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} path ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} path (.*)', '{CUSTOM_PATTERN1} playlist open (.*)']}, 'duration': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} duration ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} duration (\d+)'}, 'albumarturl': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'str', 
'reply_pattern': '(http://.*)'} } diff --git a/lms/plugin.yaml b/lms/plugin.yaml index 4e836a46e..760026190 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -372,7 +372,6 @@ item_structs: - player - player.control enforce_updates: true - sqb_read_initial: true playmode: type: str @@ -383,7 +382,6 @@ item_structs: - player - player.control enforce_updates: true - sqb_read_initial: true playpause: type: bool @@ -418,7 +416,6 @@ item_structs: sqb_read_group: - player - player.control - sqb_read_initial: true volume_fading: type: num @@ -596,7 +593,6 @@ item_structs: sqb_read_group: - player - player.playlist - sqb_read_initial: true remark: 0 = Off, 1 = Song, 2 = Playlist lookup: @@ -611,22 +607,12 @@ item_structs: sqb_read_group: - player - player.playlist - sqb_read_initial: true remark: 0 = Off, 1 = Song, 2 = Album lookup: type: list sqb_lookup: SHUFFLE#list - count: - type: num - sqb_command: player.playlist.count - sqb_read: true - sqb_write: false - sqb_read_group: - - player - - player.playlist - index: type: str sqb_command: player.playlist.index @@ -727,7 +713,6 @@ item_structs: sqb_read_group: - player - player.playlist - sqb_read_initial: true clear: type: bool @@ -794,6 +779,16 @@ item_structs: enforce_updates: true sqb_read_group_trigger: player.info + status: + type: str + sqb_command: player.info.status + sqb_read: true + sqb_write: false + sqb_read_group: + - player + - player.info + sqb_read_initial: true + connected: type: bool sqb_command: player.info.connected @@ -802,7 +797,15 @@ item_structs: sqb_read_group: - player - player.info - sqb_read_initial: true + + ip: + type: str + sqb_command: player.info.ip + sqb_read: true + sqb_write: false + sqb_read_group: + - player + - player.info name: type: str @@ -812,7 +815,6 @@ item_structs: sqb_read_group: - player - player.info - sqb_read_initial: true syncgroups: type: num @@ -832,7 +834,6 @@ item_structs: sqb_read_group: - player - player.info - sqb_read_initial: true genre: type: str @@ -1097,7 +1098,6 @@ item_structs: - ALL.player - ALL.player.control enforce_updates: true - sqb_read_initial: true playmode: type: str @@ -1109,7 +1109,6 @@ item_structs: - ALL.player - ALL.player.control enforce_updates: true - sqb_read_initial: true playpause: type: bool @@ -1146,7 +1145,6 @@ item_structs: - ALL - ALL.player - ALL.player.control - sqb_read_initial: true volume_fading: type: num @@ -1330,7 +1328,6 @@ item_structs: - ALL - ALL.player - ALL.player.playlist - sqb_read_initial: true remark: 0 = Off, 1 = Song, 2 = Playlist lookup: @@ -1346,23 +1343,12 @@ item_structs: - ALL - ALL.player - ALL.player.playlist - sqb_read_initial: true remark: 0 = Off, 1 = Song, 2 = Album lookup: type: list sqb_lookup: SHUFFLE#list - count: - type: num - sqb_command: player.playlist.count - sqb_read: true - sqb_write: false - sqb_read_group: - - ALL - - ALL.player - - ALL.player.playlist - index: type: str sqb_command: player.playlist.index @@ -1467,7 +1453,6 @@ item_structs: - ALL - ALL.player - ALL.player.playlist - sqb_read_initial: true clear: type: bool @@ -1534,6 +1519,17 @@ item_structs: enforce_updates: true sqb_read_group_trigger: ALL.player.info + status: + type: str + sqb_command: player.info.status + sqb_read: true + sqb_write: false + sqb_read_group: + - ALL + - ALL.player + - ALL.player.info + sqb_read_initial: true + connected: type: bool sqb_command: player.info.connected @@ -1543,7 +1539,16 @@ item_structs: - ALL - ALL.player - ALL.player.info - sqb_read_initial: true + + ip: + type: str + sqb_command: player.info.ip + sqb_read: 
true + sqb_write: false + sqb_read_group: + - ALL + - ALL.player + - ALL.player.info name: type: str @@ -1554,7 +1559,6 @@ item_structs: - ALL - ALL.player - ALL.player.info - sqb_read_initial: true syncgroups: type: num @@ -1576,7 +1580,6 @@ item_structs: - ALL - ALL.player - ALL.player.info - sqb_read_initial: true genre: type: str From f6d9c345df52ec565eaedcd687db2640f3195e03 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 23 Apr 2023 09:50:13 +0200 Subject: [PATCH 065/775] DB_ADDON Plugin: - add plugin parameter to enable/disable optimizing defined value filters - diverse updates --- db_addon/__init__.py | 125 +++++++++++++++++-------------------------- db_addon/plugin.yaml | 9 +++- 2 files changed, 58 insertions(+), 76 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index fcbd7ddc1..33658a30e 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -104,7 +104,8 @@ def __init__(self, sh): self.db_configname = self.get_parameter_value('database_plugin_config') self.startup_run_delay = self.get_parameter_value('startup_run_delay') self.ignore_0 = self.get_parameter_value('ignore_0') - self.filter_values = self.get_parameter_value('filter_values') + self.value_filter = self.get_parameter_value('value_filter') + self.optimize_value_filter = self.get_parameter_value('optimize_value_filter') self.use_oldest_entry = self.get_parameter_value('use_oldest_entry') # init cache dicts @@ -196,14 +197,11 @@ def get_database_item_path() -> Item: _lookup_item = item - self.logger.debug(f"get_database_item_path called for item = {item.path()}") - for i in range(3): if self.has_iattr(_lookup_item.conf, 'db_addon_database_item'): self.logger.debug(f"Attribut 'db_addon_database_item' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") _database_item_path = self.get_iattr_value(_lookup_item.conf, 'db_addon_database_item') _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) - self.logger.debug(f"get_database_item_path: {_database_item_path=}, {_startup=}") return _database_item_path, _startup else: _lookup_item = _lookup_item.return_parent() @@ -217,8 +215,6 @@ def get_database_item() -> Item: _lookup_item = item.return_parent() - self.logger.debug(f"get_database_item called for item = {item.path()}") - for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): self.logger.debug(f"Attribut '{self.item_attribute_search_str}' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") @@ -252,92 +248,65 @@ def check_db_addon_fct(check_item) -> bool: """ if self.has_iattr(check_item.conf, 'db_addon_fct'): if self.get_iattr_value(check_item.conf, 'db_addon_fct').lower() in ALL_ONCHANGE_ATTRIBUTES: - self.logger.debug(f"db_addon item for database item {item.path()} found.") return True return False - def format_db_addon_ignore_value_list() -> Union[None, list]: + def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filter): """ Check of list of comparison operators is formally valid """ - OPERATOR_LIST = ['!=', '>=', '<=', '>', '<'] - + max_values = {'!=': [], '>=': [], '<=': [], '>': [], '<': []} db_addon_ignore_value_list_formatted = [] + for _entry in db_addon_ignore_value_list: _entry = _entry.strip() - for op in OPERATOR_LIST: + for op in max_values.keys(): if op in _entry: var = _entry.split(op, 1) value = var[1].strip() - if value.isdigit() or to_float(value): - db_addon_ignore_value_list_formatted.append(f"{op} 
{value}") - break + value = to_int_float(value) + if value is None: + continue + db_addon_ignore_value_list_formatted.append(f"{op} {value}") + max_values[op].append(value) + + self.logger.info(f"Summarized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_formatted}") + if not db_addon_ignore_value_list_formatted: return - return optimize_db_addon_ignore_value_list(db_addon_ignore_value_list_formatted) - - def optimize_db_addon_ignore_value_list(comp_list: list) -> list: - value_l = None - value_h = None - value_le = None - value_he = None - values_ue = [] - - # find low, low_e, high, high_e - for comp in comp_list: - op, value = comp.split(' ') - value = int(value) - if op == '<': - if value_l is None or (value < value_l): - value_l = value - elif op == '<=': - if value_le is None or (value < value_le): - value_le = value - elif op == '>': - if value_h is None or (value > value_h): - value_h = value - elif op == '>=': - if value_he is None or (value > value_he): - value_he = value - elif op == '!=': - values_ue.append(value) - elif op == '==': - self.logger.debug(f"Comparison to '{comp}' will be ignored.") - - self.logger.debug(f"optimize_db_addon_ignore_value_list: {value_l=}, {value_le=}, {value_h=}, {value_he=}, {values_ue=}") - - # find low end - if value_l and not value_le: - low_end = ('<', value_l) - elif not value_l and value_le: - low_end = ('<=', value_le) - elif value_l and value_le: - low_end = ('<=', value_le) if value_le < value_l else ('<', value_l) + + if not optimize: + return db_addon_ignore_value_list_formatted + + self.logger.info(f"Optimizing 'ignore_value_list' for item {item.path()} active.") + # find low + lower_value_list = max_values['<'] + max_values['<='] + if lower_value_list: + max_lower_value = max(lower_value_list) + lower_op = '<' if max_lower_value in max_values['<'] else '<=' + lower_end = (lower_op, max_lower_value) else: - low_end = (None, None) - - # find high end - if value_h and not value_he: - high_end = ('<', value_h) - elif not value_h and value_he: - high_end = ('<=', value_he) - elif value_h and value_he: - high_end = ('>=', value_he) if value_he > value_h else ('>', value_h) + lower_end = (None, None) + # find high + upper_value_list = max_values['>'] + max_values['>='] + if upper_value_list: + min_upper_value = min(upper_value_list) + upper_op = '>' if min_upper_value in max_values['>'] else '>=' + upper_end = (upper_op, min_upper_value) else: - high_end = (None, None) + upper_end = (None, None) # generate comp_list db_addon_ignore_value_list_optimized = [] - if low_end != (None, None): - db_addon_ignore_value_list_optimized.append(f"{low_end[0]} {low_end[1]}") - if high_end != (None, None): - db_addon_ignore_value_list_optimized.append(f"{high_end[0]} {high_end[1]}") - if values_ue: - for v in values_ue: - if low_end[1] and v > low_end[1]: - db_addon_ignore_value_list_optimized.append(f'!= {v}') - elif high_end[1] and v < high_end[1]: + if lower_end[0]: + db_addon_ignore_value_list_optimized.append(f"{lower_end[0]} {lower_end[1]}") + if upper_end[0]: + db_addon_ignore_value_list_optimized.append(f"{upper_end[0]} {upper_end[1]}") + if max_values['!=']: + for v in max_values['!=']: + if (lower_end[0] and v >= lower_end[1]) or (upper_end[0] and v <= upper_end[1]): db_addon_ignore_value_list_optimized.append(f'!= {v}') + self.logger.info(f"Optimized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_optimized}") return db_addon_ignore_value_list_optimized # handle all items with db_addon_fct @@ -368,10 
+337,10 @@ def optimize_db_addon_ignore_value_list(comp_list: list) -> list: if not db_addon_ignore_value_list: db_addon_ignore_value_list = [] db_addon_ignore_value_list.append("!= 0") - if self.filter_values: - for entry in list(self.filter_values.keys()): + if self.value_filter: + for entry in list(self.value_filter.keys()): if entry in str(item.path()): - db_addon_ignore_value_list.extend(self.filter_values[entry]) + db_addon_ignore_value_list.extend(self.value_filter[entry]) if db_addon_ignore_value_list: db_addon_ignore_value_list = format_db_addon_ignore_value_list() @@ -2897,6 +2866,12 @@ def to_float(arg) -> Union[float, None]: except (ValueError, TypeError): return None +def to_int_float(arg): + try: + return int(arg) + except (ValueError, TypeError): + return to_float(arg) + ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index b8894d6a1..4e3c7101f 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -44,12 +44,19 @@ parameters: en: "At items having a entry of that list in path, val_num=0 will be ignored for database queries. Example: temp | hum" - filter_values: + value_filter: type: dict description: de: "Definition von Wertefiltern als Wörterbuch {'Teil des Itempfades: [Liste von Vergleichsoperatoren als String]} Bsp: {'temp': ['> -10', '< 85'], 'hum': ['>= 0', '<= 100']}" en: "Definition of value filters as dict {'part of item path': [List of comparison operators als string]} Example: {'temp': ['> -10', '< 85'], 'hum': ['>= 0', '<= 100']}" + optimize_value_filter: + type: bool + default: True + description: + de: "Optimierung der gesetzen als Plugin-Parameter oder/und Item-Attribute gesetzten Wertefilter." 
+ en: "Optimize value filters set as plugin parameter or/and item attribute" + use_oldest_entry: type: bool default: False From f439385f8b9047c16438a2c9b8283ec53bae5919 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Sun, 23 Apr 2023 10:28:28 +0200 Subject: [PATCH 066/775] kodi: added standalone for struct creation --- kodi/__init__.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/kodi/__init__.py b/kodi/__init__.py index b56b3ad46..b3f6ba87d 100644 --- a/kodi/__init__.py +++ b/kodi/__init__.py @@ -22,12 +22,28 @@ ######################################################################### import builtins +import os +import sys + +if __name__ == '__main__': + builtins.SDP_standalone = True + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) + +else: + builtins.SDP_standalone = False from lib.model.sdp.globals import JSON_MOVE_KEYS -from lib.model.smartdeviceplugin import SmartDevicePlugin -# from .webif import WebInterface +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone -builtins.SDP_standalone = False +# from .webif import WebInterface class kodi(SmartDevicePlugin): @@ -394,3 +410,7 @@ def _update_status(self): if self._playerid: self.send_command('status.get_status_play', None) self.send_command('status.get_item', None) + + +if __name__ == '__main__': + s = Standalone(kodi, sys.argv[0]) From 484f31c8af67e76b913441f06b7036a6bde06264 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 23 Apr 2023 11:58:33 +0200 Subject: [PATCH 067/775] AVM Plugin: - optimize "get_logs" to prevent older fritz-devices from timeout - use get_logs_from tr-064 as backup solution - increase request timeout if timeout occurred --- avm/__init__.py | 65 ++++++++++++++++++++++++++++++------------- avm/webif/__init__.py | 5 +++- 2 files changed, 49 insertions(+), 21 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index e6a229e2d..96d2f5fdc 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -143,7 +143,7 @@ def __init__(self, sh): # init FritzDevice try: - self.fritz_device = FritzDevice(_host, _port, ssl, _verify, _username, _passwort, _call_monitor_incoming_filter, _use_tr064_backlist, self) + self.fritz_device = FritzDevice(_host, _port, ssl, _verify, _username, _passwort, _call_monitor_incoming_filter, _use_tr064_backlist, _log_entry_count, self) except IOError as e: self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via TR064-Interface. 
Not connected.") self.fritz_device = None @@ -492,7 +492,7 @@ class FritzDevice: ERROR_COUNT_TO_BE_BLACKLISTED = 2 - def __init__(self, host, port, ssl, verify, username, password, call_monitor_incoming_filter, use_tr064_backlist, plugin_instance): + def __init__(self, host, port, ssl, verify, username, password, call_monitor_incoming_filter, use_tr064_backlist, log_entry_count, plugin_instance): """ Init class FritzDevice """ @@ -508,6 +508,7 @@ def __init__(self, host, port, ssl, verify, username, password, call_monitor_inc self.username = username self.password = password self.use_tr064_blacklist = use_tr064_backlist + self.log_entry_count = log_entry_count self._call_monitor_incoming_filter = call_monitor_incoming_filter self._data_cache = {} self._calllist_cache = [] @@ -1396,6 +1397,28 @@ def get_device_log_from_tr064(self): else: return device_log + def get_device_log_from_tr064_separated(self): + + data = self.get_device_log_from_tr064() + + if data and isinstance(data, list): + # cut data if needed + if self.log_entry_count: + data = data[:self.log_entry_count] + + # bring data to needed format + log_list = [] + for text in data: + l_date = text[:8] + l_time = text[9:17] + l_text = text[18:] + l_cat = '-' + l_type = '-' + l_ts = int(datetime.datetime.timestamp(datetime.datetime.strptime(text[:17], '%d.%m.%y %H:%M:%S'))) + log_list.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) + + return log_list + # ---------------------------------- # wlan methods # ---------------------------------- @@ -1739,6 +1762,7 @@ def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_in self._templates: Dict[str, FritzHome.FritzhomeTemplate] = {} self._logged_in = False self._session = requests.Session() + self._timeout = 10 self.items = dict() self.connected = False self.last_request = None @@ -1941,18 +1965,24 @@ def _get_item_ain(self, item) -> Union[str, None]: def item_list(self): return list(self.items.keys()) - def _request(self, url: str, params=None, timeout: int = 10, result: str = 'text'): + def _request(self, url: str, params=None, result: str = 'text'): """ Send a request with parameters. :param url: URL to be requested :param params: params for request - :param timeout: timeout :param result: type of result :return: request response """ try: - rsp = self._session.get(url, params=params, timeout=timeout, verify=self.verify) + rsp = self._session.get(url, params=params, timeout=self._timeout, verify=self.verify) + except requests.exceptions.Timeout: + if self._timeout < 31: + self._timeout += 5 + self.logger.info(f"request timed out. 
timeout extended by 5s to {self._timeout}") + else: + self.logger.debug(f"get request timeout.") + return except Exception as e: self.logger.error(f"Error during GET request {e} occurred.") else: @@ -2647,10 +2677,7 @@ def get_device_log_from_lua(self): params = {"sid": self._sid} # get data - try: - data = self._request(url, params, result='json') - except JSONDecodeError: - return + data = self._request(url, params, result='json') if isinstance(data, dict): data = data.get('mq_log') @@ -2660,16 +2687,16 @@ def get_device_log_from_lua(self): data = data[:self.log_entry_count] # bring data to needed format - newlog = [] - for text, typ, cat in data: + log_list = [] + for text, typ, cat, val in data: l_date = text[:8] l_time = text[9:17] l_text = text[18:] l_cat = int(cat) l_type = int(typ) l_ts = int(datetime.datetime.timestamp(datetime.datetime.strptime(text[:17], '%d.%m.%y %H:%M:%S'))) - newlog.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) - return newlog + log_list.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) + return log_list def get_device_log_from_lua_separated(self): """ @@ -2683,10 +2710,8 @@ def get_device_log_from_lua_separated(self): url = self._get_prefixed_host() + self.LOG_SEPARATE_ROUTE params = {"sid": self._sid} - try: - data = self._request(url, params, result='json') - except JSONDecodeError: - return + # get data + data = self._request(url, params, result='json') if isinstance(data, dict): data = data.get('mq_log') @@ -2695,11 +2720,11 @@ def get_device_log_from_lua_separated(self): data = data[:self.log_entry_count] # bring data to needed format - data_formated = [] + data_formatted = [] for entry in data: dt = datetime.datetime.strptime(f"{entry[0]} {entry[1]}", '%d.%m.%y %H:%M:%S').strftime('%d.%m.%Y %H:%M:%S') - data_formated.append([dt, entry[2], entry[3], entry[4]]) - return data_formated + data_formatted.append([dt, entry[2], entry[3], entry[4]]) + return data_formatted # FritzhomeDevice classes diff --git a/avm/webif/__init__.py b/avm/webif/__init__.py index 2b0095786..4fafcfa6c 100644 --- a/avm/webif/__init__.py +++ b/avm/webif/__init__.py @@ -74,7 +74,10 @@ def index(self, reload=None, action=None): else: aha_items = None aha_item_count = None - logentries = None + if self.plugin.fritz_device: + logentries = self.plugin.get_device_log_from_tr064_separated() + else: + logentries = None if self.plugin.monitoring_service: call_monitor_items = self.plugin.monitoring_service.item_list() From d3efb4c38aedd2d99a476a03317bff2369c8d457 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Apr 2023 20:58:46 +0200 Subject: [PATCH 068/775] lms plugin: improved some command reply_patterns --- lms/commands.py | 9 +++++---- lms/datatypes.py | 2 +- lms/plugin.yaml | 12 ++++++++++++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/lms/commands.py b/lms/commands.py index f36865b25..099448d56 100755 --- a/lms/commands.py +++ b/lms/commands.py @@ -30,7 +30,7 @@ 'control': { 'power': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} power ?', 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} power {RAW_VALUE:01}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:prefset server\s)?power (\d)', '{CUSTOM_PATTERN1} status(?:.*)power:([^\s]+)'], 'item_attrs': {'enforce': True}}, 'playmode': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mode ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} mode {VALUE}', 'dev_datatype': 'LMSPlayMode', 'cmd_settings': {'valid_list_ci': ['PLAY', 'PAUSE', 'STOP']}, 'reply_pattern': 
[r'{CUSTOM_PATTERN1} mode {VALID_LIST_CI}', r'{CUSTOM_PATTERN1} playlist (pause \d|stop)', '{CUSTOM_PATTERN1} status(?:.*)mode:([^\s]+)'], 'item_attrs': {'enforce': True}}, - 'playpause': {'read': True, 'write': True, 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'LMSPlay', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:playlist\s)?(play|pause)(?:\s3)?$', 'item_attrs': {'enforce': True}}, + 'playpause': {'read': True, 'write': True, 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'LMSPlay', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:playlist\s)?(play|pause)(?:\s3)?$', '{CUSTOM_PATTERN1} pause (0|1)'], 'item_attrs': {'enforce': True}}, 'stop': {'read': True, 'write': True, 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} {VALUE}', 'dev_datatype': 'LMSStop', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:playlist\s)?(stop)$', 'item_attrs': {'enforce': True}}, 'mute': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mixer muting ?', 'item_type': 'bool', 'write_cmd': '{CUSTOM_ATTR1} mixer muting {RAW_VALUE:01}', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:mixer muting|prefset server mute) (\d)', 'item_attrs': {'initial': True, 'enforce': True}}, 'volume': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} mixer volume ?', 'item_type': 'num', 'write_cmd': '{CUSTOM_ATTR1} mixer volume {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:mixer volume \-?|prefset server volume \-?)(\d{1,3})', '{CUSTOM_PATTERN1} status(?:.*)mixer volume:([^\s]+)']}, @@ -54,8 +54,8 @@ }, 'playlist': { 'repeat': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist repeat ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist repeat {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist repeat {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist repeat:{LOOKUP}'], 'lookup': 'REPEAT', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Playlist'}, 'lookup_item': True}}, - 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}''{CUSTOM_PATTERN1} status(?:.*)playlist shuffle:{LOOKUP}'], 'lookup': 'SHUFFLE', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, - 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', '{CUSTOM_PATTERN1} status(?:.*)playlist index:(\d*[^\s]+)', '{CUSTOM_PATTERN1} prefset server currentSong (\d+)$', '{CUSTOM_PATTERN1} playlist jump (\d*)'], 'item_attrs': {'initial': True}}, + 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist shuffle:{LOOKUP}'], 'lookup': 'SHUFFLE', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, + 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 
[r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', '{CUSTOM_PATTERN1} status(?:.*)playlist index:(\d*[^\s]+)', '{CUSTOM_PATTERN1} prefset server currentSong (\d+)$', '{CUSTOM_PATTERN1} playlist jump (\d*)', '{CUSTOM_PATTERN1} play (\d*)'], 'item_attrs': {'initial': True}}, 'name': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist name ?', 'write_cmd': '{CUSTOM_ATTR1} playlist name {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist name (.*[^?])', 'item_attrs': {'initial': True}}, 'id': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist playlistsinfo', 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_id:{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:status - 1 .*|playlist playlistsinfo |playlistcontrol cmd:load playlist_)id:(\d*)', '{CUSTOM_PATTERN1} playlist loadtracks playlist.id=(\d*)\s']}, 'save': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist save {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist save (.*)', 'item_attrs': {'enforce': True}}, @@ -88,8 +88,9 @@ 'artist': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} artist ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} artist (.*)'}, 'album': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} album ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} album (.*)', 'item_attrs': {'initial': True}}, 'title': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} current_title ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} (?:current_title|playlist newsong) (.*?)(?:\s\d+)?$', 'item_attrs': {'initial': True}}, - 'path': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} path ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} path (.*)', '{CUSTOM_PATTERN1} playlist open (.*)']}, + 'path': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} path ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} path (.*)', '{CUSTOM_PATTERN1} playlist open (.*)', '{CUSTOM_PATTERN1} playlist play (.*)']}, 'duration': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} duration ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} duration (\d+)'}, + 'trackstat': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} trackstat changedstatistic (.*)'}, 'albumarturl': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '(http://.*)'} } } diff --git a/lms/datatypes.py b/lms/datatypes.py index ae78fb3b8..e331daf50 100755 --- a/lms/datatypes.py +++ b/lms/datatypes.py @@ -20,7 +20,7 @@ def get_send_data(self, data, type=None, **kwargs): return "play 3" if data is True else "pause 3" def get_shng_data(self, data, type=None, **kwargs): - return True if data == "play" else False + return True if data in ["play", "0"] else False class DT_LMSAlarms(DT.Datatype): diff --git a/lms/plugin.yaml b/lms/plugin.yaml index 760026190..d6b17ef8b 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -891,6 +891,12 @@ item_structs: - player - player.info + trackstat: + type: str + sqb_command: player.info.trackstat + sqb_read: true + sqb_write: false + albumarturl: type: str sqb_command: player.info.albumarturl @@ -1643,6 +1649,12 @@ 
item_structs: - ALL.player - ALL.player.info + trackstat: + type: str + sqb_command: player.info.trackstat + sqb_read: true + sqb_write: false + albumarturl: type: str sqb_command: player.info.albumarturl From deea82b1edbd7f645c83a20d7451002649dd01d0 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 24 Apr 2023 09:36:50 +0200 Subject: [PATCH 069/775] lms plugin: fix variable import --- lms/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lms/__init__.py b/lms/__init__.py index 675258eac..47e57f96e 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -37,7 +37,7 @@ class SmartPluginWebIf(): sys.path.insert(0, BASE) -from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_NET_TCP_CLI, CONN_SER_ASYNC) +from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_NET_TCP_CLI, CONN_SER_ASYNC, PLUGIN_ATTR_RECURSIVE, CUSTOM_SEP) from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone import urllib.parse From b0505160e4a8bcf00ed3b7aa24b37afbe032497b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 24 Apr 2023 09:47:28 +0200 Subject: [PATCH 070/775] lms plugin: simplify variable import --- lms/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lms/__init__.py b/lms/__init__.py index 47e57f96e..e3d5dfca4 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -37,7 +37,7 @@ class SmartPluginWebIf(): sys.path.insert(0, BASE) -from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_NET_TCP_CLI, CONN_SER_ASYNC, PLUGIN_ATTR_RECURSIVE, CUSTOM_SEP) +from lib.model.sdp.globals import (CUSTOM_SEP, PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_RECURSIVE, PLUGIN_ATTR_CONN_TERMINATOR) from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone import urllib.parse From ccfa95acc0cc430a91b894e1a2da688ab24511d5 Mon Sep 17 00:00:00 2001 From: psilo909 Date: Tue, 25 Apr 2023 07:36:54 +0200 Subject: [PATCH 071/775] Tankerkoenig plugin: added check for Nonetype results --- tankerkoenig/__init__.py | 14 +++++++++++--- tankerkoenig/plugin.yaml | 2 +- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/tankerkoenig/__init__.py b/tankerkoenig/__init__.py index 2320cd0a0..f157416f2 100755 --- a/tankerkoenig/__init__.py +++ b/tankerkoenig/__init__.py @@ -37,7 +37,7 @@ class TankerKoenig(SmartPlugin): - PLUGIN_VERSION = "2.0.2" + PLUGIN_VERSION = "2.0.3" _base_url = 'https://creativecommons.tankerkoenig.de/json' _detail_url_suffix = 'detail.php' @@ -324,7 +324,11 @@ def set_item_status_values(self): self.logger.debug(f"set_item_status_values: handle item {item} type {item.type()}") station_id = self.item_dict[item]['station_id'] tankerkoenig_attr = self.item_dict[item]['tankerkoenig_attr'] - value = self.station_prices.get(station_id, None).get(tankerkoenig_attr, None) + if self.station_prices.get(station_id, None) is not None: + value = self.station_prices.get(station_id, None).get(tankerkoenig_attr, None) + else: + self.logger.error( + f"set_item_status_values: station_id with {station_id} does not exist in station_prices.") self.logger.debug(f"set_item_status_values: station_id={station_id}, tankerkoenig_attr={tankerkoenig_attr}, value={value}") if value: item(value, self.get_shortname()) @@ -337,7 +341,11 @@ def set_item_detail_values(self): for item in 
self.item_dict: station_id = self.item_dict[item]['station_id'] tankerkoenig_attr = self.item_dict[item]['tankerkoenig_attr'] - value = self.station_details.get(station_id, None).get(tankerkoenig_attr, None) + if self.station_details.get(station_id, None) is not None: + value = self.station_details.get(station_id, None).get(tankerkoenig_attr, None) + else: + self.logger.error( + f"set_item_status_values: station_id with {station_id} does not exist in station_details.") self.logger.debug(f"set_item_detail_values: station_id={station_id}, tankerkoenig_attr={tankerkoenig_attr}, value={value}") if value: item(value, self.get_shortname()) diff --git a/tankerkoenig/plugin.yaml b/tankerkoenig/plugin.yaml index fc30b10ab..830cfa5ac 100755 --- a/tankerkoenig/plugin.yaml +++ b/tankerkoenig/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins_doc/config/tankerkoenig.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/938924-benzinpreis-plugin keywords: petrol station, fuel prices, petrol prices - version: 2.0.2 # Plugin version + version: 2.0.3 # Plugin version sh_minversion: 1.9 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: # minimum Python version to use for this plugin From 0726b15b5910ac86432b5e49ce357053bc72b61a Mon Sep 17 00:00:00 2001 From: gruberth Date: Tue, 25 Apr 2023 20:11:01 +0200 Subject: [PATCH 072/775] husky2: Disabled street view controls on map --- husky2/sv_widgets/husky2.js | 1 + 1 file changed, 1 insertion(+) diff --git a/husky2/sv_widgets/husky2.js b/husky2/sv_widgets/husky2.js index f09cb5ef8..ebdd72deb 100755 --- a/husky2/sv_widgets/husky2.js +++ b/husky2/sv_widgets/husky2.js @@ -17,6 +17,7 @@ $.widget("sv.husky2", $.sv.widget, { zoom: this.options.zoomlevel, mapTypeId: 'hybrid', center: new google.maps.LatLng(0.0, 0.0), + streetViewControl: false, }); this.marker_myself = new google.maps.Marker({ From 54a5061381e4b1faf956dd3881d995fe12d7c4ec Mon Sep 17 00:00:00 2001 From: ivande Date: Thu, 27 Apr 2023 16:20:25 +0200 Subject: [PATCH 073/775] new_event_loop for multi-instance --- telegram/__init__.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index 88f65499f..de2bf0a34 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -64,7 +64,6 @@ MESSAGE_TAG_SOURCE = '[SOURCE]' MESSAGE_TAG_DEST = '[DEST]' - class Telegram(SmartPlugin): PLUGIN_VERSION = "1.8.0" @@ -100,7 +99,8 @@ def __init__(self, sh): self.logger.error(f"{self.get_fullname()}: Unable to import Python package 'python-telegram-bot' [{REQUIRED_PACKAGE_IMPORTED}]") return - self._loop = asyncio.get_event_loop() + self._loop = asyncio.new_event_loop() # new_event is required for multi-instance + asyncio.set_event_loop(self._loop) self.alive = False self._name = self.get_parameter_value('name') @@ -188,14 +188,6 @@ def stop(self): time.sleep(1) self.alive = False # Clears the infiniti loop in sendQueue try: - # if not self._taskConn.done(): - # if self.debug_enabled: - # self.logger.debug("taskConn not done") - # self._taskConn.cancel() - # if not self._taskQueue.done(): - # if self.debug_enabled: - # self.logger.debug("taskQueue not done") - # self._taskQueue.cancel() asyncio.gather(self._taskConn, self._taskQueue) self.disconnect() @@ -229,11 +221,11 @@ async def connect(self): await self._application.initialize() await self._application.start() self._updater = 
self._application.updater - + q = await self._updater.start_polling(timeout=self._long_polling_timeout) - + if self.debug_enabled: - self.logger.debug(f"started polling the updater, Queue is {q}") + self.logger.debug(f"started polling the updater, Queue is {q}, event_loop: {event_loop}") self._bot = self._updater.bot self.logger.info(f"Telegram bot is listening: {await self._updater.bot.getMe()}") @@ -254,6 +246,8 @@ async def sendQueue(self): """ Waiting for messages to be sent in the queue and sending them to Telegram. The queue expects a dictionary with various parameters + dict txt: {"msgType":"Text", "msg":msg, "chat_id":chat_id, "reply_markup":reply_markup, "parse_mode":parse_mode } + dict photo: {"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":chat_id, "caption":caption, "local_prepare":local_prepare} """ if self.debug_enabled: self.logger.debug(f"sendQueue called - queue: [{self._queue}]") From fd2487d839f2968ad30c7c4069c6e7225c97b02c Mon Sep 17 00:00:00 2001 From: ivande Date: Thu, 27 Apr 2023 16:32:51 +0200 Subject: [PATCH 074/775] bug in log output. --- telegram/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index de2bf0a34..7adbf3bf3 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -225,7 +225,7 @@ async def connect(self): q = await self._updater.start_polling(timeout=self._long_polling_timeout) if self.debug_enabled: - self.logger.debug(f"started polling the updater, Queue is {q}, event_loop: {event_loop}") + self.logger.debug(f"started polling the updater, Queue is {q}") self._bot = self._updater.bot self.logger.info(f"Telegram bot is listening: {await self._updater.bot.getMe()}") From e11d4525a9c2235409d8de0f533f4314f817a596 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 28 Apr 2023 07:54:15 +0200 Subject: [PATCH 075/775] Removed non-avm modified files from pull request --- db_addon/__init__.py | 5678 ++++++++++++++-------------- db_addon/plugin.yaml | 2186 +++++------ db_addon/user_doc.rst | 554 +-- db_addon/webif/__init__.py | 276 +- tasmota/__init__.py | 3446 ++++++++--------- tasmota/plugin.yaml | 566 +-- tasmota/webif/__init__.py | 250 +- tasmota/webif/templates/index.html | 1546 ++++---- 8 files changed, 7251 insertions(+), 7251 deletions(-) mode change 100644 => 100755 tasmota/__init__.py mode change 100644 => 100755 tasmota/plugin.yaml mode change 100644 => 100755 tasmota/webif/__init__.py mode change 100644 => 100755 tasmota/webif/templates/index.html diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 73aab33cd..a3fa2bf73 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -1,2839 +1,2839 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2022- Michael Wenzel wenzel_michael@web.de -######################################################################### -# This file is part of SmartHomeNG. -# https://www.smarthomeNG.de -# https://knx-user-forum.de/forum/supportforen/smarthome-py -# -# This plugin provides additional functionality to mysql database -# connected via database plugin -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# -######################################################################### - -import sqlvalidator -import datetime -import time -import re -import queue -from dateutil.relativedelta import relativedelta -from typing import Union -import threading - -from lib.model.smartplugin import SmartPlugin -from lib.item import Items -from lib.item.item import Item -from lib.shtime import Shtime -from lib.plugin import Plugins -from .webif import WebInterface -import lib.db - -DAY = 'day' -WEEK = 'week' -MONTH = 'month' -YEAR = 'year' - - -class DatabaseAddOn(SmartPlugin): - """ - Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items - """ - - PLUGIN_VERSION = '1.1.0' - - def __init__(self, sh): - """ - Initializes the plugin. - """ - - # Call init code of parent class (SmartPlugin) - super().__init__() - - # get item and shtime instance - self.shtime = Shtime.get_instance() - self.items = Items.get_instance() - self.plugins = Plugins.get_instance() - - # define cache dicts - self.current_values = {} # Dict to hold min and max value of current day / week / month / year for items - self.previous_values = {} # Dict to hold value of end of last day / week / month / year for items - self.item_cache = {} # Dict to hold item_id, oldest_log_ts and oldest_entry for items - - # define variables for database, database connection, working queue and status - self.item_queue = queue.Queue() # Queue containing all to be executed items - self.work_item_queue_thread = None # Working Thread for queue - self._db_plugin = None # object if database plugin - self._db = None # object of database - self.connection_data = None # connection data list of database - self.db_driver = None # driver of the used database - self.db_instance = None # instance of the used database - self.item_attribute_search_str = 'database' # attribute, on which an item configured for database can be identified - self.last_connect_time = 0 # mechanism for limiting db connection requests - self.alive = None # Is plugin alive? 
- self.startup_finished = False # Startup of Plugin finished - self.suspended = False # Is plugin activity suspended - self.active_queue_item: str = '-' # String holding item path of currently executed item - - # define debug logs - self.parse_debug = False # Enable / Disable debug logging for method 'parse item' - self.execute_debug = False # Enable / Disable debug logging for method 'execute items' - self.sql_debug = False # Enable / Disable debug logging for sql stuff - self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = False # Enable / Disable debug logging for query preparation - - # define default mysql settings - self.default_connect_timeout = 60 - self.default_net_read_timeout = 60 - - # define variables from plugin parameters - self.db_configname = self.get_parameter_value('database_plugin_config') - self.startup_run_delay = self.get_parameter_value('startup_run_delay') - self.ignore_0 = self.get_parameter_value('ignore_0') - self.use_oldest_entry = self.get_parameter_value('use_oldest_entry') - - # init cache dicts - self._init_cache_dicts() - - # activate debug logger - if self.log_level == 10: # info: 20 debug: 10 - self.parse_debug = True - self.execute_debug = True - self.sql_debug = True - self.onchange_debug = True - self.prepare_debug = True - - # init webinterface - self.init_webinterface(WebInterface) - - def run(self): - """ - Run method for the plugin - """ - - self.logger.debug("Run method called") - - # check existence of db-plugin, get parameters, and init connection to db - if not self._check_db_existence(): - self.logger.error(f"Check of existence of database plugin incl connection check failed. Plugin not loaded") - return self.deinit() - - self._db = lib.db.Database("DatabaseAddOn", self.db_driver, self.connection_data) - if not self._db.api_initialized: - self.logger.error("Initialization of database API failed") - return self.deinit() - - self.logger.debug("Initialization of database API successful") - - # init db - if not self._initialize_db(): - return self.deinit() - - # check db connection settings - if self.db_driver is not None and self.db_driver.lower() == 'pymysql': - self._check_db_connection_setting() - - # add scheduler for cyclic trigger item calculation - self.scheduler_add('cyclic', self.execute_due_items, prio=3, cron='5 0 0 * * *', cycle=None, value=None, offset=None, next=None) - - # add scheduler to trigger items to be calculated at startup with delay - dt = self.shtime.now() + datetime.timedelta(seconds=(self.startup_run_delay + 3)) - self.logger.info(f"Set scheduler for calculating startup-items with delay of {self.startup_run_delay + 3}s to {dt}.") - self.scheduler_add('startup', self.execute_startup_items, next=dt) - - # update database_items in item config, where path was given - self._update_database_items() - - # set plugin to alive - self.alive = True - - # start the queue consumer thread - self._work_item_queue_thread_startup() - - def stop(self): - """ - Stop method for the plugin - """ - - self.logger.debug("Stop method called") - self.alive = False - self.scheduler_remove('cyclic') - self._work_item_queue_thread_shutdown() - - def parse_item(self, item: Item): - """ - Default plugin parse_item method. Is called when the plugin is initialized. - - The plugin can, corresponding to its attribute keywords, decide what to do with the item in the future, like adding it to an internal array for future reference - :param item: The item to process. 
- :return: If the plugin needs to be informed of an items change you should return a call back function - like the function update_item down below. An example when this is needed is the knx plugin - where parse_item returns the update_item function when the attribute knx_send is found. - This means that when the items value is about to be updated, the call back function is called - with the item, caller, source and dest as arguments and in case of the knx plugin the value - can be sent to the knx with a knx write function within the knx plugin. - """ - - def get_database_item() -> Item: - """ - Returns item from shNG config which is an item with database attribut valid for current db_addon item - """ - - _lookup_item = item.return_parent() - - for i in range(2): - if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): - self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has been found for item={item.path()} {i + 1} level above item.") - return _lookup_item - else: - _lookup_item = _lookup_item.return_parent() - - def has_db_addon_item() -> bool: - """Returns item from shNG config which is item with db_addon attribut valid for database item""" - - for child in item.return_children(): - if check_db_addon_fct(child): - return True - - for child_child in child.return_children(): - if check_db_addon_fct(child_child): - return True - - for child_child_child in child_child.return_children(): - if check_db_addon_fct(child_child_child): - return True - - return False - - def check_db_addon_fct(check_item) -> bool: - """ - Check if item has db_addon_fct and is onchange - """ - if self.has_iattr(check_item.conf, 'db_addon_fct'): - if self.get_iattr_value(check_item.conf, 'db_addon_fct').lower() in ALL_ONCHANGE_ATTRIBUTES: - self.logger.debug(f"db_addon item for database item {item.path()} found.") - return True - return False - - # handle all items with db_addon_fct - if self.has_iattr(item.conf, 'db_addon_fct'): - - if self.parse_debug: - self.logger.debug(f"parse item: {item.path()} due to 'db_addon_fct'") - - # get db_addon_fct attribute value - db_addon_fct = self.get_iattr_value(item.conf, 'db_addon_fct').lower() - - # get attribute value if item should be calculated at plugin startup - db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) - - # get attribute if certain value should be ignored at db query - if self.has_iattr(item.conf, 'database_ignore_value'): - db_addon_ignore_value = self.get_iattr_value(item.conf, 'database_ignore_value') - elif any(x in str(item.id()) for x in self.ignore_0): - db_addon_ignore_value = 0 - else: - db_addon_ignore_value = None - - # get database item and return if not available - database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') - if database_item_path is not None: - database_item = database_item_path - else: - database_item = get_database_item() - if database_item is None: - self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") - return - - # return if mandatory params for ad_addon_fct not given. - if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES and not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored.") - return - - # create standard items config - item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value': db_addon_ignore_value} - if database_item_path is not None: - item_config_data_dict.update({'database_item_path': True}) - else: - database_item_path = database_item.path() - - if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item_path}") - - # handle daily items - if db_addon_fct in ALL_DAILY_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'daily'}) - - # handle weekly items - elif db_addon_fct in ALL_WEEKLY_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'weekly'}) - - # handle monthly items - elif db_addon_fct in ALL_MONTHLY_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'monthly'}) - - # handle yearly items - elif db_addon_fct in ALL_YEARLY_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'yearly'}) - - # handle static items - elif db_addon_fct in ALL_GEN_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'static'}) - - # handle on-change items - elif db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - item_config_data_dict.update({'cycle': 'on-change'}) - - # handle all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme - if 'summe' in db_addon_fct: - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None or 'year' not in db_addon_params: - self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") - db_addon_params = {'year': 'current'} - item_config_data_dict.update({'params': db_addon_params}) - - # handle wachstumsgradtage function - elif db_addon_fct == 'wachstumsgradtage': - DEFAULT_THRESHOLD = 10 - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None or 'year' not in db_addon_params: - self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") - db_addon_params = {'year': 'current'} - if 'threshold' not in db_addon_params: - self.logger.info(f"No 'threshold' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with {DEFAULT_THRESHOLD} will be used.") - db_addon_params.update({'threshold': DEFAULT_THRESHOLD}) - if not isinstance(db_addon_params['threshold'], int): - threshold = to_int(db_addon_params['threshold']) - db_addon_params['threshold'] = DEFAULT_THRESHOLD if threshold is None else threshold - item_config_data_dict.update({'params': db_addon_params}) - - # handle tagesmitteltemperatur - elif db_addon_fct == 'tagesmitteltemperatur': - if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") - return - - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None: - self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. 
Item will be ignored.") - return - item_config_data_dict.update({'params': db_addon_params}) - - # handle db_request - elif db_addon_fct == 'db_request': - if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored") - return - - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None: - self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") - return - - if self.parse_debug: - self.logger.debug(f"parse_item: {db_addon_fct=} for item={item.path()}, {db_addon_params=}") - - if not any(param in db_addon_params for param in ('func', 'timeframe')): - self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored, not all mandatory parameters in {db_addon_params=} given. Item will be ignored.") - return - - TIMEFRAMES_2_UPDATECYCLE = {'day': 'daily', - 'week': 'weekly', - 'month': 'monthly', - 'year': 'yearly'} - - _timeframe = db_addon_params.get('group', None) - if not _timeframe: - _timeframe = db_addon_params.get('timeframe', None) - update_cycle = TIMEFRAMES_2_UPDATECYCLE.get(_timeframe) - if update_cycle is None: - self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored. Not able to detect update cycle.") - return - - item_config_data_dict.update({'params': db_addon_params, 'cycle': update_cycle}) - - # debug log item cycle - if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") - - # handle item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) - if (db_addon_startup and db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES) or db_addon_fct in ALL_GEN_ATTRIBUTES: - if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added to be run on startup") - item_config_data_dict.update({'startup': True}) - else: - item_config_data_dict.update({'startup': False}) - - # add item to plugin item dict - self.add_item(item, config_data_dict=item_config_data_dict) - - # handle all items with db_addon_info - elif self.has_iattr(item.conf, 'db_addon_info'): - if self.parse_debug: - self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_info'") - self.add_item(item, config_data_dict={'db_addon': 'info', 'db_addon_fct': f"info_{self.get_iattr_value(item.conf, 'db_addon_info').lower()}", 'database_item': None, 'startup': True}) - - # handle all items with db_addon_admin - elif self.has_iattr(item.conf, 'db_addon_admin'): - if self.parse_debug: - self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_admin'") - self.add_item(item, config_data_dict={'db_addon': 'admin', 'db_addon_fct': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}", 'database_item': None}) - return self.update_item - - # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen - elif self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): - self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") - self.add_item(item, config_data_dict={'db_addon': 'database'}) - return self.update_item - - def update_item(self, item, caller=None, source=None, dest=None): - """ - Handle 
updated item - This method is called, if the value of an item has been updated by SmartHomeNG. - It should write the changed value out to the device (hardware/interface) that is managed by this plugin. - - :param item: item to be updated towards the plugin - :param caller: if given it represents the callers name - :param source: if given it represents the source - :param dest: if given it represents the dest - """ - - if self.alive and caller != self.get_shortname(): - # handle database items - if item in self._database_items(): - # self.logger.debug(f"update_item was called with item {item.property.path} with value {item()} from caller {caller}, source {source} and dest {dest}") - if not self.startup_finished: - self.logger.info(f"Handling of 'on-change' is paused for startup. No updated will be processed.") - elif self.suspended: - self.logger.info(f"Plugin is suspended. No updated will be processed.") - else: - self.logger.info(f"+ Updated item '{item.path()}' with value {item()} will be put to queue for processing. {self.item_queue.qsize() + 1} items to do.") - self.item_queue.put((item, item())) - - # handle admin items - elif self.has_iattr(item.conf, 'db_addon_admin'): - self.logger.debug(f"update_item was called with item {item.property.path} from caller {caller}, source {source} and dest {dest}") - if self.get_iattr_value(item.conf, 'db_addon_admin') == 'suspend': - self.suspend(item()) - elif self.get_iattr_value(item.conf, 'db_addon_admin') == 'recalc_all': - self.execute_all_items() - item(False, self.get_shortname()) - elif self.get_iattr_value(item.conf, 'db_addon_admin') == 'clean_cache_values': - self._init_cache_dicts() - item(False, self.get_shortname()) - - def execute_due_items(self) -> None: - """ - Execute all items, which are due - """ - - if self.execute_debug: - self.logger.debug("execute_due_items called") - - if not self.suspended: - _todo_items = self._create_due_items() - self.logger.info(f"{len(_todo_items)} items are due and will be calculated.") - [self.item_queue.put(i) for i in _todo_items] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") - - def execute_startup_items(self) -> None: - """ - Execute all startup_items - """ - if self.execute_debug: - self.logger.debug("execute_startup_items called") - - if not self.suspended: - self.logger.info(f"{len(self._startup_items())} items will be calculated at startup.") - [self.item_queue.put(i) for i in self._startup_items()] - self.startup_finished = True - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") - - def execute_static_items(self) -> None: - """ - Execute all static items - """ - if self.execute_debug: - self.logger.debug("execute_static_item called") - - if not self.suspended: - self.logger.info(f"{len(self._static_items())} items will be calculated.") - [self.item_queue.put(i) for i in self._static_items()] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") - - def execute_info_items(self) -> None: - """ - Execute all info items - """ - if self.execute_debug: - self.logger.debug("execute_info_items called") - - if not self.suspended: - self.logger.info(f"{len(self._info_items())} items will be calculated.") - [self.item_queue.put(i) for i in self._info_items()] - else: - self.logger.info(f"Plugin is suspended. 
No items will be calculated.") - - def execute_all_items(self) -> None: - """ - Execute all ondemand items - """ - - if not self.suspended: - self.logger.info(f"Values for all {len(self._ondemand_items())} items with 'db_addon_fct' attribute, which are not 'on-change', will be calculated!") - [self.item_queue.put(i) for i in self._ondemand_items()] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") - - def work_item_queue(self) -> None: - """ - Handles item queue were all to be executed items were be placed in. - """ - - while self.alive: - try: - queue_entry = self.item_queue.get(True, 10) - self.logger.info(f" Queue Entry: '{queue_entry}' received.") - except queue.Empty: - self.active_queue_item = '-' - pass - else: - if isinstance(queue_entry, tuple): - item, value = queue_entry - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item '{item.path()}' with {value=} will be processed.") - self.active_queue_item = str(item.path()) - self.handle_onchange(item, value) - else: - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-demand' item '{queue_entry.path()}' will be processed.") - self.active_queue_item = str(queue_entry.path()) - self.handle_ondemand(queue_entry) - - def handle_ondemand(self, item: Item) -> None: - """ - Calculate value for requested item, fill cache dicts and set item value. - - :param item: Item for which value will be calculated - """ - - # set/get parameters - item_config = self.get_item_config(item) - db_addon = item_config['db_addon'] - db_addon_fct = item_config['db_addon_fct'] - database_item = item_config['database_item'] - ignore_value = item_config.get('ignore_value') - result = None - self.logger.debug(f"handle_ondemand: Item={item.path()} with {item_config=}") - - # handle info functions - if db_addon == 'info': - # handle info_db_version - if db_addon_fct == 'info_db_version': - result = self._get_db_version() - self.logger.debug(f"handle_ondemand: info_db_version {result=}") - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") - - # handle general functions - elif db_addon_fct in ALL_GEN_ATTRIBUTES: - # handle oldest_value - if db_addon_fct == 'general_oldest_value': - result = self._get_oldest_value(database_item) - - # handle oldest_log - elif db_addon_fct == 'general_oldest_log': - result = self._get_oldest_log(database_item) - - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") - - # handle item starting with 'verbrauch_' - elif db_addon_fct in ALL_VERBRAUCH_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'verbrauch' detected.") - - result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) - - if result and result < 0: - self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. 
Something seems to be wrong.") - - # handle item starting with 'zaehlerstand_' of format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_woche_minus1' - elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'zaehlerstand' detected.") - - result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) - - # handle item starting with 'minmax_' - elif db_addon_fct in ALL_HISTORIE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'minmax' detected.") - - result = self._handle_min_max(database_item, db_addon_fct, ignore_value)[0][1] - - # handle item starting with 'tagesmitteltemperatur_' - elif db_addon_fct in ALL_TAGESMITTEL_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'tagesmitteltemperatur' detected.") - - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value)[0][1] - - # handle item starting with 'serie_' - elif db_addon_fct in ALL_SERIE_ATTRIBUTES: - if 'minmax' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_minmax' detected.") - - result = self._handle_min_max(database_item, db_addon_fct, ignore_value) - - elif 'verbrauch' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_verbrauch' detected.") - - result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) - - elif 'zaehlerstand' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_zaehlerstand' detected.") - - result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) - - elif 'tagesmitteltemperatur' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_tagesmittelwert' detected.") - - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value) - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") - - # handle kaeltesumme - elif db_addon_fct == 'kaeltesumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_kaeltesumme(**db_addon_params) - - # handle waermesumme - elif db_addon_fct == 'waermesumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_waermesumme(**db_addon_params) - - # handle gruenlandtempsumme - elif db_addon_fct == 'gruenlandtempsumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_gruenlandtemperatursumme(**db_addon_params) - - # handle wachstumsgradtage - elif db_addon_fct == 'wachstumsgradtage': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_wachstumsgradtage(**db_addon_params) - - # 
handle tagesmitteltemperatur - elif db_addon_fct == 'tagesmitteltemperatur': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value, db_addon_params) - - # handle db_request - elif db_addon_fct == 'db_request': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected with {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - if db_addon_params.keys() & {'func', 'item', 'timeframe'}: - result = self._query_item(**db_addon_params) - else: - self.logger.error(f"Attribute 'db_addon_params' not containing needed params for Item {item.id} with {db_addon_fct=}.") - - # handle everything else - else: - self.logger.warning(f"handle_ondemand: Function '{db_addon_fct}' for item {item.path()} not defined or found.") - return - - # log result - if self.execute_debug: - self.logger.debug(f"handle_ondemand: result is {result} for item '{item.path()}' with '{db_addon_fct=}'") - - if result is None: - self.logger.info(f" Result was None; No item value will be set.") - return - - # set item value and put data into plugin_item_dict - self.logger.info(f" Item value for '{item.path()}' will be set to {result}") - item_config = self.get_item_config(item) - item_config.update({'value': result}) - item(result, self.get_shortname()) - - def handle_onchange(self, updated_item: Item, value: float) -> None: - """ - Get item and item value for which an update has been detected, fill cache dicts and set item value. - - :param updated_item: Item which has been updated - :param value: Value of updated item - """ - - if self.onchange_debug: - self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") - - relevant_item_list = self.get_item_list('database_item', updated_item) - if self.onchange_debug: - self.logger.debug(f"Following items where identified for update: {relevant_item_list}.") - - for item in relevant_item_list: - item_config = self.get_item_config(item) - _database_item = item_config['database_item'] - _db_addon_fct = item_config['db_addon_fct'] - _ignore_value = item_config['ignore_value'] - _var = _db_addon_fct.split('_') - - # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... - if _db_addon_fct.startswith('minmax') and len(_var) == 3 and _var[2] in ['min', 'max']: - _timeframe = convert_timeframe(_var[1]) - _func = _var[2] - _cache_dict = self.current_values[_timeframe] - if not _timeframe: - return - - if self.onchange_debug: - self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.path()} with {_func=} detected. 
Check for update of _cache_dicts and item value.") - - _initial_value = False - _new_value = None - - # make sure, that database item is in cache dict - if _database_item not in _cache_dict: - _cache_dict[_database_item] = {} - if _cache_dict[_database_item].get(_func) is None: - _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value': _ignore_value} - _cached_value = self._query_item(**_query_params)[0][1] - _initial_value = True - if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_cached_value}.") - else: - _cached_value = _cache_dict[_database_item][_func] - - if _cached_value: - # check value for update of cache dict - if _func == 'min' and value < _cached_value: - _new_value = value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} lower then current min_value={_cached_value}. _cache_dict will be updated") - elif _func == 'max' and value > _cached_value: - _new_value = value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} higher then current max_value={_cached_value}. _cache_dict will be updated") - else: - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} will not change max/min for period.") - else: - _cached_value = value - - if _initial_value and not _new_value: - _new_value = _cached_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") - - if _new_value: - _cache_dict[_database_item][_func] = _new_value - self.logger.info(f"Item value for '{item.path()}' with func={_func} will be set to {_new_value}") - item_config = self.get_item_config(item) - item_config.update({'value': _new_value}) - item(_new_value, self.get_shortname()) - else: - self.logger.info(f"Received value={value} is not influencing min / max value. Therefore item {item.path()} will not be changed.") - - # handle verbrauch on-change items ending with heute, woche, monat, jahr - elif _db_addon_fct.startswith('verbrauch') and len(_var) == 2 and _var[1] in ['heute', 'woche', 'monat', 'jahr']: - _timeframe = convert_timeframe(_var[1]) - _cache_dict = self.previous_values[_timeframe] - if _timeframe is None: - return - - # make sure, that database item is in cache dict - if _database_item not in _cache_dict: - _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value': _ignore_value} - _cached_value = self._query_item(**_query_params)[0][1] - _cache_dict[_database_item] = _cached_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. Value {_cached_value} has been added.") - else: - _cached_value = _cache_dict[_database_item] - - # calculate value, set item value, put data into plugin_item_dict - if _cached_value is not None: - _new_value = round(value - _cached_value, 1) - self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") - item_config = self.get_item_config(item) - item_config.update({'value': _new_value}) - item(_new_value, self.get_shortname()) - else: - self.logger.info(f"Value for end of last {_timeframe} not available. 
No item value will be set.") - - def _update_database_items(self): - for item in self._database_item_path_items(): - item_config = self.get_item_config(item) - database_item_path = item_config.get('database_item') - database_item = self.items.return_item(database_item_path) - - if database_item is None: - self.logger.warning(f"Database-Item for Item with config item path for Database-Item {database_item_path!r} not found. Item '{item.path()}' will be removed from plugin.") - self.remove_item(item) - else: - item_config.update({'database_item': database_item}) - - @property - def log_level(self): - return self.logger.getEffectiveLevel() - - def queue_backlog(self): - return self.item_queue.qsize() - - def db_version(self): - return self._get_db_version() - - def _startup_items(self) -> list: - return self.get_item_list('startup', True) - - def _onchange_items(self) -> list: - return self.get_item_list('cycle', 'on-change') - - def _daily_items(self) -> list: - return self.get_item_list('cycle', 'daily') - - def _weekly_items(self) -> list: - return self.get_item_list('cycle', 'weekly') - - def _monthly_items(self) -> list: - return self.get_item_list('cycle', 'monthly') - - def _yearly_items(self) -> list: - return self.get_item_list('cycle', 'yearly') - - def _static_items(self) -> list: - return self.get_item_list('cycle', 'static') - - def _admin_items(self) -> list: - return self.get_item_list('db_addon', 'admin') - - def _info_items(self) -> list: - return self.get_item_list('db_addon', 'info') - - def _database_items(self) -> list: - return self.get_item_list('db_addon', 'database') - - def _database_item_path_items(self) -> list: - return self.get_item_list('database_item_path', True) - - def _ondemand_items(self) -> list: - return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() - - ############################## - # Public functions / Using item_path - ############################## - - def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str]) -> Union[int, None]: - """ - Query database for gruenlandtemperatursumme for given year or year - https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - - Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. - Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. - Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. 
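A rough illustration of the weighting rule described above (a minimal sketch with made-up names and a plain (month, daily mean temperature) input format, not the plugin's own implementation):

    def gruenland_sum(daily_means):
        # daily_means: list of (month, daily_mean_temperature) tuples for one year
        total = 0.0
        for month, temp in daily_means:
            if temp <= 0:
                continue                # only positive daily means are accumulated
            if month == 1:
                temp *= 0.5             # January is weighted with 50 %
            elif month == 2:
                temp *= 0.75            # February is weighted with 75 %
            total += temp
        return int(round(total, 0))

    # gruenland_sum([(1, 4.0), (2, 8.0), (3, 10.0)]) -> 2.0 + 6.0 + 10.0 = 18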
- - :param item_path: item object or item_id for which the query should be done - :param year: year the gruenlandtemperatursumme should be calculated for - :return: gruenlandtemperatursumme - """ - - item = self.items.return_item(item_path) - if item: - return self._handle_gruenlandtemperatursumme(item, year) - - def waermesumme(self, item_path: str, year, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: - """ - Query database for waermesumme for given year or year/month - https://de.wikipedia.org/wiki/W%C3%A4rmesumme - - :param item_path: item object or item_id for which the query should be done - :param year: year the waermesumme should be calculated for - :param month: month the waermesumme should be calculated for - :param threshold: threshold for temperature - :return: waermesumme - """ - - item = self.items.return_item(item_path) - if item: - return self._handle_waermesumme(item, year, month, threshold) - - def kaeltesumme(self, item_path: str, year, month: Union[int, str] = None) -> Union[int, None]: - """ - Query database for kaeltesumme for given year or year/month - https://de.wikipedia.org/wiki/K%C3%A4ltesumme - - :param item_path: item object or item_id for which the query should be done - :param year: year the kaeltesumme should be calculated for - :param month: month the kaeltesumme should be calculated for - :return: kaeltesumme - """ - - item = self.items.return_item(item_path) - if item: - return self._handle_kaeltesumme(item, year, month) - - def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: int = None) -> list: - """ - Query database for tagesmitteltemperatur - https://www.dwd.de/DE/leistungen/klimadatendeutschland/beschreibung_tagesmonatswerte.html - - :param item_path: item object or item_id for which the query should be done - :param timeframe: timeincrement for determination - :param count: number of time increments starting from now to the left (into the past) - :return: tagesmitteltemperatur - """ - - if not timeframe: - timeframe = 'day' - - if not count: - count = 0 - - item = self.items.return_item(item_path) - if item: - return self._handle_tagesmitteltemperatur(database_item=item, db_addon_fct='tagesmitteltemperatur', params={'timeframe': timeframe, 'count': count}) - - def wachstumsgradtage(self, item_path: str, year: Union[int, str], threshold: int) -> Union[int, None]: - """ - Query database for wachstumsgradtage - https://de.wikipedia.org/wiki/Wachstumsgradtag - - :param item_path: item object or item_id for which the query should be done - :param year: year the wachstumsgradtage should be calculated for - :param threshold: Temperature in °C as threshold: Ein Tage mit einer Tagesdurchschnittstemperatur oberhalb des Schellenwertes gilt als Wachstumsgradtag - :return: wachstumsgradtage - """ - - item = self.items.return_item(item_path) - if item: - return self._handle_wachstumsgradtage(item, year, threshold) - - def temperaturserie(self, item_path: str, year: Union[int, str], method: str) -> Union[list, None]: - """ - Query database for wachstumsgradtage - https://de.wikipedia.org/wiki/Wachstumsgradtag - - :param item_path: item object or item_id for which the query should be done - :param year: year the wachstumsgradtage should be calculated for - :param method: Calculation method - :return: wachstumsgradtage - """ - - item = self.items.return_item(item_path) - if item: - return self._handle_temperaturserie(item, year, method) - - def query_item(self, func: str, item_path: str, timeframe: str, start: int = 
None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: - item = self.items.return_item(item_path) - if item is None: - return [] - - return self._query_item(func, item, timeframe, start, end, group, group2, ignore_value) - - def fetch_log(self, func: str, item_path: str, timeframe: str, start: int = None, end: int = 0, count: int = None, group: str = None, group2: str = None, ignore_value=None) -> list: - """ - Query database, format response and return it - - :param func: function to be used at query - :param item_path: item str or item_id for which the query should be done - :param timeframe: time increment für definition of start, end, count (day, week, month, year) - :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) - :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning today, end of last week, end of last month, end of last year) - :param count: start of timeframe defined by number of time increments starting from end to the left (into the past) - :param group: first grouping parameter (default = None, possible values: day, week, month, year) - :param group2: second grouping parameter (default = None, possible values: day, week, month, year) - :param ignore_value: value of val_num, which will be ignored during query - - :return: formatted query response - """ - item = self.items.return_item(item_path) - - if count: - start, end = count_to_start(count) - - if item and start and end: - return self._query_item(func=func, item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value=ignore_value) - else: - return [] - - def fetch_raw(self, query: str, params: dict = None) -> Union[list, None]: - """ - Fetch database with given query string and params - - :param query: database query to be executed - :param params: query parameters - - :return: result of database query - """ - - if params is None: - params = {} - - formatted_sql = sqlvalidator.format_sql(query) - sql_query = sqlvalidator.parse(formatted_sql) - - if not sql_query.is_valid(): - self.logger.error(f"fetch_raw: Validation of query failed with error: {sql_query.errors}") - return - - return self._fetchall(query, params) - - def suspend(self, state: bool = False) -> bool: - """ - Will pause value evaluation of plugin - - """ - - if state: - self.logger.warning("Plugin is set to 'suspended'. Queries to database will not be made until suspension is cancelled.") - self.suspended = True - self._clear_queue() - else: - self.logger.warning("Plugin suspension cancelled. 
Queries to database will be resumed.") - self.suspended = False - - # write back value to item, if one exists - for item in self.get_item_list('db_addon', 'admin'): - item_config = self.get_item_config(item) - if item_config['db_addon_fct'] == 'suspend': - item(self.suspended, self.get_shortname()) - - return self.suspended - - ############################## - # Support stuff / Using Item Object - ############################## - - def _handle_min_max(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: - """ - Handle execution of min/max calculation - """ - # handle all on_change functions of format 'minmax_timeframe_function' like 'minmax_heute_max' - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on-change function with 'min/max' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - group = None - group2 = None - - # handle all 'last' functions in format 'minmax_last_window_function' like 'minmax_last_24h_max' - if len(_var) == 4 and _var[1] == 'last': - func = _var[3] - timeframe = convert_timeframe(_var[2][-1:]) - start = to_int(_var[2][:-1]) - end = 0 - log_text = 'minmax_last' - if timeframe is None or start is None: - return - - # handle all functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' - elif len(_var) == 4 and _var[2].startswith('minus'): - func = _var[3] # min, max, avg - timeframe = convert_timeframe(_var[1]) # day, week, month, year - start = to_int(_var[2][-1]) # 1, 2, 3, ... - end = start - log_text = 'minmax' - if timeframe is None or start is None: - return - - # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_minmax_monat_min_15m' - elif _var[0] == 'serie' and _var[1] == 'minmax': - timeframe = convert_timeframe(_var[2]) - func = _var[3] - start = to_int(_var[4][:-1]) - end = 0 - group = convert_timeframe(_var[4][len(_var[4]) - 1]) - log_text = 'serie_min/max/avg' - if timeframe is None or start is None or group is None: - return - else: - self.logger.info(f"_handle_min_max: No adequate function for {db_addon_fct=} found.") - return - - if func not in ALLOWED_MINMAX_FUNCS: - self.logger.info(f"_handle_min_max: Called {func=} not in allowed functions={ALLOWED_MINMAX_FUNCS}.") - return - - query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - - if self.execute_debug: - self.logger.debug(f"_handle_min_max: db_addon_fct={log_text} function detected. 
{query_params=}") - - return self._query_item(**query_params) - - def _handle_zaehlerstand(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: - """ - Handle execution of Zaehlerstand calculation - """ - # handle all on_change functions - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on-change function with 'zaehlerstand' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - group = None - group2 = None - - # handle functions starting with 'zaehlerstand' like 'zaehlerstand_heute_minus1' - if len(_var) == 3 and _var[1] == 'zaehlerstand': - func = 'max' - timeframe = convert_timeframe(_var[1]) - start = to_int(_var[2][-1]) - end = start - log_text = 'zaehlerstand' - if timeframe is None or start is None: - return - - # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_zaehlerstand_tag_30d' - elif _var[0] == 'serie' and _var[1] == 'zaehlerstand': - func = 'max' - timeframe = convert_timeframe(_var[2]) - start = to_int(_var[3][:-1]) - end = 0 - group = convert_timeframe(_var[3][len(_var[3]) - 1]) - log_text = 'serie_min/max/avg' - if timeframe is None or start is None or group is None: - return - else: - self.logger.info(f"_handle_zaehlerstand: No adequate function for {db_addon_fct=} found.") - return - - query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - - if self.execute_debug: - self.logger.debug(f"_handle_zaehlerstand: db_addon_fct={log_text} function detected. {query_params=}") - - return self._query_item(**query_params) - - def _handle_verbrauch(self, database_item: Item, db_addon_fct: str, ignore_value=None): - """ - Handle execution of verbrauch calculation - """ - - self.logger.debug(f"_handle_verbrauch called with {database_item=} and {db_addon_fct=}") - - def consumption_calc(c_start, c_end) -> Union[float, None]: - """ - Handle query for Verbrauch - - :param c_start: beginning of timeframe - :param c_end: end of timeframe - """ - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc called with {database_item=}, {timeframe=}, {c_start=}, {c_end=}") - - _result = None - _query_params = {'item': database_item, 'timeframe': timeframe} - - # get value for end and check it; - _query_params.update({'func': 'max', 'start': c_end, 'end': c_end}) - value_end = self._query_item(**_query_params)[0][1] - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc {value_end=}") - - if value_end is None: # if None (Error) return - return - elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null - return 0 - - # get value for start and check it; - _query_params.update({'func': 'min', 'start': c_end, 'end': c_end}) - value_start = self._query_item(**_query_params)[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc {value_start=}") - - if value_start is None: # if None (Error) return - return - - if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. - self.logger.info(f"No DB Entry found for requested start date. 
Looking for next DB entry.") - _query_params.update({'func': 'next', 'start': c_start, 'end': c_end}) - value_start = self._query_item(**_query_params)[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc: next available value is {value_start=}") - - # calculate result - if value_start is not None: - return round(value_end - value_start, 1) - - # handle all on_change functions of format 'verbrauch_timeframe' like 'verbrauch_heute' - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on_change function with 'verbrauch' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - - # handle all functions 'verbrauch' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' - if len(_var) == 3 and _var[1] in ['heute', 'woche', 'monat', 'jahr'] and _var[2].startswith('minus'): - timeframe = convert_timeframe(_var[1]) - timedelta = to_int(_var[2][-1]) - if timedelta is None or timeframe is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") - - return consumption_calc(c_start=timedelta + 1, c_end=timedelta) - - # handle all functions of format 'verbrauch_function_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' - elif len(_var) == 5 and _var[1] == 'rolling' and _var[4].startswith('minus'): - func = _var[1] - window = _var[2] # 12m - window_inc = to_int(window[:-1]) # 12 - window_dur = convert_timeframe(window[-1]) # day, week, month, year - timeframe = convert_timeframe(_var[3]) # day, week, month, year - timedelta = to_int(_var[4][-1]) # 1 - endtime = timedelta - - if window_inc is None or window_dur is None or timeframe is None or timedelta is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{func}' function detected. {window=}, {timeframe=}, {timedelta=}") - - if window_dur in ['day', 'week', 'month', 'year']: - starttime = convert_duration(timeframe, window_dur) * window_inc - return consumption_calc(c_start=starttime, c_end=endtime) - - # handle all functions of format 'verbrauch_timeframe_timedelta' like 'verbrauch_jahreszeitraum_minus1' - elif len(_var) == 3 and _var[1] == 'jahreszeitraum' and _var[2].startswith('minus'): - timeframe = convert_timeframe(_var[1]) # day, week, month, year - timedelta = to_int(_var[2][-1]) # 1 oder 2 oder 3 - if timedelta is None or timeframe is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") - - today = datetime.date.today() - year = today.year - timedelta - start_date = datetime.date(year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 
00:00:00 ergibt - end_date = today - relativedelta(years=timedelta) - start = (today - start_date).days - end = (today - end_date).days - - return consumption_calc(c_start=start, c_end=end) - - # handle all functions of format 'serie_verbrauch_timeframe_countgroup' like 'serie_verbrauch_tag_30d' - elif db_addon_fct.startswith('serie_') and len(_var) == 4: - self.logger.debug(f"_handle_verbrauch serie reached") - func = 'diff_max' - timeframe = convert_timeframe(_var[2]) - start = to_int(_var[3][:-1]) - group = convert_timeframe(_var[3][len(_var[3]) - 1]) - group2 = None - if timeframe is None or start is None or group is None: - self.logger.warning(f"For calculating '{db_addon_fct}' not all mandatory parameters given. {timeframe=}, {start=}, {group=}") - return - - query_params = {'func': func, 'item': database_item, 'timeframe': timeframe, 'start': start, 'end': 0, 'group': group, 'group2': group2, 'ignore_value': ignore_value} - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: 'serie_verbrauch_timeframe_countgroup' function detected. {query_params=}") - - return self._query_item(**query_params) - - else: - self.logger.info(f"_handle_verbrauch: No adequate function for {db_addon_fct=} found.") - return - - def _handle_tagesmitteltemperatur(self, database_item: Item, db_addon_fct: str, ignore_value=None, params: dict = None) -> list: - """ - Query database for tagesmitteltemperatur - - :param database_item: item object or item_id for which the query should be done - :param db_addon_fct - :param ignore_value - :param params: - :return: tagesmitteltemperatur - """ - - # handle all on_change functions - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on_change function with 'tagesmitteltemperatur' detected; will be calculated by next change of database item") - return [] - - _var = db_addon_fct.split('_') - group = None - group2 = None - - # handle tagesmitteltemperatur - if db_addon_fct == 'tagesmitteltemperatur': - if not params: - return [] - - func = 'max' - timeframe = convert_timeframe(params.get('timeframe')) - log_text = 'tagesmitteltemperatur' - count = to_int(params.get('count')) - if timeframe is None or not count: - return [] - - start, end = count_to_start(count) - - # handle 'tagesmittelwert_timeframe_timedelta' like 'tagesmittelwert_heute_minus1' - elif len(_var) == 3 and _var[2].startswith('minus'): - func = 'max' - timeframe = convert_timeframe(_var[1]) - start = to_int(_var[2][-1]) - end = start - log_text = 'tagesmittelwert_timeframe_timedelta' - if timeframe is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_countgroup' like 'serie_tagesmittelwert_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 3: - # 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, - func = 'max' - timeframe = 'year' - log_text = 'serie_tagesmittelwert_countgroup' - start = to_int(_var[2][:-1]) - end = 0 - group = convert_timeframe(_var[2][len(_var[2]) - 1]) - if group is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_group2_count_group' like 'serie_tagesmittelwert_stunde_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 4: - # 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, - # 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, - func = 'avg1' - timeframe = 
'day' - log_text = 'serie_tagesmittelwert_group2_countgroup' - start = to_int(_var[3][:-1]) - end = 0 - group = 'hour' - group2 = convert_timeframe(_var[3][len(_var[3]) - 1]) - if group2 is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_group2_start_endgroup' like 'serie_tagesmittelwert_stunde_30_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 5: - timeframe = 'day' - method = 'raw' - start = to_int(_var[3]) - end = to_int(_var[4][:-1]) - if start is None or end is None: - return [] - - return self._prepare_temperature_list(database_item=database_item, timeframe=timeframe, start=start, end=end, method=method) - - # handle everything else - else: - self.logger.info(f"_handle_tagesmitteltemperatur: No adequate function for {db_addon_fct=} found.") - return [] - - query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - - if self.execute_debug: - self.logger.debug(f"_handle_tagesmitteltemperatur: db_addon_fct={log_text} function detected. {query_params=}") - - return self._query_item(**query_params) - - def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: - """ - Query database for kaeltesumme for given year or year/month - https://de.wikipedia.org/wiki/K%C3%A4ltesumme - - :param database_item: item object or item_id for which the query should be done - :param year: year the kaeltesumme should be calculated for - :param month: month the kaeltesumme should be calculated for - :return: kaeltesumme - """ - - self.logger.debug(f"_handle_kaeltesumme called with {database_item=}, {year=}, {month=}") - - # check validity of given year - if not valid_year(year): - self.logger.error(f"_handle_kaeltesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") - return - - # define year - if year == 'current': - if datetime.date.today() < datetime.date(int(datetime.date.today().year), 9, 21): - year = datetime.date.today().year - 1 - else: - year = datetime.date.today().year - - # define start_date and end_date - if month is None: - start_date = datetime.date(int(year), 9, 21) - end_date = datetime.date(int(year) + 1, 3, 22) - elif valid_month(month): - start_date = datetime.date(int(year), int(month), 1) - end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) - else: - self.logger.error(f"_handle_kaeltesumme: Month for item={database_item.path()} was {month}. This is not a valid month. Query cancelled.") - return - - # define start / end - today = datetime.date.today() - if start_date > today: - self.logger.error(f"_handle_kaeltesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") - return - - start = (today - start_date).days - end = (today - end_date).days if end_date < today else 0 - if start < end: - self.logger.error(f"_handle_kaeltesumme: End time for query of item={database_item.path()} is before start time. 
Query cancelled.") - return - - # get raw data as list - self.logger.debug("_handle_kaeltesumme: Try to get raw data") - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') - if self.execute_debug: - self.logger.debug(f"_handle_kaeltesumme: raw_value_list={raw_data=}") - - # calculate value - if raw_data is None: - return - elif isinstance(raw_data, list): - # akkumulieren alle negativen Werte - ks = 0 - for entry in raw_data: - if entry[1] < 0: - ks -= entry[1] - return int(round(ks, 0)) - - def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: - """ - Query database for waermesumme for given year or year/month - https://de.wikipedia.org/wiki/W%C3%A4rmesumme - - :param database_item: item object or item_id for which the query should be done - :param year: year the waermesumme should be calculated for; "current" for current year - :param month: month the waermesumme should be calculated for - :return: waermesumme - """ - - # start: links / älterer Termin end: rechts / jüngerer Termin - - # check validity of given year - if not valid_year(year): - self.logger.error(f"_handle_waermesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") - return - - # define year - if year == 'current': - year = datetime.date.today().year - - # define start_date, end_date - if month is None: - start_date = datetime.date(int(year), 1, 1) - end_date = datetime.date(int(year), 9, 21) - elif valid_month(month): - start_date = datetime.date(int(year), int(month), 1) - end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) - else: - self.logger.error(f"_handle_waermesumme: Month for item={database_item.path()} was {month}. This is not a valid month. Query cancelled.") - return - - # check start_date - today = datetime.date.today() - if start_date > today: - self.logger.info(f"_handle_waermesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") - return - - # define start / end - start = (today - start_date).days - end = (today - end_date).days if end_date < today else 0 - - # check end - if start < end: - self.logger.error(f"_handle_waermesumme: End time for query of item={database_item.path()} is before start time. 
Query cancelled.") - return - - # get raw data as list - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') - if self.execute_debug: - self.logger.debug(f"_handle_waermesumme: raw_value_list={raw_data=}") - - # set threshold to min 0 - threshold = max(0, threshold) - - # calculate value - if raw_data is None: - return - elif isinstance(raw_data, list): - # akkumulieren alle Werte, größer/gleich Schwellenwert - ws = 0 - for entry in raw_data: - if entry[1] >= threshold: - ws += entry[1] - return int(round(ws, 0)) - - def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, str]) -> Union[int, None]: - """ - Query database for gruenlandtemperatursumme for given year or year/month - https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - - :param database_item: item object for which the query should be done - :param year: year the gruenlandtemperatursumme should be calculated for - :return: gruenlandtemperatursumme - """ - - if not valid_year(year): - self.logger.error(f"_handle_gruenlandtemperatursumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") - return - - # define year - if year == 'current': - year = datetime.date.today().year - - # define start_date, end_date - start_date = datetime.date(int(year), 1, 1) - end_date = datetime.date(int(year), 9, 21) - - # check start_date - today = datetime.date.today() - if start_date > today: - self.logger.info(f"_handle_gruenlandtemperatursumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") - return - - # define start / end - start = (today - start_date).days - end = (today - end_date).days if end_date < today else 0 - - # check end - if start < end: - self.logger.error(f"_handle_gruenlandtemperatursumme: End time for query of item={database_item.path()} is before start time. Query cancelled.") - return - - # get raw data as list - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') - if self.execute_debug: - self.logger.debug(f"_handle_gruenlandtemperatursumme: raw_value_list={raw_data}") - - # calculate value - if raw_data is None: - return - elif isinstance(raw_data, list): - # akkumulieren alle positiven Tagesmitteltemperaturen, im Januar gewichtet mit 50%, im Februar mit 75% - gts = 0 - for entry in raw_data: - timestamp, value = entry - if value > 0: - dt = datetime.datetime.fromtimestamp(timestamp / 1000) - if dt.month == 1: - value = value * 0.5 - elif dt.month == 2: - value = value * 0.75 - gts += value - return int(round(gts, 0)) - - def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], method: int = 0, threshold: int = 10): - """ - Calculate "wachstumsgradtage" for given year with temperature thershold - https://de.wikipedia.org/wiki/Wachstumsgradtag - - :param database_item: item object or item_id for which the query should be done - :param year: year the wachstumsgradtage should be calculated for - :param method: calculation method to be used - :param threshold: temperature in °C as threshold for evaluation - :return: wachstumsgradtage - """ - - if not valid_year(year): - self.logger.error(f"_handle_wachstumsgradtage: Year for item={database_item.path()} was {year}. This is not a valid year. 
Query cancelled.") - return - - # define year - if year == 'current': - year = datetime.date.today().year - - # define start_date, end_date - start_date = datetime.date(int(year), 1, 1) - end_date = datetime.date(int(year), 9, 21) - - # check start_date - today = datetime.date.today() - if start_date > today: - self.logger.info(f"_handle_wachstumsgradtage: Start time for query of item={database_item.path()} is in future. Query cancelled.") - return - - # define start / end - start = (today - start_date).days - end = (today - end_date).days if end_date < today else 0 - - # check end - if start < end: - self.logger.error(f"_handle_wachstumsgradtage: End time for query of item={database_item.path()} is before start time. Query cancelled.") - return - - # get raw data as list - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax') - if self.execute_debug: - self.logger.debug(f"_handle_wachstumsgradtage: raw_value_list={raw_data}") - - # calculate value - if raw_data is None: - return - - elif isinstance(raw_data, list): - # Die Berechnung des einfachen Durchschnitts // akkumuliere positive Differenz aus Mittelwert aus Tagesminimaltemperatur und Tagesmaximaltemperatur limitiert auf 30°C und Schwellenwert - wgte = 0 - wgte_list = [] - if method == 0 or method == 10: - self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Berechnung des einfachen Durchschnitts'.") - for entry in raw_data: - timestamp, min_val, max_val = entry - wgt = (((min_val + min(30, max_val)) / 2) - threshold) - if wgt > 0: - wgte += wgt - wgte_list.append([timestamp, int(round(wgte, 0))]) - if method == 0: - return int(round(wgte, 0)) - else: - return wgte_list - - # Die modifizierte Berechnung des einfachen Durchschnitts. 
// akkumuliere positive Differenz aus Mittelwert aus Tagesminimaltemperatur mit mind Schwellentemperatur und Tagesmaximaltemperatur limitiert auf 30°C und Schwellenwert - elif method == 1 or method == 11: - self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Modifizierte Berechnung des einfachen Durchschnitts'.") - for entry in raw_data: - timestamp, min_val, max_val = entry - wgt = (((max(threshold, min_val) + min(30.0, max_val)) / 2) - threshold) - if wgt > 0: - wgte += wgt - wgte_list.append([timestamp, int(round(wgte, 0))]) - if method == 1: - return int(round(wgte, 0)) - else: - return wgte_list - - # Zähle Tage, bei denen die Tagesmitteltemperatur oberhalb des Schwellenwertes lag - elif method == 2 or method == 12: - self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Anzahl der Tage, bei denen die Tagesmitteltemperatur oberhalb des Schwellenwertes lag'.") - for entry in raw_data: - timestamp, min_val, max_val = entry - wgt = (((min_val + min(30, max_val)) / 2) - threshold) - if wgt > 0: - wgte += 1 - wgte_list.append([timestamp, wgte]) - if method == 0: - return wgte - else: - return wgte_list - - else: - self.logger.info(f"Method for 'Wachstumsgradtag' calculation not defined.'") - - def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], method: str = 'raw'): - """ - provide list of lists having timestamp and temperature(s) per day - - :param database_item: item object or item_id for which the query should be done - :param year: year the wachstumsgradtage should be calculated for - :param method: calculation method to be used - :return: list of temperatures - """ - - if not valid_year(year): - self.logger.error(f"_handle_temepraturserie: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") - return - - # define year - if year == 'current': - year = datetime.date.today().year - - # define start_date, end_date - start_date = datetime.date(int(year), 1, 1) - end_date = datetime.date(int(year), 12, 31) - - # check start_date - today = datetime.date.today() - if start_date > today: - self.logger.info(f"_handle_temepraturserie: Start time for query of item={database_item.path()} is in future. Query cancelled.") - return - - # define start / end - start = (today - start_date).days - end = (today - end_date).days if end_date < today else 0 - - # check end - if start < end: - self.logger.error(f"_handle_temepraturserie: End time for query of item={database_item.path()} is before start time. Query cancelled.") - return - - # check method - if method not in ['hour', 'raw', 'minmax']: - self.logger.error(f"_handle_temepraturserie: Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. 
Query cancelled.") - return - - # get raw data as list - temp_list = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) - if self.execute_debug: - self.logger.debug(f"_handle_temepraturserie: {temp_list=}") - - return temp_list - - def _prepare_temperature_list(self, database_item: Item, timeframe: str, start: int, end: int = 0, ignore_value=None, method: str = 'hour') -> Union[list, None]: - """ - returns list of lists having timestamp and temperature(s) per day - - :param database_item: item object or item_id for which the query should be done - :param timeframe: timeframe for query - :param start: increments for timeframe from now to start - :param end: increments for timeframe from now to end - :param ignore_value: value to be ignored during query - :param method: Calculation method - :return: list of temperatures - """ - - def _create_temp_dict() -> dict: - """create dict based on database query result like {'date1': {'hour1': [temp values], 'hour2': [temp values], ...}, 'date2': {'hour1': [temp values], 'hour2': [temp values], ...}, ...}""" - - _temp_dict = {} - for _entry in raw_data: - dt = datetime.datetime.utcfromtimestamp(_entry[0] / 1000) - date = dt.strftime('%Y-%m-%d') - hour = dt.strftime('%H') - if date not in _temp_dict: - _temp_dict[date] = {} - if hour not in _temp_dict[date]: - _temp_dict[date][hour] = [] - _temp_dict[date][hour].append(_entry[1]) - return _temp_dict - - def _calculate_hourly_average(): - """ calculate hourly average based on list of temperatures and update temp_dict""" - - for _date in temp_dict: - for hour in temp_dict[_date]: - hour_raw_value_list = temp_dict[_date][hour] - # hour_value = round(sum(hour_raw_value_list) / len(hour_raw_value_list), 1) # Durchschnittsbildung über alle Werte der Liste - hour_value = hour_raw_value_list[0] # Nehme den ersten Wert der Liste als Stundenwert (kommt am nächsten an die Definition, den Wert exakt zur vollen Stunden zu nehmen) - temp_dict[_date][hour] = [hour_value] - - def _create_list_timestamp_avgtemp() -> list: - """Create list of list with [[timestamp1, value1], [timestamp2, value2], ...] based on temp_dict""" - - _temp_list = [] - for _date in temp_dict: - - # wenn mehr als 20 Stundenwerte vorliegen, berechne den Tagesdurchschnitt über alle Werte - if len(temp_dict[_date]) >= 20: - _values = sum(list(temp_dict[_date].values()), []) - _values_avg = round(sum(_values) / len(_values), 1) - - # wenn für 00, 06, 12 und 18 Uhr Werte vorliegen, berechne den Tagesdurchschnitt über diese Werte - elif '00' in temp_dict[_date] and '06' in temp_dict[_date] and '12' in temp_dict[_date] and '18' in temp_dict[_date]: - _values_avg = round((temp_dict[_date]['00'][0] + temp_dict[_date]['06'][0] + temp_dict[_date]['12'][0] + temp_dict[_date]['18'][0]) / 4, 1) - - # sonst berechne den Tagesdurchschnitt über alle Werte - else: - _values = sum(list(temp_dict[_date].values()), []) - _values_avg = round(sum(_values) / len(_values), 1) - - _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) - _temp_list.append([_timestamp, _values_avg]) - return _temp_list - - def _create_list_timestamp_minmaxtemp() -> list: - """Create list of list with [[timestamp1, min value1, max_value1], [timestamp2, min value2, max_value2], ...] 
based on temp_dict""" - - _temp_list = [] - for _date in temp_dict: - _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) - _day_values = sum(list(temp_dict[_date].values()), []) - _temp_list.append([_timestamp, min(_day_values), max(_day_values)]) - return _temp_list - - # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt - if method == 'hour': - raw_data = self._query_item(func='avg', item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value=ignore_value) - self.logger.debug(f"{raw_data=}") - - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return - - # create nested dict with temps - temp_dict = _create_temp_dict() - - # create list of list like database query response - temp_list = _create_list_timestamp_avgtemp() - self.logger.debug(f"{temp_list=}") - return temp_list - - # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird hier im Plugin ermittelt ermittelt - elif method == 'raw': - raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) - self.logger.debug(f"{raw_data=}") - - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return - - # create nested dict with temps - temp_dict = _create_temp_dict() - self.logger.debug(f"raw: {temp_dict=}") - - # calculate 'tagesdurchschnitt' and create list of list like database query response - _calculate_hourly_average() - self.logger.debug(f"raw: {temp_dict=}") - - # create list of list like database query response - temp_list = _create_list_timestamp_avgtemp() - self.logger.debug(f"{temp_list=}") - return temp_list - - # temp_list = [[timestamp1, min-value1, max-value1], [timestamp2, min-value2, max-value2], [timestamp3, min-value3, max-value3], ...] 
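# Illustrative sketch (not part of the plugin) of the daily-mean rule implemented in
# _create_list_timestamp_avgtemp above, which the 'hour' and 'raw' branches rely on.
# It assumes a dict of hour -> [temperatures] for a single day; the helper name and
# input format are made up for the example:

def daily_mean(hours: dict) -> float:
    if len(hours) >= 20:
        # enough hourly values available: average over all of them
        values = sum(hours.values(), [])
        return round(sum(values) / len(values), 1)
    if all(h in hours for h in ('00', '06', '12', '18')):
        # otherwise use the 00/06/12/18 o'clock values, if all four are present
        return round(sum(hours[h][0] for h in ('00', '06', '12', '18')) / 4, 1)
    # fallback: average over whatever values exist
    values = sum(hours.values(), [])
    return round(sum(values) / len(values), 1)

# daily_mean({'00': [2.0], '06': [4.0], '12': [10.0], '18': [6.0]}) -> 5.5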
- elif method == 'minmax': - raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) - self.logger.debug(f"{raw_data=}") - - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return - - # create nested dict with temps - temp_dict = _create_temp_dict() - self.logger.debug(f"raw: {temp_dict=}") - - # create list of list like database query response - temp_list = _create_list_timestamp_minmaxtemp() - self.logger.debug(f"{temp_list=}") - return temp_list - - def _create_due_items(self) -> list: - """ - Create set of items which are due and resets cache dicts - - :return: set of items, which need to be processed - - """ - - # täglich zu berechnende Items zur Action Liste hinzufügen - _todo_items = set() - _todo_items.update(set(self._daily_items())) - self.current_values[DAY] = {} - self.previous_values[DAY] = {} - - # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday(self.shtime.today()) == 1: - _todo_items.update(set(self._weekly_items())) - self.current_values[WEEK] = {} - self.previous_values[WEEK] = {} - - # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: - _todo_items.update(set(self._monthly_items())) - self.current_values[MONTH] = {} - self.previous_values[MONTH] = {} - - # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: - _todo_items.update(set(self._yearly_items())) - self.current_values[YEAR] = {} - self.previous_values[YEAR] = {} - - return list(_todo_items) - - def _check_db_existence(self) -> bool: - """ - Check existence of database plugin with given config name - - :return: Status of db existence - """ - - try: - _db_plugin = self.plugins.return_plugin(self.db_configname) - except Exception as e: - self.logger.error(f"Database plugin not loaded, Error was {e}. No need for DatabaseAddOn Plugin.") - return False - else: - if not _db_plugin: - self.logger.error(f"Database plugin not loaded or given ConfigName {self.db_configname} not correct. No need for DatabaseAddOn Plugin.") - return False - else: - self.logger.debug(f"Corresponding plugin 'database' with given config name '{self.db_configname}' found.") - self._db_plugin = _db_plugin - return self._get_db_parameter() - - def _get_db_parameter(self) -> bool: - """ - Get driver of database and connection parameter - - :return: Status of db connection parameters - """ - - try: - self.db_driver = self._db_plugin.get_parameter_value('driver') - except Exception as e: - self.logger.error(f"Error {e} occurred during getting database plugin parameter 'driver'. 
DatabaseAddOn Plugin not loaded.") - return False - else: - if self.db_driver.lower() == 'pymysql': - self.logger.debug(f"Database is of type 'mysql' found.") - if self.db_driver.lower() == 'sqlite3': - self.logger.debug(f"Database is of type 'sqlite' found.") - - # get database plugin parameters - try: - db_instance = self._db_plugin.get_instance_name() - if db_instance != "": - self.db_instance = db_instance - self.item_attribute_search_str = f"{self.item_attribute_search_str}@{self.db_instance}" - self.connection_data = self._db_plugin.get_parameter_value('connect') # pymsql ['host:localhost', 'user:smarthome', 'passwd:smarthome', 'db:smarthome', 'port:3306'] - self.logger.debug(f"Database Plugin available with instance={self.db_instance} and connection={self.connection_data}") - except Exception as e: - self.logger.error(f"Error {e} occurred during getting database plugin parameters. DatabaseAddOn Plugin not loaded.") - return False - else: - return True - - def _initialize_db(self) -> bool: - """ - Initializes database connection - - :return: Status of initialization - """ - - try: - if not self._db.connected(): - # limit connection requests to 20 seconds. - current_time = time.time() - time_delta_last_connect = current_time - self.last_connect_time - # self.logger.debug(f"DEBUG: delta {time_delta_last_connect}") - if time_delta_last_connect > 20: - self.last_connect_time = time.time() - self._db.connect() - else: - self.logger.error(f"_initialize_db: Database reconnect suppressed: Delta time: {time_delta_last_connect}") - return False - except Exception as e: - self.logger.critical(f"_initialize_db: Database: Initialization failed: {e}") - return False - else: - return True - - def _check_db_connection_setting(self) -> None: - """ - Check Setting of DB connection for stable use. - """ - try: - connect_timeout = int(self._get_db_connect_timeout()[1]) - if connect_timeout < self.default_connect_timeout: - self.logger.warning(f"DB variable 'connect_timeout' should be adjusted for proper working to {self.default_connect_timeout}. Current setting is {connect_timeout}. You need to insert adequate entries into /etc/mysql/my.cnf within section [mysqld].") - except Exception: - pass - - try: - net_read_timeout = int(self._get_db_net_read_timeout()[1]) - if net_read_timeout < self.default_net_read_timeout: - self.logger.warning(f"DB variable 'net_read_timeout' should be adjusted for proper working to {self.default_net_read_timeout}. Current setting is {net_read_timeout}. 
You need to insert adequate entries into /etc/mysql/my.cnf within section [mysqld].") - except Exception: - pass - - def _get_oldest_log(self, item: Item) -> int: - """ - Get timestamp of the oldest entry of item from cache dict or get value from db and put it to cache dict - - :param item: Item, for which query should be done - :return: timestamp of the oldest log - """ - - _oldest_log = self.item_cache.get(item, {}).get('oldest_log', None) - - if _oldest_log is None: - item_id = self._get_itemid(item) - _oldest_log = self._read_log_oldest(item_id) - if item not in self.item_cache: - self.item_cache[item] = {} - self.item_cache[item]['oldest_log'] = _oldest_log - - if self.prepare_debug: - self.logger.debug(f"_get_oldest_log for item {item.path()} = {_oldest_log}") - - return _oldest_log - - def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: - """ - Get value of the oldest log of item from cache dict or get value from db and put it to cache dict - - :param item: Item, for which query should be done - :return: oldest value - """ - - _oldest_entry = self.item_cache.get(item, {}).get('_oldest_entry', None) - - if _oldest_entry is not None: - _oldest_value = _oldest_entry[0][4] - else: - item_id = self._get_itemid(item) - validity = False - i = 0 - _oldest_value = -999999999 - while validity is False: - oldest_entry = self._read_log_timestamp(item_id, self._get_oldest_log(item)) - i += 1 - if isinstance(oldest_entry, list) and isinstance(oldest_entry[0], tuple) and len(oldest_entry[0]) >= 4: - if item not in self.item_cache: - self.item_cache[item] = {} - self.item_cache[item]['oldest_entry'] = oldest_entry - _oldest_value = oldest_entry[0][4] - validity = True - elif i == 10: - validity = True - self.logger.error(f"oldest_value for item {item.path()} could not be read; value is set to -999999999") - - if self.prepare_debug: - self.logger.debug(f"_get_oldest_value for item {item.path()} = {_oldest_value}") - - return _oldest_value - - def _get_itemid(self, item: Item) -> int: - """ - Returns the ID of the given item from cache dict or request it from database - - :param item: Item to get the ID for - :return: id of the item within the database - """ - - # self.logger.debug(f"_get_itemid called with item={item.path()}") - _item_id = self.item_cache.get(item, {}).get('id', None) - - if _item_id is None: - row = self._read_item_table(item_path=str(item.path())) - if row and len(row) > 0: - _item_id = int(row[0]) - if item not in self.item_cache: - self.item_cache[item] = {} - self.item_cache[item]['id'] = _item_id - - return _item_id - - def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None]: - """ - Get DB item id for query - - :param item: item, the query should be done for - - """ - - if isinstance(item, Item): - item_id = self._get_itemid(item) - elif isinstance(item, str) and item.isdigit(): - item_id = int(item) - elif isinstance(item, int): - item_id = item - else: - item_id = None - return item_id - - def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: - """ - Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. 
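As an illustration of the start/end semantics (increments of the given timeframe counted back from now), a minimal sketch for timeframe='day' only; the function name, the millisecond unit and the exact day boundaries are assumptions and this is not the plugin's own get_start_end_as_timestamp helper:

    import datetime

    def day_increments_to_ms(start: int, end: int = 0):
        # start=1, end=0 -> from the beginning of yesterday to the end of today
        today = datetime.date.today()
        begin = datetime.datetime.combine(today - datetime.timedelta(days=start), datetime.time.min)
        finish = datetime.datetime.combine(today - datetime.timedelta(days=end), datetime.time.max)
        return int(begin.timestamp() * 1000), int(finish.timestamp() * 1000)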
- - :param func: function to be used at query - :param item: item object or item_id for which the query should be done - :param timeframe: time increment für definition of start, end (day, week, month, year) - :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) - :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning end of today, end of last week, end of last month, end of last year) - :param group: first grouping parameter (default = None, possible values: day, week, month, year) - :param group2: second grouping parameter (default = None, possible values: day, week, month, year) - :param ignore_value: value of val_num, which will be ignored during query - - :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for - """ - - def _handle_query_result(query_result) -> list: - """ - Handle query result containing list - """ - - # if query delivers None, abort - if query_result is None: - # if query delivers None, abort - self.logger.error(f"Error occurred during _query_item. Aborting...") - _result = [[None, None]] - elif len(query_result) == 0: - _result = [[0, 0]] - self.logger.info(f" No values for item in requested timeframe in database found.") - else: - _result = [] - for element in query_result: - timestamp = element[0] - value = element[1] - if timestamp and value is not None: - _result.append([timestamp, round(value, 1)]) - if not _result: - _result = [[None, None]] - - return _result - - if self.prepare_debug: - self.logger.debug(f"_query_item called with {func=}, item={item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value=}") - - # set default result - result = [[None, None]] - - # check correctness of timeframe - if timeframe not in ALLOWED_QUERY_TIMEFRAMES: - self.logger.error(f"_query_item: Requested {timeframe=} for item={item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") - return result - - # check start / end for being int - if isinstance(start, str) and start.isdigit(): - start = int(start) - if isinstance(end, str) and end.isdigit(): - end = int(end) - if not isinstance(start, int) and not isinstance(end, int): - return result - - # check correctness of start / end - if start < end: - self.logger.warning(f"_query_item: Requested {start=} for item={item.path()} is not valid since {start=} < {end=}. Query cancelled.") - return result - - # define item_id - item_id = self._get_itemid(item) - if not item_id: - self.logger.error(f"_query_item: ItemId for item={item.path()} not found. 
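As documented above, start and end are counted in whole timeframes back from now, and start must not be smaller than end. A condensed sketch of how such offsets become a concrete day window, mirroring the day_beginning/day_end helpers further down (illustrative only, not the plugin's code path):

    import datetime
    from dateutil.relativedelta import relativedelta

    def day_window(start: int, end: int) -> tuple:
        # map 'start/end days back from today' to (window begin, window end) datetimes
        today = datetime.datetime.combine(datetime.date.today(), datetime.time.min)
        return (today - relativedelta(days=start),
                today - relativedelta(days=end) + relativedelta(days=1))

    # day_window(7, 0) -> (beginning of the day seven days ago, end of today)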
Query cancelled.") - return result - - # define start and end of query as timestamp in microseconds - ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) - oldest_log = int(self._get_oldest_log(item)) - - if start is None: - ts_start = oldest_log - - if self.prepare_debug: - self.logger.debug(f"_query_item: Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") - - # check if values for end time and start time are in database - if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) - self.logger.info(f"_query_item: Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return result - - if ts_start < oldest_log: - if not self.use_oldest_entry: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return result - else: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") - ts_start = oldest_log - - query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value': ignore_value} - result = _handle_query_result(self._query_log_timestamp(**query_params)) - - if self.prepare_debug: - self.logger.debug(f"_query_item: value for item={item.path()} with {timeframe=}, {func=}: {result}") - - return result - - def _init_cache_dicts(self) -> None: - """ - init all cache dicts - """ - - self.logger.info(f"All cache_dicts will be initiated.") - - self.item_cache = {} - - self.current_values = { - DAY: {}, - WEEK: {}, - MONTH: {}, - YEAR: {} - } - - self.previous_values = { - DAY: {}, - WEEK: {}, - MONTH: {}, - YEAR: {} - } - - def _clear_queue(self) -> None: - """ - Clear working queue - """ - - self.logger.info(f"Working queue will be cleared. Calculation run will end.") - self.item_queue.queue.clear() - - def _work_item_queue_thread_startup(self): - """ - Start a thread to work item queue - """ - - try: - _name = 'plugins.' 
+ self.get_fullname() + '.work_item_queue' - self.work_item_queue_thread = threading.Thread(target=self.work_item_queue, name=_name) - self.work_item_queue_thread.daemon = False - self.work_item_queue_thread.start() - self.logger.debug("Thread for 'work_item_queue_thread' has been started") - except threading.ThreadError: - self.logger.error("Unable to launch thread for 'work_item_queue_thread'.") - self.work_item_queue_thread = None - - def _work_item_queue_thread_shutdown(self): - """ - Shut down the thread to work item queue - """ - - if self.work_item_queue_thread: - self.work_item_queue_thread.join() - if self.work_item_queue_thread.is_alive(): - self.logger.error("Unable to shut down 'work_item_queue_thread' thread") - else: - self.logger.info("Thread 'work_item_queue_thread' has been terminated.") - self.work_item_queue_thread = None - - ############################## - # Database Query Preparation - ############################## - - def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = None, group2: str = None, ignore_value=None) -> Union[list, None]: - """ - Assemble a mysql query str and param dict based on given parameters, get query response and return it - - :param func: function to be used at query - :param item_id: database item_id for which the query should be done - :param ts_start: start for query given in timestamp in microseconds - :param ts_end: end for query given in timestamp in microseconds - :param group: first grouping parameter (default = None, possible values: day, week, month, year) - :param group2: second grouping parameter (default = None, possible values: day, week, month, year) - :param ignore_value: value of val_num, which will be ignored during query - - :return: query response - - """ - - # do debug log - if self.prepare_debug: - self.logger.debug(f"_query_log_timestamp: Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value=}") - - # define query parts - _select = { - 'avg': 'time, ROUND(AVG(val_num * duration) / AVG(duration), 1) as value ', - 'avg1': 'time, ROUND(AVG(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num), 1) as value ', - 'min': 'time, ROUND(MIN(val_num), 1) as value ', - 'max': 'time, ROUND(MAX(val_num), 1) as value ', - 'max1': 'time, ROUND(MAX(value), 1) as value FROM (SELECT time, ROUND(MAX(val_num), 1) as value ', - 'sum': 'time, ROUND(SUM(val_num), 1) as value ', - 'on': 'time, ROUND(SUM(val_bool * duration) / SUM(duration), 1) as value ', - 'integrate': 'time, ROUND(SUM(val_num * duration),1) as value ', - 'sum_max': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, ROUND(MAX(val_num), 1) as value ', - 'sum_avg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num * duration) / AVG(duration), 1) as value ', - 'sum_min_neg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, IF(min(val_num) < 0, ROUND(MIN(val_num), 1), 0) as value ', - 'diff_max': 'time, value1 - LAG(value1) OVER (ORDER BY time) AS value FROM (SELECT time, ROUND(MAX(val_num), 1) as value1 ', - 'next': 'time, val_num as value ', - 'raw': 'time, val_num as value ' - } - - _table_alias = { - 'avg': '', - 'avg1': ') AS table1 ', - 'min': '', - 'max': '', - 'max1': ') AS table1 ', - 'sum': '', - 'on': '', - 'integrate': '', - 'sum_max': ') AS table1 ', - 'sum_avg': ') AS table1 ', - 'sum_min_neg': ') AS table1 ', - 'diff_max': ') AS table1 ', - 'next': '', - 'raw': '', - } - - _order = "time DESC LIMIT 1 " if func == "next" else "time ASC " - - _where 
= "item_id = :item_id AND time < :ts_start" if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " - - _db_table = 'log ' - - _group_by_sql = { - "year": "GROUP BY YEAR(FROM_UNIXTIME(time/1000)) ", - "month": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m') ", - "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", - "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", - "hour": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m%d%H') ", - None: '' - } - - _group_by_sqlite = { - "year": "GROUP BY strftime('%Y', date((time/1000),'unixepoch')) ", - "month": "GROUP BY strftime('%Y%m', date((time/1000),'unixepoch')) ", - "week": "GROUP BY strftime('%Y%W', date((time/1000),'unixepoch')) ", - "day": "GROUP BY date((time/1000),'unixepoch') ", - "hour": "GROUP BY strftime('%Y%m%d%H', datetime((time/1000),'unixepoch')) ", - None: '' - } - - # select query parts depending in db driver - if self.db_driver.lower() == 'pymysql': - _group_by = _group_by_sql - elif self.db_driver.lower() == 'sqlite3': - _group_by = _group_by_sqlite - else: - self.logger.error('DB Driver unknown') - return - - # check correctness of func - if func not in _select: - self.logger.error(f"_query_log_timestamp: Requested {func=} for {item_id=} not defined. Query cancelled.") - return - - # check correctness of group and group2 - if group not in _group_by: - self.logger.error(f"_query_log_timestamp: Requested {group=} for item={item_id=} not defined. Query cancelled.") - return - if group2 not in _group_by: - self.logger.error(f"_query_log_timestamp: Requested {group2=} for item={item_id=} not defined. Query cancelled.") - return - - # handle ignore values - if func in ['min', 'max', 'max1', 'sum_max', 'sum_avg', 'sum_min_neg', 'diff_max']: # extend _where statement for excluding boolean values == 0 for defined functions - _where = f'{_where}AND val_bool = 1 ' - if ignore_value: # if value to be ignored are defined, extend _where statement - _where = f'{_where}AND val_num != {ignore_value} ' - - # set params - params = {'item_id': item_id, 'ts_start': ts_start} - if func != "next": - params.update({'ts_end': ts_end}) - - # assemble query - query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order}{_table_alias[func]}{_group_by[group2]}".strip() - - if self.db_driver.lower() == 'sqlite3': - query = query.replace('IF', 'IIF') - - # do debug log - if self.prepare_debug: - self.logger.debug(f"_query_log_timestamp: {query=}, {params=}") - - # request database and return result - return self._fetchall(query, params) - - def _read_log_all(self, item_id: int): - """ - Read the oldest log record for given item - - :param item_id: item_id to read the record for - :return: Log record for item_id - """ - - if self.prepare_debug: - self.logger.debug(f"_read_log_all: Called for {item_id=}") - - query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" - params = {'item_id': item_id} - result = self._fetchall(query, params) - return result - - def _read_log_oldest(self, item_id: int, cur=None) -> int: - """ - Read the oldest log record for given database ID - - :param item_id: Database ID of item to read the record for - :type item_id: int - :param cur: A database cursor object if available (optional) - - :return: Log record for the database ID - """ - - params = {'item_id': item_id} - query = "SELECT min(time) FROM log WHERE item_id = :item_id;" - return self._fetchall(query, params, cur=cur)[0][0] - - def _read_log_timestamp(self, item_id: int, timestamp: int, 
cur=None) -> Union[list, None]: - """ - Read database log record for given database ID - - :param item_id: Database ID of item to read the record for - :type item_id: int - :param timestamp: timestamp for the given value - :type timestamp: int - :param cur: A database cursor object if available (optional) - - :return: Log record for the database ID at given timestamp - """ - - params = {'item_id': item_id, 'timestamp': timestamp} - query = "SELECT * FROM log WHERE item_id = :item_id AND time = :timestamp;" - return self._fetchall(query, params, cur=cur) - - def _read_item_table(self, item_id: int = None, item_path: str = None): - """ - Read item table - - :param item_id: unique ID for item within database - :param item_path: item_path for Item within the database - - :return: Data for the selected item - :rtype: tuple - """ - - columns_entries = ('id', 'name', 'time', 'val_str', 'val_num', 'val_bool', 'changed') - columns = ", ".join(columns_entries) - - if item_id is None and item_path is None: - return - - if item_id: - query = f"SELECT {columns} FROM item WHERE id = {item_id}" - else: - query = f"SELECT {columns} FROM item WHERE name = '{item_path}'" - - return self._fetchone(query) - - def _get_db_version(self) -> str: - """ - Query the database version and provide result - """ - - query = 'SELECT sqlite_version()' if self.db_driver.lower() == 'sqlite3' else 'SELECT VERSION()' - return self._fetchone(query)[0] - - def _get_db_connect_timeout(self) -> list: - """ - Query database timeout - """ - - query = "SHOW GLOBAL VARIABLES LIKE 'connect_timeout'" - return self._fetchone(query) - - def _get_db_net_read_timeout(self) -> list: - """ - Query database timeout net_read_timeout - """ - - query = "SHOW GLOBAL VARIABLES LIKE 'net_read_timeout'" - return self._fetchone(query) - - ############################## - # Database Queries - ############################## - - def _execute(self, query: str, params: dict = None, cur=None) -> list: - if params is None: - params = {} - - return self._query(self._db.execute, query, params, cur) - - def _fetchone(self, query: str, params: dict = None, cur=None) -> list: - if params is None: - params = {} - - return self._query(self._db.fetchone, query, params, cur) - - def _fetchall(self, query: str, params: dict = None, cur=None) -> list: - if params is None: - params = {} - - return self._query(self._db.fetchall, query, params, cur) - - def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: - if params is None: - params = {} - - if self.sql_debug: - self.logger.debug(f"_query: Called with {query=}, {params=}, {cur=}") - - if not self._initialize_db(): - return None - - if cur is None: - if self._db.verify(5) == 0: - self.logger.error("_query: Connection to database not recovered.") - return None - # if not self._db.lock(300): - # self.logger.error("_query: Can't query due to fail to acquire lock.") - # return None - - query_readable = re.sub(r':([a-z_]+)', r'{\1}', query).format(**params) - - try: - tuples = fetch(query, params, cur=cur) - except Exception as e: - self.logger.error(f"_query: Error for query '{query_readable}': {e}") - else: - if self.sql_debug: - self.logger.debug(f"_query: Result of '{query_readable}': {tuples}") - return tuples - # finally: - # if cur is None: - # self._db.release() - - -############################## -# Helper functions -############################## - - -def params_to_dict(string: str) -> Union[dict, None]: - """ - Parse a string with named arguments and comma separation to dict; 
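For reference, the query assembly in _query_log_timestamp above should produce roughly the following statement for func='max' with group='day' on a MySQL database (spacing normalised; shown only to illustrate how the parts combine):

    expected_query = (
        "SELECT time, ROUND(MAX(val_num), 1) as value "
        "FROM log "
        "WHERE item_id = :item_id AND time BETWEEN :ts_start AND :ts_end AND val_bool = 1 "
        "GROUP BY DATE(FROM_UNIXTIME(time/1000)) "
        "ORDER BY time ASC"
    )
    # executed via self._fetchall(expected_query, {'item_id': ..., 'ts_start': ..., 'ts_end': ...})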
(e.g. string = 'year=2022, month=12') - """ - - try: - res_dict = dict((a.strip(), b.strip()) for a, b in (element.split('=') for element in string.split(', '))) - except Exception: - return None - else: - # convert to int and remove possible double quotes - for key in res_dict: - if isinstance(res_dict[key], str): - res_dict[key] = res_dict[key].replace('"', '') - res_dict[key] = res_dict[key].replace("'", "") - if res_dict[key].isdigit(): - res_dict[key] = int(float(res_dict[key])) - - # check correctness if known key values (func=str, item, timeframe=str, start=int, end=int, count=int, group=str, group2=str, year=int, month=int): - for key in res_dict: - if key in ('func', 'timeframe', 'group', 'group2') and not isinstance(res_dict[key], str): - return None - elif key in ('start', 'end', 'count') and not isinstance(res_dict[key], int): - return None - elif key in 'year': - if not valid_year(res_dict[key]): - return None - elif key in 'month': - if not valid_month(res_dict[key]): - return None - return res_dict - - -def valid_year(year: Union[int, str]) -> bool: - """ - Check if given year is digit and within allowed range - """ - - if ((isinstance(year, int) or (isinstance(year, str) and year.isdigit())) and ( - 1980 <= int(year) <= datetime.date.today().year)) or (isinstance(year, str) and year == 'current'): - return True - else: - return False - - -def valid_month(month: Union[int, str]) -> bool: - """ - Check if given month is digit and within allowed range - """ - - if (isinstance(month, int) or (isinstance(month, str) and month.isdigit())) and (1 <= int(month) <= 12): - return True - else: - return False - - -def timestamp_to_timestring(timestamp: int) -> str: - """ - Parse timestamp from db query to string representing date and time - """ - - return datetime.datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') - - -def convert_timeframe(timeframe: str) -> str: - """ - Convert timeframe - - """ - - convertion = { - 'tag': 'day', - 'heute': 'day', - 'woche': 'week', - 'monat': 'month', - 'jahr': 'year', - 'vorjahreszeitraum': 'day', - 'jahreszeitraum': 'day', - 'h': 'hour', - 'd': 'day', - 'w': 'week', - 'm': 'month', - 'y': 'year' - } - - return convertion.get(timeframe) - - -def convert_duration(timeframe: str, window_dur: str) -> int: - """ - Convert duration - - """ - - _d_in_y = 365 - _d_in_w = 7 - _m_in_y = 12 - _w_in_y = _d_in_y / _d_in_w - _w_in_m = _w_in_y / _m_in_y - _d_in_m = _d_in_y / _m_in_y - - conversion = { - 'day': {'day': 1, - 'week': _d_in_w, - 'month': _d_in_m, - 'year': _d_in_y, - }, - 'week': {'day': 1 / _d_in_w, - 'week': 1, - 'month': _w_in_m, - 'year': _w_in_y - }, - 'month': {'day': 1 / _d_in_m, - 'week': 1 / _w_in_m, - 'month': 1, - 'year': _m_in_y - }, - 'year': {'day': 1 / _d_in_y, - 'week': 1 / _w_in_y, - 'month': 1 / _m_in_y, - 'year': 1 - } - } - - return round(int(conversion[timeframe][window_dur]), 0) - - -def count_to_start(count: int = 0, end: int = 0): - """ - Converts given count and end ot start and end - """ - - return end + count, end - - -def get_start_end_as_timestamp(timeframe: str, start: int, end: int) -> tuple: - """ - Provides start and end as timestamp in microseconds from timeframe with start and end - - :param timeframe: timeframe as week, month, year - :param start: beginning timeframe in x timeframes from now - :param end: end of timeframe in x timeframes from now - - :return: start time in timestamp in microseconds, end time in timestamp in microseconds - - """ - - return 
datetime_to_timestamp(get_start(timeframe, start)) * 1000, datetime_to_timestamp(get_end(timeframe, end)) * 1000 - - -def get_start(timeframe: str, start: int) -> datetime: - """ - Provides start as datetime - - :param timeframe: timeframe as week, month, year - :param start: beginning timeframe in x timeframes from now - - """ - - if start is None: - start = 0 - - if timeframe == 'week': - _dt_start = week_beginning(start) - elif timeframe == 'month': - _dt_start = month_beginning(start) - elif timeframe == 'year': - _dt_start = year_beginning(start) - else: - _dt_start = day_beginning(start) - - return _dt_start - - -def get_end(timeframe: str, end: int) -> datetime: - """ - Provides end as datetime - - :param timeframe: timeframe as week, month, year - :param end: end of timeframe in x timeframes from now - - """ - - if timeframe == 'week': - _dt_end = week_end(end) - elif timeframe == 'month': - _dt_end = month_end(end) - elif timeframe == 'year': - _dt_end = year_end(end) - else: - _dt_end = day_end(end) - - return _dt_end - - -def year_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of year of today minus x years - """ - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(month=1, day=1) - relativedelta(years=delta) - - -def year_end(delta: int = 0) -> datetime: - """ - provides datetime of end of year of today minus x years - """ - - return year_beginning(delta) + relativedelta(years=1) - - -def month_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of month minus x month - """ - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(day=1) - relativedelta(months=delta) - - -def month_end(delta: int = 0) -> datetime: - """ - provides datetime of end of month minus x month - """ - - return month_beginning(delta) + relativedelta(months=1) - - -def week_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of week minus x weeks - """ - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) - - -def week_end(delta: int = 0) -> datetime: - """ - provides datetime of end of week minus x weeks - """ - - return week_beginning(delta) + relativedelta(days=6) - - -def day_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of today minus x days - """ - - return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) - - -def day_end(delta: int = 0) -> datetime: - """ - provides datetime of end of today minus x days - """ - - return day_beginning(delta) + relativedelta(days=1) - - -def datetime_to_timestamp(dt: datetime) -> int: - """ - Provides timestamp from given datetime - """ - - return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp()) - - -def to_int(arg) -> Union[int, None]: - try: - return int(arg) - except (ValueError, TypeError): - return None - - -ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] -ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] -ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] -ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 
'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] -ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] -ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] -ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] -ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 
'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] -ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] -ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] -ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] -ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] -ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] - - -""" - 'serie_minmax_monat_min_15m': {'func': 'min', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_max_15m': {'func': 'max', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_avg_15m': {'func': 'avg', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_woche_min_30w': {'func': 'min', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_max_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_avg_30w': {'func': 'avg', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_tag_min_30d': {'func': 'min', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_max_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_avg_30d': {'func': 'avg', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_verbrauch_tag_30d': {'func': 'diff_max', 'timeframe': 'day', 'start': 30, 
'end': 0, 'group': 'day'}, - 'serie_verbrauch_woche_30w': {'func': 'diff_max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_verbrauch_monat_18m': {'func': 'diff_max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_zaehlerstand_tag_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_zaehlerstand_woche_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_zaehlerstand_monat_18m': {'func': 'max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_waermesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_kaeltesumme_monat_24m': {'func': 'sum_min_neg', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, - 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'gts': {'func': 'max', 'timeframe': 'year', 'start': None, 'end': None, 'group': 'day'}, -""" +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2022- Michael Wenzel wenzel_michael@web.de +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# This plugin provides additional functionality to mysql database +# connected via database plugin +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +import sqlvalidator +import datetime +import time +import re +import queue +from dateutil.relativedelta import relativedelta +from typing import Union +import threading + +from lib.model.smartplugin import SmartPlugin +from lib.item import Items +from lib.item.item import Item +from lib.shtime import Shtime +from lib.plugin import Plugins +from .webif import WebInterface +import lib.db + +DAY = 'day' +WEEK = 'week' +MONTH = 'month' +YEAR = 'year' + + +class DatabaseAddOn(SmartPlugin): + """ + Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items + """ + + PLUGIN_VERSION = '1.1.0' + + def __init__(self, sh): + """ + Initializes the plugin. 
+ """ + + # Call init code of parent class (SmartPlugin) + super().__init__() + + # get item and shtime instance + self.shtime = Shtime.get_instance() + self.items = Items.get_instance() + self.plugins = Plugins.get_instance() + + # define cache dicts + self.current_values = {} # Dict to hold min and max value of current day / week / month / year for items + self.previous_values = {} # Dict to hold value of end of last day / week / month / year for items + self.item_cache = {} # Dict to hold item_id, oldest_log_ts and oldest_entry for items + + # define variables for database, database connection, working queue and status + self.item_queue = queue.Queue() # Queue containing all to be executed items + self.work_item_queue_thread = None # Working Thread for queue + self._db_plugin = None # object if database plugin + self._db = None # object of database + self.connection_data = None # connection data list of database + self.db_driver = None # driver of the used database + self.db_instance = None # instance of the used database + self.item_attribute_search_str = 'database' # attribute, on which an item configured for database can be identified + self.last_connect_time = 0 # mechanism for limiting db connection requests + self.alive = None # Is plugin alive? + self.startup_finished = False # Startup of Plugin finished + self.suspended = False # Is plugin activity suspended + self.active_queue_item: str = '-' # String holding item path of currently executed item + + # define debug logs + self.parse_debug = False # Enable / Disable debug logging for method 'parse item' + self.execute_debug = False # Enable / Disable debug logging for method 'execute items' + self.sql_debug = False # Enable / Disable debug logging for sql stuff + self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' + self.prepare_debug = False # Enable / Disable debug logging for query preparation + + # define default mysql settings + self.default_connect_timeout = 60 + self.default_net_read_timeout = 60 + + # define variables from plugin parameters + self.db_configname = self.get_parameter_value('database_plugin_config') + self.startup_run_delay = self.get_parameter_value('startup_run_delay') + self.ignore_0 = self.get_parameter_value('ignore_0') + self.use_oldest_entry = self.get_parameter_value('use_oldest_entry') + + # init cache dicts + self._init_cache_dicts() + + # activate debug logger + if self.log_level == 10: # info: 20 debug: 10 + self.parse_debug = True + self.execute_debug = True + self.sql_debug = True + self.onchange_debug = True + self.prepare_debug = True + + # init webinterface + self.init_webinterface(WebInterface) + + def run(self): + """ + Run method for the plugin + """ + + self.logger.debug("Run method called") + + # check existence of db-plugin, get parameters, and init connection to db + if not self._check_db_existence(): + self.logger.error(f"Check of existence of database plugin incl connection check failed. 
Plugin not loaded") + return self.deinit() + + self._db = lib.db.Database("DatabaseAddOn", self.db_driver, self.connection_data) + if not self._db.api_initialized: + self.logger.error("Initialization of database API failed") + return self.deinit() + + self.logger.debug("Initialization of database API successful") + + # init db + if not self._initialize_db(): + return self.deinit() + + # check db connection settings + if self.db_driver is not None and self.db_driver.lower() == 'pymysql': + self._check_db_connection_setting() + + # add scheduler for cyclic trigger item calculation + self.scheduler_add('cyclic', self.execute_due_items, prio=3, cron='5 0 0 * * *', cycle=None, value=None, offset=None, next=None) + + # add scheduler to trigger items to be calculated at startup with delay + dt = self.shtime.now() + datetime.timedelta(seconds=(self.startup_run_delay + 3)) + self.logger.info(f"Set scheduler for calculating startup-items with delay of {self.startup_run_delay + 3}s to {dt}.") + self.scheduler_add('startup', self.execute_startup_items, next=dt) + + # update database_items in item config, where path was given + self._update_database_items() + + # set plugin to alive + self.alive = True + + # start the queue consumer thread + self._work_item_queue_thread_startup() + + def stop(self): + """ + Stop method for the plugin + """ + + self.logger.debug("Stop method called") + self.alive = False + self.scheduler_remove('cyclic') + self._work_item_queue_thread_shutdown() + + def parse_item(self, item: Item): + """ + Default plugin parse_item method. Is called when the plugin is initialized. + + The plugin can, corresponding to its attribute keywords, decide what to do with the item in the future, like adding it to an internal array for future reference + :param item: The item to process. + :return: If the plugin needs to be informed of an items change you should return a call back function + like the function update_item down below. An example when this is needed is the knx plugin + where parse_item returns the update_item function when the attribute knx_send is found. + This means that when the items value is about to be updated, the call back function is called + with the item, caller, source and dest as arguments and in case of the knx plugin the value + can be sent to the knx with a knx write function within the knx plugin. 
+ """ + + def get_database_item() -> Item: + """ + Returns item from shNG config which is an item with database attribut valid for current db_addon item + """ + + _lookup_item = item.return_parent() + + for i in range(2): + if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): + self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has been found for item={item.path()} {i + 1} level above item.") + return _lookup_item + else: + _lookup_item = _lookup_item.return_parent() + + def has_db_addon_item() -> bool: + """Returns item from shNG config which is item with db_addon attribut valid for database item""" + + for child in item.return_children(): + if check_db_addon_fct(child): + return True + + for child_child in child.return_children(): + if check_db_addon_fct(child_child): + return True + + for child_child_child in child_child.return_children(): + if check_db_addon_fct(child_child_child): + return True + + return False + + def check_db_addon_fct(check_item) -> bool: + """ + Check if item has db_addon_fct and is onchange + """ + if self.has_iattr(check_item.conf, 'db_addon_fct'): + if self.get_iattr_value(check_item.conf, 'db_addon_fct').lower() in ALL_ONCHANGE_ATTRIBUTES: + self.logger.debug(f"db_addon item for database item {item.path()} found.") + return True + return False + + # handle all items with db_addon_fct + if self.has_iattr(item.conf, 'db_addon_fct'): + + if self.parse_debug: + self.logger.debug(f"parse item: {item.path()} due to 'db_addon_fct'") + + # get db_addon_fct attribute value + db_addon_fct = self.get_iattr_value(item.conf, 'db_addon_fct').lower() + + # get attribute value if item should be calculated at plugin startup + db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) + + # get attribute if certain value should be ignored at db query + if self.has_iattr(item.conf, 'database_ignore_value'): + db_addon_ignore_value = self.get_iattr_value(item.conf, 'database_ignore_value') + elif any(x in str(item.id()) for x in self.ignore_0): + db_addon_ignore_value = 0 + else: + db_addon_ignore_value = None + + # get database item and return if not available + database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') + if database_item_path is not None: + database_item = database_item_path + else: + database_item = get_database_item() + if database_item is None: + self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") + return + + # return if mandatory params for ad_addon_fct not given. + if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES and not self.has_iattr(item.conf, 'db_addon_params'): + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored.") + return + + # create standard items config + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value': db_addon_ignore_value} + if database_item_path is not None: + item_config_data_dict.update({'database_item_path': True}) + else: + database_item_path = database_item.path() + + if self.parse_debug: + self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item_path}") + + # handle daily items + if db_addon_fct in ALL_DAILY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'daily'}) + + # handle weekly items + elif db_addon_fct in ALL_WEEKLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'weekly'}) + + # handle monthly items + elif db_addon_fct in ALL_MONTHLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'monthly'}) + + # handle yearly items + elif db_addon_fct in ALL_YEARLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'yearly'}) + + # handle static items + elif db_addon_fct in ALL_GEN_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'static'}) + + # handle on-change items + elif db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'on-change'}) + + # handle all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme + if 'summe' in db_addon_fct: + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None or 'year' not in db_addon_params: + self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") + db_addon_params = {'year': 'current'} + item_config_data_dict.update({'params': db_addon_params}) + + # handle wachstumsgradtage function + elif db_addon_fct == 'wachstumsgradtage': + DEFAULT_THRESHOLD = 10 + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None or 'year' not in db_addon_params: + self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") + db_addon_params = {'year': 'current'} + if 'threshold' not in db_addon_params: + self.logger.info(f"No 'threshold' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with {DEFAULT_THRESHOLD} will be used.") + db_addon_params.update({'threshold': DEFAULT_THRESHOLD}) + if not isinstance(db_addon_params['threshold'], int): + threshold = to_int(db_addon_params['threshold']) + db_addon_params['threshold'] = DEFAULT_THRESHOLD if threshold is None else threshold + item_config_data_dict.update({'params': db_addon_params}) + + # handle tagesmitteltemperatur + elif db_addon_fct == 'tagesmitteltemperatur': + if not self.has_iattr(item.conf, 'db_addon_params'): + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") + return + + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None: + self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. 
Item will be ignored.") + return + item_config_data_dict.update({'params': db_addon_params}) + + # handle db_request + elif db_addon_fct == 'db_request': + if not self.has_iattr(item.conf, 'db_addon_params'): + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored") + return + + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None: + self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") + return + + if self.parse_debug: + self.logger.debug(f"parse_item: {db_addon_fct=} for item={item.path()}, {db_addon_params=}") + + if not any(param in db_addon_params for param in ('func', 'timeframe')): + self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored, not all mandatory parameters in {db_addon_params=} given. Item will be ignored.") + return + + TIMEFRAMES_2_UPDATECYCLE = {'day': 'daily', + 'week': 'weekly', + 'month': 'monthly', + 'year': 'yearly'} + + _timeframe = db_addon_params.get('group', None) + if not _timeframe: + _timeframe = db_addon_params.get('timeframe', None) + update_cycle = TIMEFRAMES_2_UPDATECYCLE.get(_timeframe) + if update_cycle is None: + self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored. Not able to detect update cycle.") + return + + item_config_data_dict.update({'params': db_addon_params, 'cycle': update_cycle}) + + # debug log item cycle + if self.parse_debug: + self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") + + # handle item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) + if (db_addon_startup and db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES) or db_addon_fct in ALL_GEN_ATTRIBUTES: + if self.parse_debug: + self.logger.debug(f"Item '{item.path()}' added to be run on startup") + item_config_data_dict.update({'startup': True}) + else: + item_config_data_dict.update({'startup': False}) + + # add item to plugin item dict + self.add_item(item, config_data_dict=item_config_data_dict) + + # handle all items with db_addon_info + elif self.has_iattr(item.conf, 'db_addon_info'): + if self.parse_debug: + self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_info'") + self.add_item(item, config_data_dict={'db_addon': 'info', 'db_addon_fct': f"info_{self.get_iattr_value(item.conf, 'db_addon_info').lower()}", 'database_item': None, 'startup': True}) + + # handle all items with db_addon_admin + elif self.has_iattr(item.conf, 'db_addon_admin'): + if self.parse_debug: + self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_admin'") + self.add_item(item, config_data_dict={'db_addon': 'admin', 'db_addon_fct': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}", 'database_item': None}) + return self.update_item + + # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen + elif self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): + self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") + self.add_item(item, config_data_dict={'db_addon': 'database'}) + return self.update_item + + def update_item(self, item, caller=None, source=None, dest=None): + """ + Handle 
updated item + This method is called, if the value of an item has been updated by SmartHomeNG. + It should write the changed value out to the device (hardware/interface) that is managed by this plugin. + + :param item: item to be updated towards the plugin + :param caller: if given it represents the callers name + :param source: if given it represents the source + :param dest: if given it represents the dest + """ + + if self.alive and caller != self.get_shortname(): + # handle database items + if item in self._database_items(): + # self.logger.debug(f"update_item was called with item {item.property.path} with value {item()} from caller {caller}, source {source} and dest {dest}") + if not self.startup_finished: + self.logger.info(f"Handling of 'on-change' is paused for startup. No updated will be processed.") + elif self.suspended: + self.logger.info(f"Plugin is suspended. No updated will be processed.") + else: + self.logger.info(f"+ Updated item '{item.path()}' with value {item()} will be put to queue for processing. {self.item_queue.qsize() + 1} items to do.") + self.item_queue.put((item, item())) + + # handle admin items + elif self.has_iattr(item.conf, 'db_addon_admin'): + self.logger.debug(f"update_item was called with item {item.property.path} from caller {caller}, source {source} and dest {dest}") + if self.get_iattr_value(item.conf, 'db_addon_admin') == 'suspend': + self.suspend(item()) + elif self.get_iattr_value(item.conf, 'db_addon_admin') == 'recalc_all': + self.execute_all_items() + item(False, self.get_shortname()) + elif self.get_iattr_value(item.conf, 'db_addon_admin') == 'clean_cache_values': + self._init_cache_dicts() + item(False, self.get_shortname()) + + def execute_due_items(self) -> None: + """ + Execute all items, which are due + """ + + if self.execute_debug: + self.logger.debug("execute_due_items called") + + if not self.suspended: + _todo_items = self._create_due_items() + self.logger.info(f"{len(_todo_items)} items are due and will be calculated.") + [self.item_queue.put(i) for i in _todo_items] + else: + self.logger.info(f"Plugin is suspended. No items will be calculated.") + + def execute_startup_items(self) -> None: + """ + Execute all startup_items + """ + if self.execute_debug: + self.logger.debug("execute_startup_items called") + + if not self.suspended: + self.logger.info(f"{len(self._startup_items())} items will be calculated at startup.") + [self.item_queue.put(i) for i in self._startup_items()] + self.startup_finished = True + else: + self.logger.info(f"Plugin is suspended. No items will be calculated.") + + def execute_static_items(self) -> None: + """ + Execute all static items + """ + if self.execute_debug: + self.logger.debug("execute_static_item called") + + if not self.suspended: + self.logger.info(f"{len(self._static_items())} items will be calculated.") + [self.item_queue.put(i) for i in self._static_items()] + else: + self.logger.info(f"Plugin is suspended. No items will be calculated.") + + def execute_info_items(self) -> None: + """ + Execute all info items + """ + if self.execute_debug: + self.logger.debug("execute_info_items called") + + if not self.suspended: + self.logger.info(f"{len(self._info_items())} items will be calculated.") + [self.item_queue.put(i) for i in self._info_items()] + else: + self.logger.info(f"Plugin is suspended. 
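Scheduler callbacks and update_item() act as producers that put work into item_queue, while the single thread started by _work_item_queue_thread_startup() consumes it in work_item_queue() further down. A minimal, self-contained model of this producer/consumer pattern, independent of the plugin API:

    import queue
    import threading

    work_q = queue.Queue()
    alive = threading.Event()
    alive.set()

    def worker() -> None:
        while alive.is_set():
            try:
                entry = work_q.get(True, 10)       # block for up to 10 s, as in work_item_queue()
            except queue.Empty:
                continue
            print(f"processing {entry}")

    threading.Thread(target=worker, name='work_item_queue', daemon=True).start()
    work_q.put(('some.item.path', 42))             # an 'on-change' tuple, as queued by update_item()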
No items will be calculated.") + + def execute_all_items(self) -> None: + """ + Execute all ondemand items + """ + + if not self.suspended: + self.logger.info(f"Values for all {len(self._ondemand_items())} items with 'db_addon_fct' attribute, which are not 'on-change', will be calculated!") + [self.item_queue.put(i) for i in self._ondemand_items()] + else: + self.logger.info(f"Plugin is suspended. No items will be calculated.") + + def work_item_queue(self) -> None: + """ + Handles item queue were all to be executed items were be placed in. + """ + + while self.alive: + try: + queue_entry = self.item_queue.get(True, 10) + self.logger.info(f" Queue Entry: '{queue_entry}' received.") + except queue.Empty: + self.active_queue_item = '-' + pass + else: + if isinstance(queue_entry, tuple): + item, value = queue_entry + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item '{item.path()}' with {value=} will be processed.") + self.active_queue_item = str(item.path()) + self.handle_onchange(item, value) + else: + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-demand' item '{queue_entry.path()}' will be processed.") + self.active_queue_item = str(queue_entry.path()) + self.handle_ondemand(queue_entry) + + def handle_ondemand(self, item: Item) -> None: + """ + Calculate value for requested item, fill cache dicts and set item value. + + :param item: Item for which value will be calculated + """ + + # set/get parameters + item_config = self.get_item_config(item) + db_addon = item_config['db_addon'] + db_addon_fct = item_config['db_addon_fct'] + database_item = item_config['database_item'] + ignore_value = item_config.get('ignore_value') + result = None + self.logger.debug(f"handle_ondemand: Item={item.path()} with {item_config=}") + + # handle info functions + if db_addon == 'info': + # handle info_db_version + if db_addon_fct == 'info_db_version': + result = self._get_db_version() + self.logger.debug(f"handle_ondemand: info_db_version {result=}") + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + + # handle general functions + elif db_addon_fct in ALL_GEN_ATTRIBUTES: + # handle oldest_value + if db_addon_fct == 'general_oldest_value': + result = self._get_oldest_value(database_item) + + # handle oldest_log + elif db_addon_fct == 'general_oldest_log': + result = self._get_oldest_log(database_item) + + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + + # handle item starting with 'verbrauch_' + elif db_addon_fct in ALL_VERBRAUCH_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'verbrauch' detected.") + + result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) + + if result and result < 0: + self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. 
Something seems to be wrong.") + + # handle item starting with 'zaehlerstand_' of format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_woche_minus1' + elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'zaehlerstand' detected.") + + result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) + + # handle item starting with 'minmax_' + elif db_addon_fct in ALL_HISTORIE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'minmax' detected.") + + result = self._handle_min_max(database_item, db_addon_fct, ignore_value)[0][1] + + # handle item starting with 'tagesmitteltemperatur_' + elif db_addon_fct in ALL_TAGESMITTEL_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'tagesmitteltemperatur' detected.") + + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value)[0][1] + + # handle item starting with 'serie_' + elif db_addon_fct in ALL_SERIE_ATTRIBUTES: + if 'minmax' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_minmax' detected.") + + result = self._handle_min_max(database_item, db_addon_fct, ignore_value) + + elif 'verbrauch' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_verbrauch' detected.") + + result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) + + elif 'zaehlerstand' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_zaehlerstand' detected.") + + result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) + + elif 'tagesmitteltemperatur' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_tagesmittelwert' detected.") + + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value) + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + + # handle kaeltesumme + elif db_addon_fct == 'kaeltesumme': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") + + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_kaeltesumme(**db_addon_params) + + # handle waermesumme + elif db_addon_fct == 'waermesumme': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") + + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_waermesumme(**db_addon_params) + + # handle gruenlandtempsumme + elif db_addon_fct == 'gruenlandtempsumme': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") + + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_gruenlandtemperatursumme(**db_addon_params) + + # handle wachstumsgradtage + elif db_addon_fct == 'wachstumsgradtage': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params}") + + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_wachstumsgradtage(**db_addon_params) + + # 
handle tagesmitteltemperatur + elif db_addon_fct == 'tagesmitteltemperatur': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") + + if db_addon_params: + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value, db_addon_params) + + # handle db_request + elif db_addon_fct == 'db_request': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected with {db_addon_params=}") + + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + if db_addon_params.keys() & {'func', 'item', 'timeframe'}: + result = self._query_item(**db_addon_params) + else: + self.logger.error(f"Attribute 'db_addon_params' not containing needed params for Item {item.id} with {db_addon_fct=}.") + + # handle everything else + else: + self.logger.warning(f"handle_ondemand: Function '{db_addon_fct}' for item {item.path()} not defined or found.") + return + + # log result + if self.execute_debug: + self.logger.debug(f"handle_ondemand: result is {result} for item '{item.path()}' with '{db_addon_fct=}'") + + if result is None: + self.logger.info(f" Result was None; No item value will be set.") + return + + # set item value and put data into plugin_item_dict + self.logger.info(f" Item value for '{item.path()}' will be set to {result}") + item_config = self.get_item_config(item) + item_config.update({'value': result}) + item(result, self.get_shortname()) + + def handle_onchange(self, updated_item: Item, value: float) -> None: + """ + Get item and item value for which an update has been detected, fill cache dicts and set item value. + + :param updated_item: Item which has been updated + :param value: Value of updated item + """ + + if self.onchange_debug: + self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") + + relevant_item_list = self.get_item_list('database_item', updated_item) + if self.onchange_debug: + self.logger.debug(f"Following items where identified for update: {relevant_item_list}.") + + for item in relevant_item_list: + item_config = self.get_item_config(item) + _database_item = item_config['database_item'] + _db_addon_fct = item_config['db_addon_fct'] + _ignore_value = item_config['ignore_value'] + _var = _db_addon_fct.split('_') + + # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... + if _db_addon_fct.startswith('minmax') and len(_var) == 3 and _var[2] in ['min', 'max']: + _timeframe = convert_timeframe(_var[1]) + _func = _var[2] + _cache_dict = self.current_values[_timeframe] + if not _timeframe: + return + + if self.onchange_debug: + self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.path()} with {_func=} detected. 
Check for update of _cache_dicts and item value.") + + _initial_value = False + _new_value = None + + # make sure, that database item is in cache dict + if _database_item not in _cache_dict: + _cache_dict[_database_item] = {} + if _cache_dict[_database_item].get(_func) is None: + _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value': _ignore_value} + _cached_value = self._query_item(**_query_params)[0][1] + _initial_value = True + if self.onchange_debug: + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_cached_value}.") + else: + _cached_value = _cache_dict[_database_item][_func] + + if _cached_value: + # check value for update of cache dict + if _func == 'min' and value < _cached_value: + _new_value = value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: new value={_new_value} lower then current min_value={_cached_value}. _cache_dict will be updated") + elif _func == 'max' and value > _cached_value: + _new_value = value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: new value={_new_value} higher then current max_value={_cached_value}. _cache_dict will be updated") + else: + if self.onchange_debug: + self.logger.debug(f"handle_onchange: new value={_new_value} will not change max/min for period.") + else: + _cached_value = value + + if _initial_value and not _new_value: + _new_value = _cached_value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") + + if _new_value: + _cache_dict[_database_item][_func] = _new_value + self.logger.info(f"Item value for '{item.path()}' with func={_func} will be set to {_new_value}") + item_config = self.get_item_config(item) + item_config.update({'value': _new_value}) + item(_new_value, self.get_shortname()) + else: + self.logger.info(f"Received value={value} is not influencing min / max value. Therefore item {item.path()} will not be changed.") + + # handle verbrauch on-change items ending with heute, woche, monat, jahr + elif _db_addon_fct.startswith('verbrauch') and len(_var) == 2 and _var[1] in ['heute', 'woche', 'monat', 'jahr']: + _timeframe = convert_timeframe(_var[1]) + _cache_dict = self.previous_values[_timeframe] + if _timeframe is None: + return + + # make sure, that database item is in cache dict + if _database_item not in _cache_dict: + _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value': _ignore_value} + _cached_value = self._query_item(**_query_params)[0][1] + _cache_dict[_database_item] = _cached_value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. Value {_cached_value} has been added.") + else: + _cached_value = _cache_dict[_database_item] + + # calculate value, set item value, put data into plugin_item_dict + if _cached_value is not None: + _new_value = round(value - _cached_value, 1) + self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") + item_config = self.get_item_config(item) + item_config.update({'value': _new_value}) + item(_new_value, self.get_shortname()) + else: + self.logger.info(f"Value for end of last {_timeframe} not available. 
No item value will be set.")
+
+    def _update_database_items(self):
+        for item in self._database_item_path_items():
+            item_config = self.get_item_config(item)
+            database_item_path = item_config.get('database_item')
+            database_item = self.items.return_item(database_item_path)
+
+            if database_item is None:
+                self.logger.warning(f"Database item {database_item_path!r} configured for item '{item.path()}' not found. Item will be removed from plugin.")
+                self.remove_item(item)
+            else:
+                item_config.update({'database_item': database_item})
+
+    @property
+    def log_level(self):
+        return self.logger.getEffectiveLevel()
+
+    def queue_backlog(self):
+        return self.item_queue.qsize()
+
+    def db_version(self):
+        return self._get_db_version()
+
+    def _startup_items(self) -> list:
+        return self.get_item_list('startup', True)
+
+    def _onchange_items(self) -> list:
+        return self.get_item_list('cycle', 'on-change')
+
+    def _daily_items(self) -> list:
+        return self.get_item_list('cycle', 'daily')
+
+    def _weekly_items(self) -> list:
+        return self.get_item_list('cycle', 'weekly')
+
+    def _monthly_items(self) -> list:
+        return self.get_item_list('cycle', 'monthly')
+
+    def _yearly_items(self) -> list:
+        return self.get_item_list('cycle', 'yearly')
+
+    def _static_items(self) -> list:
+        return self.get_item_list('cycle', 'static')
+
+    def _admin_items(self) -> list:
+        return self.get_item_list('db_addon', 'admin')
+
+    def _info_items(self) -> list:
+        return self.get_item_list('db_addon', 'info')
+
+    def _database_items(self) -> list:
+        return self.get_item_list('db_addon', 'database')
+
+    def _database_item_path_items(self) -> list:
+        return self.get_item_list('database_item_path', True)
+
+    def _ondemand_items(self) -> list:
+        return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items()
+
+    ##############################
+    # Public functions / Using item_path
+    ##############################
+
+    def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str]) -> Union[int, None]:
+        """
+        Query database for gruenlandtemperatursumme for given year
+        https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme
+
+        For grassland, the heat sum according to Ernst and Loeper is used to determine the start of the
+        vegetation period and thus the date for fertilization measures. Daily mean temperatures above 0 °C
+        are accumulated, with January weighted by 0.5 and February by 0.75.
+        A heat sum of 200 degrees indicates that fertilization is due. 
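+        Example (illustrative values): a daily mean of 4 °C on 15 January contributes 4 * 0.5 = 2.0,
+        a daily mean of 6 °C on 10 February contributes 6 * 0.75 = 4.5 and a daily mean of 5 °C in March
+        counts in full, giving a running sum of 11.5.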
+ + :param item_path: item object or item_id for which the query should be done + :param year: year the gruenlandtemperatursumme should be calculated for + :return: gruenlandtemperatursumme + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_gruenlandtemperatursumme(item, year) + + def waermesumme(self, item_path: str, year, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: + """ + Query database for waermesumme for given year or year/month + https://de.wikipedia.org/wiki/W%C3%A4rmesumme + + :param item_path: item object or item_id for which the query should be done + :param year: year the waermesumme should be calculated for + :param month: month the waermesumme should be calculated for + :param threshold: threshold for temperature + :return: waermesumme + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_waermesumme(item, year, month, threshold) + + def kaeltesumme(self, item_path: str, year, month: Union[int, str] = None) -> Union[int, None]: + """ + Query database for kaeltesumme for given year or year/month + https://de.wikipedia.org/wiki/K%C3%A4ltesumme + + :param item_path: item object or item_id for which the query should be done + :param year: year the kaeltesumme should be calculated for + :param month: month the kaeltesumme should be calculated for + :return: kaeltesumme + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_kaeltesumme(item, year, month) + + def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: int = None) -> list: + """ + Query database for tagesmitteltemperatur + https://www.dwd.de/DE/leistungen/klimadatendeutschland/beschreibung_tagesmonatswerte.html + + :param item_path: item object or item_id for which the query should be done + :param timeframe: timeincrement for determination + :param count: number of time increments starting from now to the left (into the past) + :return: tagesmitteltemperatur + """ + + if not timeframe: + timeframe = 'day' + + if not count: + count = 0 + + item = self.items.return_item(item_path) + if item: + return self._handle_tagesmitteltemperatur(database_item=item, db_addon_fct='tagesmitteltemperatur', params={'timeframe': timeframe, 'count': count}) + + def wachstumsgradtage(self, item_path: str, year: Union[int, str], threshold: int) -> Union[int, None]: + """ + Query database for wachstumsgradtage + https://de.wikipedia.org/wiki/Wachstumsgradtag + + :param item_path: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param threshold: Temperature in °C as threshold: Ein Tage mit einer Tagesdurchschnittstemperatur oberhalb des Schellenwertes gilt als Wachstumsgradtag + :return: wachstumsgradtage + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_wachstumsgradtage(item, year, threshold) + + def temperaturserie(self, item_path: str, year: Union[int, str], method: str) -> Union[list, None]: + """ + Query database for wachstumsgradtage + https://de.wikipedia.org/wiki/Wachstumsgradtag + + :param item_path: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param method: Calculation method + :return: wachstumsgradtage + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_temperaturserie(item, year, method) + + def query_item(self, func: str, item_path: str, timeframe: str, start: int = 
None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: + item = self.items.return_item(item_path) + if item is None: + return [] + + return self._query_item(func, item, timeframe, start, end, group, group2, ignore_value) + + def fetch_log(self, func: str, item_path: str, timeframe: str, start: int = None, end: int = 0, count: int = None, group: str = None, group2: str = None, ignore_value=None) -> list: + """ + Query database, format response and return it + + :param func: function to be used at query + :param item_path: item str or item_id for which the query should be done + :param timeframe: time increment für definition of start, end, count (day, week, month, year) + :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) + :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning today, end of last week, end of last month, end of last year) + :param count: start of timeframe defined by number of time increments starting from end to the left (into the past) + :param group: first grouping parameter (default = None, possible values: day, week, month, year) + :param group2: second grouping parameter (default = None, possible values: day, week, month, year) + :param ignore_value: value of val_num, which will be ignored during query + + :return: formatted query response + """ + item = self.items.return_item(item_path) + + if count: + start, end = count_to_start(count) + + if item and start and end: + return self._query_item(func=func, item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value=ignore_value) + else: + return [] + + def fetch_raw(self, query: str, params: dict = None) -> Union[list, None]: + """ + Fetch database with given query string and params + + :param query: database query to be executed + :param params: query parameters + + :return: result of database query + """ + + if params is None: + params = {} + + formatted_sql = sqlvalidator.format_sql(query) + sql_query = sqlvalidator.parse(formatted_sql) + + if not sql_query.is_valid(): + self.logger.error(f"fetch_raw: Validation of query failed with error: {sql_query.errors}") + return + + return self._fetchall(query, params) + + def suspend(self, state: bool = False) -> bool: + """ + Will pause value evaluation of plugin + + """ + + if state: + self.logger.warning("Plugin is set to 'suspended'. Queries to database will not be made until suspension is cancelled.") + self.suspended = True + self._clear_queue() + else: + self.logger.warning("Plugin suspension cancelled. 
Queries to database will be resumed.") + self.suspended = False + + # write back value to item, if one exists + for item in self.get_item_list('db_addon', 'admin'): + item_config = self.get_item_config(item) + if item_config['db_addon_fct'] == 'suspend': + item(self.suspended, self.get_shortname()) + + return self.suspended + + ############################## + # Support stuff / Using Item Object + ############################## + + def _handle_min_max(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: + """ + Handle execution of min/max calculation + """ + # handle all on_change functions of format 'minmax_timeframe_function' like 'minmax_heute_max' + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on-change function with 'min/max' detected; will be calculated by next change of database item") + return + + _var = db_addon_fct.split('_') + group = None + group2 = None + + # handle all 'last' functions in format 'minmax_last_window_function' like 'minmax_last_24h_max' + if len(_var) == 4 and _var[1] == 'last': + func = _var[3] + timeframe = convert_timeframe(_var[2][-1:]) + start = to_int(_var[2][:-1]) + end = 0 + log_text = 'minmax_last' + if timeframe is None or start is None: + return + + # handle all functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' + elif len(_var) == 4 and _var[2].startswith('minus'): + func = _var[3] # min, max, avg + timeframe = convert_timeframe(_var[1]) # day, week, month, year + start = to_int(_var[2][-1]) # 1, 2, 3, ... + end = start + log_text = 'minmax' + if timeframe is None or start is None: + return + + # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_minmax_monat_min_15m' + elif _var[0] == 'serie' and _var[1] == 'minmax': + timeframe = convert_timeframe(_var[2]) + func = _var[3] + start = to_int(_var[4][:-1]) + end = 0 + group = convert_timeframe(_var[4][len(_var[4]) - 1]) + log_text = 'serie_min/max/avg' + if timeframe is None or start is None or group is None: + return + else: + self.logger.info(f"_handle_min_max: No adequate function for {db_addon_fct=} found.") + return + + if func not in ALLOWED_MINMAX_FUNCS: + self.logger.info(f"_handle_min_max: Called {func=} not in allowed functions={ALLOWED_MINMAX_FUNCS}.") + return + + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} + + if self.execute_debug: + self.logger.debug(f"_handle_min_max: db_addon_fct={log_text} function detected. 
{query_params=}") + + return self._query_item(**query_params) + + def _handle_zaehlerstand(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: + """ + Handle execution of Zaehlerstand calculation + """ + # handle all on_change functions + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on-change function with 'zaehlerstand' detected; will be calculated by next change of database item") + return + + _var = db_addon_fct.split('_') + group = None + group2 = None + + # handle functions starting with 'zaehlerstand' like 'zaehlerstand_heute_minus1' + if len(_var) == 3 and _var[1] == 'zaehlerstand': + func = 'max' + timeframe = convert_timeframe(_var[1]) + start = to_int(_var[2][-1]) + end = start + log_text = 'zaehlerstand' + if timeframe is None or start is None: + return + + # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_zaehlerstand_tag_30d' + elif _var[0] == 'serie' and _var[1] == 'zaehlerstand': + func = 'max' + timeframe = convert_timeframe(_var[2]) + start = to_int(_var[3][:-1]) + end = 0 + group = convert_timeframe(_var[3][len(_var[3]) - 1]) + log_text = 'serie_min/max/avg' + if timeframe is None or start is None or group is None: + return + else: + self.logger.info(f"_handle_zaehlerstand: No adequate function for {db_addon_fct=} found.") + return + + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} + + if self.execute_debug: + self.logger.debug(f"_handle_zaehlerstand: db_addon_fct={log_text} function detected. {query_params=}") + + return self._query_item(**query_params) + + def _handle_verbrauch(self, database_item: Item, db_addon_fct: str, ignore_value=None): + """ + Handle execution of verbrauch calculation + """ + + self.logger.debug(f"_handle_verbrauch called with {database_item=} and {db_addon_fct=}") + + def consumption_calc(c_start, c_end) -> Union[float, None]: + """ + Handle query for Verbrauch + + :param c_start: beginning of timeframe + :param c_end: end of timeframe + """ + + if self.prepare_debug: + self.logger.debug(f"_consumption_calc called with {database_item=}, {timeframe=}, {c_start=}, {c_end=}") + + _result = None + _query_params = {'item': database_item, 'timeframe': timeframe} + + # get value for end and check it; + _query_params.update({'func': 'max', 'start': c_end, 'end': c_end}) + value_end = self._query_item(**_query_params)[0][1] + + if self.prepare_debug: + self.logger.debug(f"_consumption_calc {value_end=}") + + if value_end is None: # if None (Error) return + return + elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null + return 0 + + # get value for start and check it; + _query_params.update({'func': 'min', 'start': c_end, 'end': c_end}) + value_start = self._query_item(**_query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"_consumption_calc {value_start=}") + + if value_start is None: # if None (Error) return + return + + if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. + self.logger.info(f"No DB Entry found for requested start date. 
Looking for next DB entry.") + _query_params.update({'func': 'next', 'start': c_start, 'end': c_end}) + value_start = self._query_item(**_query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"_consumption_calc: next available value is {value_start=}") + + # calculate result + if value_start is not None: + return round(value_end - value_start, 1) + + # handle all on_change functions of format 'verbrauch_timeframe' like 'verbrauch_heute' + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on_change function with 'verbrauch' detected; will be calculated by next change of database item") + return + + _var = db_addon_fct.split('_') + + # handle all functions 'verbrauch' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' + if len(_var) == 3 and _var[1] in ['heute', 'woche', 'monat', 'jahr'] and _var[2].startswith('minus'): + timeframe = convert_timeframe(_var[1]) + timedelta = to_int(_var[2][-1]) + if timedelta is None or timeframe is None: + return + + if self.execute_debug: + self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") + + return consumption_calc(c_start=timedelta + 1, c_end=timedelta) + + # handle all functions of format 'verbrauch_function_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' + elif len(_var) == 5 and _var[1] == 'rolling' and _var[4].startswith('minus'): + func = _var[1] + window = _var[2] # 12m + window_inc = to_int(window[:-1]) # 12 + window_dur = convert_timeframe(window[-1]) # day, week, month, year + timeframe = convert_timeframe(_var[3]) # day, week, month, year + timedelta = to_int(_var[4][-1]) # 1 + endtime = timedelta + + if window_inc is None or window_dur is None or timeframe is None or timedelta is None: + return + + if self.execute_debug: + self.logger.debug(f"_handle_verbrauch: '{func}' function detected. {window=}, {timeframe=}, {timedelta=}") + + if window_dur in ['day', 'week', 'month', 'year']: + starttime = convert_duration(timeframe, window_dur) * window_inc + return consumption_calc(c_start=starttime, c_end=endtime) + + # handle all functions of format 'verbrauch_timeframe_timedelta' like 'verbrauch_jahreszeitraum_minus1' + elif len(_var) == 3 and _var[1] == 'jahreszeitraum' and _var[2].startswith('minus'): + timeframe = convert_timeframe(_var[1]) # day, week, month, year + timedelta = to_int(_var[2][-1]) # 1 oder 2 oder 3 + if timedelta is None or timeframe is None: + return + + if self.execute_debug: + self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") + + today = datetime.date.today() + year = today.year - timedelta + start_date = datetime.date(year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 
00:00:00 ergibt + end_date = today - relativedelta(years=timedelta) + start = (today - start_date).days + end = (today - end_date).days + + return consumption_calc(c_start=start, c_end=end) + + # handle all functions of format 'serie_verbrauch_timeframe_countgroup' like 'serie_verbrauch_tag_30d' + elif db_addon_fct.startswith('serie_') and len(_var) == 4: + self.logger.debug(f"_handle_verbrauch serie reached") + func = 'diff_max' + timeframe = convert_timeframe(_var[2]) + start = to_int(_var[3][:-1]) + group = convert_timeframe(_var[3][len(_var[3]) - 1]) + group2 = None + if timeframe is None or start is None or group is None: + self.logger.warning(f"For calculating '{db_addon_fct}' not all mandatory parameters given. {timeframe=}, {start=}, {group=}") + return + + query_params = {'func': func, 'item': database_item, 'timeframe': timeframe, 'start': start, 'end': 0, 'group': group, 'group2': group2, 'ignore_value': ignore_value} + + if self.execute_debug: + self.logger.debug(f"_handle_verbrauch: 'serie_verbrauch_timeframe_countgroup' function detected. {query_params=}") + + return self._query_item(**query_params) + + else: + self.logger.info(f"_handle_verbrauch: No adequate function for {db_addon_fct=} found.") + return + + def _handle_tagesmitteltemperatur(self, database_item: Item, db_addon_fct: str, ignore_value=None, params: dict = None) -> list: + """ + Query database for tagesmitteltemperatur + + :param database_item: item object or item_id for which the query should be done + :param db_addon_fct + :param ignore_value + :param params: + :return: tagesmitteltemperatur + """ + + # handle all on_change functions + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on_change function with 'tagesmitteltemperatur' detected; will be calculated by next change of database item") + return [] + + _var = db_addon_fct.split('_') + group = None + group2 = None + + # handle tagesmitteltemperatur + if db_addon_fct == 'tagesmitteltemperatur': + if not params: + return [] + + func = 'max' + timeframe = convert_timeframe(params.get('timeframe')) + log_text = 'tagesmitteltemperatur' + count = to_int(params.get('count')) + if timeframe is None or not count: + return [] + + start, end = count_to_start(count) + + # handle 'tagesmittelwert_timeframe_timedelta' like 'tagesmittelwert_heute_minus1' + elif len(_var) == 3 and _var[2].startswith('minus'): + func = 'max' + timeframe = convert_timeframe(_var[1]) + start = to_int(_var[2][-1]) + end = start + log_text = 'tagesmittelwert_timeframe_timedelta' + if timeframe is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_countgroup' like 'serie_tagesmittelwert_0d' + elif db_addon_fct.startswith('serie_') and len(_var) == 3: + # 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, + func = 'max' + timeframe = 'year' + log_text = 'serie_tagesmittelwert_countgroup' + start = to_int(_var[2][:-1]) + end = 0 + group = convert_timeframe(_var[2][len(_var[2]) - 1]) + if group is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_group2_count_group' like 'serie_tagesmittelwert_stunde_0d' + elif db_addon_fct.startswith('serie_') and len(_var) == 4: + # 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, + # 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, + func = 'avg1' + timeframe = 
'day' + log_text = 'serie_tagesmittelwert_group2_countgroup' + start = to_int(_var[3][:-1]) + end = 0 + group = 'hour' + group2 = convert_timeframe(_var[3][len(_var[3]) - 1]) + if group2 is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_group2_start_endgroup' like 'serie_tagesmittelwert_stunde_30_0d' + elif db_addon_fct.startswith('serie_') and len(_var) == 5: + timeframe = 'day' + method = 'raw' + start = to_int(_var[3]) + end = to_int(_var[4][:-1]) + if start is None or end is None: + return [] + + return self._prepare_temperature_list(database_item=database_item, timeframe=timeframe, start=start, end=end, method=method) + + # handle everything else + else: + self.logger.info(f"_handle_tagesmitteltemperatur: No adequate function for {db_addon_fct=} found.") + return [] + + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} + + if self.execute_debug: + self.logger.debug(f"_handle_tagesmitteltemperatur: db_addon_fct={log_text} function detected. {query_params=}") + + return self._query_item(**query_params) + + def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: + """ + Query database for kaeltesumme for given year or year/month + https://de.wikipedia.org/wiki/K%C3%A4ltesumme + + :param database_item: item object or item_id for which the query should be done + :param year: year the kaeltesumme should be calculated for + :param month: month the kaeltesumme should be calculated for + :return: kaeltesumme + """ + + self.logger.debug(f"_handle_kaeltesumme called with {database_item=}, {year=}, {month=}") + + # check validity of given year + if not valid_year(year): + self.logger.error(f"_handle_kaeltesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year + if year == 'current': + if datetime.date.today() < datetime.date(int(datetime.date.today().year), 9, 21): + year = datetime.date.today().year - 1 + else: + year = datetime.date.today().year + + # define start_date and end_date + if month is None: + start_date = datetime.date(int(year), 9, 21) + end_date = datetime.date(int(year) + 1, 3, 22) + elif valid_month(month): + start_date = datetime.date(int(year), int(month), 1) + end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) + else: + self.logger.error(f"_handle_kaeltesumme: Month for item={database_item.path()} was {month}. This is not a valid month. Query cancelled.") + return + + # define start / end + today = datetime.date.today() + if start_date > today: + self.logger.error(f"_handle_kaeltesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") + return + + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 + if start < end: + self.logger.error(f"_handle_kaeltesumme: End time for query of item={database_item.path()} is before start time. 
Query cancelled.") + return + + # get raw data as list + self.logger.debug("_handle_kaeltesumme: Try to get raw data") + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_kaeltesumme: raw_value_list={raw_data=}") + + # calculate value + if raw_data is None: + return + elif isinstance(raw_data, list): + # akkumulieren alle negativen Werte + ks = 0 + for entry in raw_data: + if entry[1] < 0: + ks -= entry[1] + return int(round(ks, 0)) + + def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: + """ + Query database for waermesumme for given year or year/month + https://de.wikipedia.org/wiki/W%C3%A4rmesumme + + :param database_item: item object or item_id for which the query should be done + :param year: year the waermesumme should be calculated for; "current" for current year + :param month: month the waermesumme should be calculated for + :return: waermesumme + """ + + # start: links / älterer Termin end: rechts / jüngerer Termin + + # check validity of given year + if not valid_year(year): + self.logger.error(f"_handle_waermesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year + if year == 'current': + year = datetime.date.today().year + + # define start_date, end_date + if month is None: + start_date = datetime.date(int(year), 1, 1) + end_date = datetime.date(int(year), 9, 21) + elif valid_month(month): + start_date = datetime.date(int(year), int(month), 1) + end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) + else: + self.logger.error(f"_handle_waermesumme: Month for item={database_item.path()} was {month}. This is not a valid month. Query cancelled.") + return + + # check start_date + today = datetime.date.today() + if start_date > today: + self.logger.info(f"_handle_waermesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") + return + + # define start / end + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 + + # check end + if start < end: + self.logger.error(f"_handle_waermesumme: End time for query of item={database_item.path()} is before start time. 
Query cancelled.") + return + + # get raw data as list + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_waermesumme: raw_value_list={raw_data=}") + + # set threshold to min 0 + threshold = max(0, threshold) + + # calculate value + if raw_data is None: + return + elif isinstance(raw_data, list): + # akkumulieren alle Werte, größer/gleich Schwellenwert + ws = 0 + for entry in raw_data: + if entry[1] >= threshold: + ws += entry[1] + return int(round(ws, 0)) + + def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, str]) -> Union[int, None]: + """ + Query database for gruenlandtemperatursumme for given year or year/month + https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme + + :param database_item: item object for which the query should be done + :param year: year the gruenlandtemperatursumme should be calculated for + :return: gruenlandtemperatursumme + """ + + if not valid_year(year): + self.logger.error(f"_handle_gruenlandtemperatursumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year + if year == 'current': + year = datetime.date.today().year + + # define start_date, end_date + start_date = datetime.date(int(year), 1, 1) + end_date = datetime.date(int(year), 9, 21) + + # check start_date + today = datetime.date.today() + if start_date > today: + self.logger.info(f"_handle_gruenlandtemperatursumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") + return + + # define start / end + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 + + # check end + if start < end: + self.logger.error(f"_handle_gruenlandtemperatursumme: End time for query of item={database_item.path()} is before start time. Query cancelled.") + return + + # get raw data as list + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_gruenlandtemperatursumme: raw_value_list={raw_data}") + + # calculate value + if raw_data is None: + return + elif isinstance(raw_data, list): + # akkumulieren alle positiven Tagesmitteltemperaturen, im Januar gewichtet mit 50%, im Februar mit 75% + gts = 0 + for entry in raw_data: + timestamp, value = entry + if value > 0: + dt = datetime.datetime.fromtimestamp(timestamp / 1000) + if dt.month == 1: + value = value * 0.5 + elif dt.month == 2: + value = value * 0.75 + gts += value + return int(round(gts, 0)) + + def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], method: int = 0, threshold: int = 10): + """ + Calculate "wachstumsgradtage" for given year with temperature thershold + https://de.wikipedia.org/wiki/Wachstumsgradtag + + :param database_item: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param method: calculation method to be used + :param threshold: temperature in °C as threshold for evaluation + :return: wachstumsgradtage + """ + + if not valid_year(year): + self.logger.error(f"_handle_wachstumsgradtage: Year for item={database_item.path()} was {year}. This is not a valid year. 
Query cancelled.")
+            return
+
+        # define year
+        if year == 'current':
+            year = datetime.date.today().year
+
+        # define start_date, end_date
+        start_date = datetime.date(int(year), 1, 1)
+        end_date = datetime.date(int(year), 9, 21)
+
+        # check start_date
+        today = datetime.date.today()
+        if start_date > today:
+            self.logger.info(f"_handle_wachstumsgradtage: Start time for query of item={database_item.path()} is in future. Query cancelled.")
+            return
+
+        # define start / end
+        start = (today - start_date).days
+        end = (today - end_date).days if end_date < today else 0
+
+        # check end
+        if start < end:
+            self.logger.error(f"_handle_wachstumsgradtage: End time for query of item={database_item.path()} is before start time. Query cancelled.")
+            return
+
+        # get raw data as list
+        raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax')
+        if self.execute_debug:
+            self.logger.debug(f"_handle_wachstumsgradtage: raw_value_list={raw_data}")
+
+        # calculate value
+        if raw_data is None:
+            return
+
+        elif isinstance(raw_data, list):
+            # simple average calculation // accumulate the positive difference between the mean of daily minimum and daily maximum (capped at 30 °C) and the threshold
+            wgte = 0
+            wgte_list = []
+            if method == 0 or method == 10:
+                self.logger.info(f"Calculate 'Wachstumsgradtag' according to 'Berechnung des einfachen Durchschnitts' (simple average).")
+                for entry in raw_data:
+                    timestamp, min_val, max_val = entry
+                    wgt = (((min_val + min(30, max_val)) / 2) - threshold)
+                    if wgt > 0:
+                        wgte += wgt
+                    wgte_list.append([timestamp, int(round(wgte, 0))])
+                if method == 0:
+                    return int(round(wgte, 0))
+                else:
+                    return wgte_list
+
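+            # For illustration (assumed values): with threshold=10, a day with min 8 °C and max 24 °C
+            # contributes ((8 + min(30, 24)) / 2) - 10 = 6 growing degree days in the simple-average method.
+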
+            # modified simple average calculation // accumulate the positive difference between the mean of daily minimum (at least the threshold) and daily maximum (capped at 30 °C) and the threshold
+            elif method == 1 or method == 11:
+                self.logger.info(f"Calculate 'Wachstumsgradtag' according to 'Modifizierte Berechnung des einfachen Durchschnitts' (modified simple average).")
+                for entry in raw_data:
+                    timestamp, min_val, max_val = entry
+                    wgt = (((max(threshold, min_val) + min(30.0, max_val)) / 2) - threshold)
+                    if wgt > 0:
+                        wgte += wgt
+                    wgte_list.append([timestamp, int(round(wgte, 0))])
+                if method == 1:
+                    return int(round(wgte, 0))
+                else:
+                    return wgte_list
+
+            # count the days on which the daily mean temperature was above the threshold
+            elif method == 2 or method == 12:
+                self.logger.info(f"Calculate 'Wachstumsgradtag' according to 'Anzahl der Tage, bei denen die Tagesmitteltemperatur oberhalb des Schwellenwertes lag' (count of days above threshold).")
+                for entry in raw_data:
+                    timestamp, min_val, max_val = entry
+                    wgt = (((min_val + min(30, max_val)) / 2) - threshold)
+                    if wgt > 0:
+                        wgte += 1
+                    wgte_list.append([timestamp, wgte])
+                if method == 2:
+                    return wgte
+                else:
+                    return wgte_list
+
+            else:
+                self.logger.info(f"Method for 'Wachstumsgradtag' calculation not defined.")
+
+    def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], method: str = 'raw'):
+        """
+        Provide list of lists having timestamp and temperature(s) per day
+
+        :param database_item: item object or item_id for which the query should be done
+        :param year: year the temperature series should be provided for
+        :param method: calculation method to be used
+        :return: list of temperatures
+        """
+
+        if not valid_year(year):
+            self.logger.error(f"_handle_temperaturserie: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.")
+            return
+
+        # define year
+        if year == 'current':
+            year = datetime.date.today().year
+
+        # define start_date, end_date
+        start_date = datetime.date(int(year), 1, 1)
+        end_date = datetime.date(int(year), 12, 31)
+
+        # check start_date
+        today = datetime.date.today()
+        if start_date > today:
+            self.logger.info(f"_handle_temperaturserie: Start time for query of item={database_item.path()} is in future. Query cancelled.")
+            return
+
+        # define start / end
+        start = (today - start_date).days
+        end = (today - end_date).days if end_date < today else 0
+
+        # check end
+        if start < end:
+            self.logger.error(f"_handle_temperaturserie: End time for query of item={database_item.path()} is before start time. Query cancelled.")
+            return
+
+        # check method
+        if method not in ['hour', 'raw', 'minmax']:
+            self.logger.error(f"_handle_temperaturserie: Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. 
Query cancelled.") + return + + # get raw data as list + temp_list = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) + if self.execute_debug: + self.logger.debug(f"_handle_temepraturserie: {temp_list=}") + + return temp_list + + def _prepare_temperature_list(self, database_item: Item, timeframe: str, start: int, end: int = 0, ignore_value=None, method: str = 'hour') -> Union[list, None]: + """ + returns list of lists having timestamp and temperature(s) per day + + :param database_item: item object or item_id for which the query should be done + :param timeframe: timeframe for query + :param start: increments for timeframe from now to start + :param end: increments for timeframe from now to end + :param ignore_value: value to be ignored during query + :param method: Calculation method + :return: list of temperatures + """ + + def _create_temp_dict() -> dict: + """create dict based on database query result like {'date1': {'hour1': [temp values], 'hour2': [temp values], ...}, 'date2': {'hour1': [temp values], 'hour2': [temp values], ...}, ...}""" + + _temp_dict = {} + for _entry in raw_data: + dt = datetime.datetime.utcfromtimestamp(_entry[0] / 1000) + date = dt.strftime('%Y-%m-%d') + hour = dt.strftime('%H') + if date not in _temp_dict: + _temp_dict[date] = {} + if hour not in _temp_dict[date]: + _temp_dict[date][hour] = [] + _temp_dict[date][hour].append(_entry[1]) + return _temp_dict + + def _calculate_hourly_average(): + """ calculate hourly average based on list of temperatures and update temp_dict""" + + for _date in temp_dict: + for hour in temp_dict[_date]: + hour_raw_value_list = temp_dict[_date][hour] + # hour_value = round(sum(hour_raw_value_list) / len(hour_raw_value_list), 1) # Durchschnittsbildung über alle Werte der Liste + hour_value = hour_raw_value_list[0] # Nehme den ersten Wert der Liste als Stundenwert (kommt am nächsten an die Definition, den Wert exakt zur vollen Stunden zu nehmen) + temp_dict[_date][hour] = [hour_value] + + def _create_list_timestamp_avgtemp() -> list: + """Create list of list with [[timestamp1, value1], [timestamp2, value2], ...] based on temp_dict""" + + _temp_list = [] + for _date in temp_dict: + + # wenn mehr als 20 Stundenwerte vorliegen, berechne den Tagesdurchschnitt über alle Werte + if len(temp_dict[_date]) >= 20: + _values = sum(list(temp_dict[_date].values()), []) + _values_avg = round(sum(_values) / len(_values), 1) + + # wenn für 00, 06, 12 und 18 Uhr Werte vorliegen, berechne den Tagesdurchschnitt über diese Werte + elif '00' in temp_dict[_date] and '06' in temp_dict[_date] and '12' in temp_dict[_date] and '18' in temp_dict[_date]: + _values_avg = round((temp_dict[_date]['00'][0] + temp_dict[_date]['06'][0] + temp_dict[_date]['12'][0] + temp_dict[_date]['18'][0]) / 4, 1) + + # sonst berechne den Tagesdurchschnitt über alle Werte + else: + _values = sum(list(temp_dict[_date].values()), []) + _values_avg = round(sum(_values) / len(_values), 1) + + _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) + _temp_list.append([_timestamp, _values_avg]) + return _temp_list + + def _create_list_timestamp_minmaxtemp() -> list: + """Create list of list with [[timestamp1, min value1, max_value1], [timestamp2, min value2, max_value2], ...] 
based on temp_dict""" + + _temp_list = [] + for _date in temp_dict: + _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) + _day_values = sum(list(temp_dict[_date].values()), []) + _temp_list.append([_timestamp, min(_day_values), max(_day_values)]) + return _temp_list + + # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt + if method == 'hour': + raw_data = self._query_item(func='avg', item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + + # create list of list like database query response + temp_list = _create_list_timestamp_avgtemp() + self.logger.debug(f"{temp_list=}") + return temp_list + + # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird hier im Plugin ermittelt ermittelt + elif method == 'raw': + raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + self.logger.debug(f"raw: {temp_dict=}") + + # calculate 'tagesdurchschnitt' and create list of list like database query response + _calculate_hourly_average() + self.logger.debug(f"raw: {temp_dict=}") + + # create list of list like database query response + temp_list = _create_list_timestamp_avgtemp() + self.logger.debug(f"{temp_list=}") + return temp_list + + # temp_list = [[timestamp1, min-value1, max-value1], [timestamp2, min-value2, max-value2], [timestamp3, min-value3, max-value3], ...] 
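+        # Illustrative example (assumed values): temp_dict = {'2023-01-05': {'00': [1.0], '06': [0.5], '12': [4.2], '18': [2.0]}}
+        # would yield [[<timestamp of 2023-01-05>, 0.5, 4.2]] from _create_list_timestamp_minmaxtemp().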
+ elif method == 'minmax': + raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + self.logger.debug(f"raw: {temp_dict=}") + + # create list of list like database query response + temp_list = _create_list_timestamp_minmaxtemp() + self.logger.debug(f"{temp_list=}") + return temp_list + + def _create_due_items(self) -> list: + """ + Create set of items which are due and resets cache dicts + + :return: set of items, which need to be processed + + """ + + # täglich zu berechnende Items zur Action Liste hinzufügen + _todo_items = set() + _todo_items.update(set(self._daily_items())) + self.current_values[DAY] = {} + self.previous_values[DAY] = {} + + # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday(self.shtime.today()) == 1: + _todo_items.update(set(self._weekly_items())) + self.current_values[WEEK] = {} + self.previous_values[WEEK] = {} + + # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: + _todo_items.update(set(self._monthly_items())) + self.current_values[MONTH] = {} + self.previous_values[MONTH] = {} + + # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: + _todo_items.update(set(self._yearly_items())) + self.current_values[YEAR] = {} + self.previous_values[YEAR] = {} + + return list(_todo_items) + + def _check_db_existence(self) -> bool: + """ + Check existence of database plugin with given config name + + :return: Status of db existence + """ + + try: + _db_plugin = self.plugins.return_plugin(self.db_configname) + except Exception as e: + self.logger.error(f"Database plugin not loaded, Error was {e}. No need for DatabaseAddOn Plugin.") + return False + else: + if not _db_plugin: + self.logger.error(f"Database plugin not loaded or given ConfigName {self.db_configname} not correct. No need for DatabaseAddOn Plugin.") + return False + else: + self.logger.debug(f"Corresponding plugin 'database' with given config name '{self.db_configname}' found.") + self._db_plugin = _db_plugin + return self._get_db_parameter() + + def _get_db_parameter(self) -> bool: + """ + Get driver of database and connection parameter + + :return: Status of db connection parameters + """ + + try: + self.db_driver = self._db_plugin.get_parameter_value('driver') + except Exception as e: + self.logger.error(f"Error {e} occurred during getting database plugin parameter 'driver'. 
DatabaseAddOn Plugin not loaded.") + return False + else: + if self.db_driver.lower() == 'pymysql': + self.logger.debug(f"Database is of type 'mysql' found.") + if self.db_driver.lower() == 'sqlite3': + self.logger.debug(f"Database is of type 'sqlite' found.") + + # get database plugin parameters + try: + db_instance = self._db_plugin.get_instance_name() + if db_instance != "": + self.db_instance = db_instance + self.item_attribute_search_str = f"{self.item_attribute_search_str}@{self.db_instance}" + self.connection_data = self._db_plugin.get_parameter_value('connect') # pymsql ['host:localhost', 'user:smarthome', 'passwd:smarthome', 'db:smarthome', 'port:3306'] + self.logger.debug(f"Database Plugin available with instance={self.db_instance} and connection={self.connection_data}") + except Exception as e: + self.logger.error(f"Error {e} occurred during getting database plugin parameters. DatabaseAddOn Plugin not loaded.") + return False + else: + return True + + def _initialize_db(self) -> bool: + """ + Initializes database connection + + :return: Status of initialization + """ + + try: + if not self._db.connected(): + # limit connection requests to 20 seconds. + current_time = time.time() + time_delta_last_connect = current_time - self.last_connect_time + # self.logger.debug(f"DEBUG: delta {time_delta_last_connect}") + if time_delta_last_connect > 20: + self.last_connect_time = time.time() + self._db.connect() + else: + self.logger.error(f"_initialize_db: Database reconnect suppressed: Delta time: {time_delta_last_connect}") + return False + except Exception as e: + self.logger.critical(f"_initialize_db: Database: Initialization failed: {e}") + return False + else: + return True + + def _check_db_connection_setting(self) -> None: + """ + Check Setting of DB connection for stable use. + """ + try: + connect_timeout = int(self._get_db_connect_timeout()[1]) + if connect_timeout < self.default_connect_timeout: + self.logger.warning(f"DB variable 'connect_timeout' should be adjusted for proper working to {self.default_connect_timeout}. Current setting is {connect_timeout}. You need to insert adequate entries into /etc/mysql/my.cnf within section [mysqld].") + except Exception: + pass + + try: + net_read_timeout = int(self._get_db_net_read_timeout()[1]) + if net_read_timeout < self.default_net_read_timeout: + self.logger.warning(f"DB variable 'net_read_timeout' should be adjusted for proper working to {self.default_net_read_timeout}. Current setting is {net_read_timeout}. 
You need to insert adequate entries into /etc/mysql/my.cnf within section [mysqld].") + except Exception: + pass + + def _get_oldest_log(self, item: Item) -> int: + """ + Get timestamp of the oldest entry of item from cache dict or get value from db and put it to cache dict + + :param item: Item, for which query should be done + :return: timestamp of the oldest log + """ + + _oldest_log = self.item_cache.get(item, {}).get('oldest_log', None) + + if _oldest_log is None: + item_id = self._get_itemid(item) + _oldest_log = self._read_log_oldest(item_id) + if item not in self.item_cache: + self.item_cache[item] = {} + self.item_cache[item]['oldest_log'] = _oldest_log + + if self.prepare_debug: + self.logger.debug(f"_get_oldest_log for item {item.path()} = {_oldest_log}") + + return _oldest_log + + def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: + """ + Get value of the oldest log of item from cache dict or get value from db and put it to cache dict + + :param item: Item, for which query should be done + :return: oldest value + """ + + _oldest_entry = self.item_cache.get(item, {}).get('_oldest_entry', None) + + if _oldest_entry is not None: + _oldest_value = _oldest_entry[0][4] + else: + item_id = self._get_itemid(item) + validity = False + i = 0 + _oldest_value = -999999999 + while validity is False: + oldest_entry = self._read_log_timestamp(item_id, self._get_oldest_log(item)) + i += 1 + if isinstance(oldest_entry, list) and isinstance(oldest_entry[0], tuple) and len(oldest_entry[0]) >= 4: + if item not in self.item_cache: + self.item_cache[item] = {} + self.item_cache[item]['oldest_entry'] = oldest_entry + _oldest_value = oldest_entry[0][4] + validity = True + elif i == 10: + validity = True + self.logger.error(f"oldest_value for item {item.path()} could not be read; value is set to -999999999") + + if self.prepare_debug: + self.logger.debug(f"_get_oldest_value for item {item.path()} = {_oldest_value}") + + return _oldest_value + + def _get_itemid(self, item: Item) -> int: + """ + Returns the ID of the given item from cache dict or request it from database + + :param item: Item to get the ID for + :return: id of the item within the database + """ + + # self.logger.debug(f"_get_itemid called with item={item.path()}") + _item_id = self.item_cache.get(item, {}).get('id', None) + + if _item_id is None: + row = self._read_item_table(item_path=str(item.path())) + if row and len(row) > 0: + _item_id = int(row[0]) + if item not in self.item_cache: + self.item_cache[item] = {} + self.item_cache[item]['id'] = _item_id + + return _item_id + + def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None]: + """ + Get DB item id for query + + :param item: item, the query should be done for + + """ + + if isinstance(item, Item): + item_id = self._get_itemid(item) + elif isinstance(item, str) and item.isdigit(): + item_id = int(item) + elif isinstance(item, int): + item_id = item + else: + item_id = None + return item_id + + def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: + """ + Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. 
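+
+        Example (illustrative): func='max', timeframe='day', start=1, end=1 queries the maximum of yesterday,
+        while func='avg', timeframe='day', start=30, end=0 covers the last 30 days up to the end of today.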
+ + :param func: function to be used at query + :param item: item object or item_id for which the query should be done + :param timeframe: time increment für definition of start, end (day, week, month, year) + :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) + :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning end of today, end of last week, end of last month, end of last year) + :param group: first grouping parameter (default = None, possible values: day, week, month, year) + :param group2: second grouping parameter (default = None, possible values: day, week, month, year) + :param ignore_value: value of val_num, which will be ignored during query + + :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for + """ + + def _handle_query_result(query_result) -> list: + """ + Handle query result containing list + """ + + # if query delivers None, abort + if query_result is None: + # if query delivers None, abort + self.logger.error(f"Error occurred during _query_item. Aborting...") + _result = [[None, None]] + elif len(query_result) == 0: + _result = [[0, 0]] + self.logger.info(f" No values for item in requested timeframe in database found.") + else: + _result = [] + for element in query_result: + timestamp = element[0] + value = element[1] + if timestamp and value is not None: + _result.append([timestamp, round(value, 1)]) + if not _result: + _result = [[None, None]] + + return _result + + if self.prepare_debug: + self.logger.debug(f"_query_item called with {func=}, item={item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value=}") + + # set default result + result = [[None, None]] + + # check correctness of timeframe + if timeframe not in ALLOWED_QUERY_TIMEFRAMES: + self.logger.error(f"_query_item: Requested {timeframe=} for item={item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") + return result + + # check start / end for being int + if isinstance(start, str) and start.isdigit(): + start = int(start) + if isinstance(end, str) and end.isdigit(): + end = int(end) + if not isinstance(start, int) and not isinstance(end, int): + return result + + # check correctness of start / end + if start < end: + self.logger.warning(f"_query_item: Requested {start=} for item={item.path()} is not valid since {start=} < {end=}. Query cancelled.") + return result + + # define item_id + item_id = self._get_itemid(item) + if not item_id: + self.logger.error(f"_query_item: ItemId for item={item.path()} not found. 
Query cancelled.") + return result + + # define start and end of query as timestamp in microseconds + ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) + oldest_log = int(self._get_oldest_log(item)) + + if start is None: + ts_start = oldest_log + + if self.prepare_debug: + self.logger.debug(f"_query_item: Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") + + # check if values for end time and start time are in database + if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) + self.logger.info(f"_query_item: Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + return result + + if ts_start < oldest_log: + if not self.use_oldest_entry: + self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + return result + else: + self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") + ts_start = oldest_log + + query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value': ignore_value} + result = _handle_query_result(self._query_log_timestamp(**query_params)) + + if self.prepare_debug: + self.logger.debug(f"_query_item: value for item={item.path()} with {timeframe=}, {func=}: {result}") + + return result + + def _init_cache_dicts(self) -> None: + """ + init all cache dicts + """ + + self.logger.info(f"All cache_dicts will be initiated.") + + self.item_cache = {} + + self.current_values = { + DAY: {}, + WEEK: {}, + MONTH: {}, + YEAR: {} + } + + self.previous_values = { + DAY: {}, + WEEK: {}, + MONTH: {}, + YEAR: {} + } + + def _clear_queue(self) -> None: + """ + Clear working queue + """ + + self.logger.info(f"Working queue will be cleared. Calculation run will end.") + self.item_queue.queue.clear() + + def _work_item_queue_thread_startup(self): + """ + Start a thread to work item queue + """ + + try: + _name = 'plugins.' 
+ self.get_fullname() + '.work_item_queue' + self.work_item_queue_thread = threading.Thread(target=self.work_item_queue, name=_name) + self.work_item_queue_thread.daemon = False + self.work_item_queue_thread.start() + self.logger.debug("Thread for 'work_item_queue_thread' has been started") + except threading.ThreadError: + self.logger.error("Unable to launch thread for 'work_item_queue_thread'.") + self.work_item_queue_thread = None + + def _work_item_queue_thread_shutdown(self): + """ + Shut down the thread to work item queue + """ + + if self.work_item_queue_thread: + self.work_item_queue_thread.join() + if self.work_item_queue_thread.is_alive(): + self.logger.error("Unable to shut down 'work_item_queue_thread' thread") + else: + self.logger.info("Thread 'work_item_queue_thread' has been terminated.") + self.work_item_queue_thread = None + + ############################## + # Database Query Preparation + ############################## + + def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = None, group2: str = None, ignore_value=None) -> Union[list, None]: + """ + Assemble a mysql query str and param dict based on given parameters, get query response and return it + + :param func: function to be used at query + :param item_id: database item_id for which the query should be done + :param ts_start: start for query given in timestamp in microseconds + :param ts_end: end for query given in timestamp in microseconds + :param group: first grouping parameter (default = None, possible values: day, week, month, year) + :param group2: second grouping parameter (default = None, possible values: day, week, month, year) + :param ignore_value: value of val_num, which will be ignored during query + + :return: query response + + """ + + # do debug log + if self.prepare_debug: + self.logger.debug(f"_query_log_timestamp: Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value=}") + + # define query parts + _select = { + 'avg': 'time, ROUND(AVG(val_num * duration) / AVG(duration), 1) as value ', + 'avg1': 'time, ROUND(AVG(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num), 1) as value ', + 'min': 'time, ROUND(MIN(val_num), 1) as value ', + 'max': 'time, ROUND(MAX(val_num), 1) as value ', + 'max1': 'time, ROUND(MAX(value), 1) as value FROM (SELECT time, ROUND(MAX(val_num), 1) as value ', + 'sum': 'time, ROUND(SUM(val_num), 1) as value ', + 'on': 'time, ROUND(SUM(val_bool * duration) / SUM(duration), 1) as value ', + 'integrate': 'time, ROUND(SUM(val_num * duration),1) as value ', + 'sum_max': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, ROUND(MAX(val_num), 1) as value ', + 'sum_avg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num * duration) / AVG(duration), 1) as value ', + 'sum_min_neg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, IF(min(val_num) < 0, ROUND(MIN(val_num), 1), 0) as value ', + 'diff_max': 'time, value1 - LAG(value1) OVER (ORDER BY time) AS value FROM (SELECT time, ROUND(MAX(val_num), 1) as value1 ', + 'next': 'time, val_num as value ', + 'raw': 'time, val_num as value ' + } + + _table_alias = { + 'avg': '', + 'avg1': ') AS table1 ', + 'min': '', + 'max': '', + 'max1': ') AS table1 ', + 'sum': '', + 'on': '', + 'integrate': '', + 'sum_max': ') AS table1 ', + 'sum_avg': ') AS table1 ', + 'sum_min_neg': ') AS table1 ', + 'diff_max': ') AS table1 ', + 'next': '', + 'raw': '', + } + + _order = "time DESC LIMIT 1 " if func == "next" else "time ASC " + + _where 
= "item_id = :item_id AND time < :ts_start" if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " + + _db_table = 'log ' + + _group_by_sql = { + "year": "GROUP BY YEAR(FROM_UNIXTIME(time/1000)) ", + "month": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m') ", + "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", + "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", + "hour": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m%d%H') ", + None: '' + } + + _group_by_sqlite = { + "year": "GROUP BY strftime('%Y', date((time/1000),'unixepoch')) ", + "month": "GROUP BY strftime('%Y%m', date((time/1000),'unixepoch')) ", + "week": "GROUP BY strftime('%Y%W', date((time/1000),'unixepoch')) ", + "day": "GROUP BY date((time/1000),'unixepoch') ", + "hour": "GROUP BY strftime('%Y%m%d%H', datetime((time/1000),'unixepoch')) ", + None: '' + } + + # select query parts depending in db driver + if self.db_driver.lower() == 'pymysql': + _group_by = _group_by_sql + elif self.db_driver.lower() == 'sqlite3': + _group_by = _group_by_sqlite + else: + self.logger.error('DB Driver unknown') + return + + # check correctness of func + if func not in _select: + self.logger.error(f"_query_log_timestamp: Requested {func=} for {item_id=} not defined. Query cancelled.") + return + + # check correctness of group and group2 + if group not in _group_by: + self.logger.error(f"_query_log_timestamp: Requested {group=} for item={item_id=} not defined. Query cancelled.") + return + if group2 not in _group_by: + self.logger.error(f"_query_log_timestamp: Requested {group2=} for item={item_id=} not defined. Query cancelled.") + return + + # handle ignore values + if func in ['min', 'max', 'max1', 'sum_max', 'sum_avg', 'sum_min_neg', 'diff_max']: # extend _where statement for excluding boolean values == 0 for defined functions + _where = f'{_where}AND val_bool = 1 ' + if ignore_value: # if value to be ignored are defined, extend _where statement + _where = f'{_where}AND val_num != {ignore_value} ' + + # set params + params = {'item_id': item_id, 'ts_start': ts_start} + if func != "next": + params.update({'ts_end': ts_end}) + + # assemble query + query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order}{_table_alias[func]}{_group_by[group2]}".strip() + + if self.db_driver.lower() == 'sqlite3': + query = query.replace('IF', 'IIF') + + # do debug log + if self.prepare_debug: + self.logger.debug(f"_query_log_timestamp: {query=}, {params=}") + + # request database and return result + return self._fetchall(query, params) + + def _read_log_all(self, item_id: int): + """ + Read the oldest log record for given item + + :param item_id: item_id to read the record for + :return: Log record for item_id + """ + + if self.prepare_debug: + self.logger.debug(f"_read_log_all: Called for {item_id=}") + + query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" + params = {'item_id': item_id} + result = self._fetchall(query, params) + return result + + def _read_log_oldest(self, item_id: int, cur=None) -> int: + """ + Read the oldest log record for given database ID + + :param item_id: Database ID of item to read the record for + :type item_id: int + :param cur: A database cursor object if available (optional) + + :return: Log record for the database ID + """ + + params = {'item_id': item_id} + query = "SELECT min(time) FROM log WHERE item_id = :item_id;" + return self._fetchall(query, params, cur=cur)[0][0] + + def _read_log_timestamp(self, item_id: int, timestamp: int, 
cur=None) -> Union[list, None]: + """ + Read database log record for given database ID + + :param item_id: Database ID of item to read the record for + :type item_id: int + :param timestamp: timestamp for the given value + :type timestamp: int + :param cur: A database cursor object if available (optional) + + :return: Log record for the database ID at given timestamp + """ + + params = {'item_id': item_id, 'timestamp': timestamp} + query = "SELECT * FROM log WHERE item_id = :item_id AND time = :timestamp;" + return self._fetchall(query, params, cur=cur) + + def _read_item_table(self, item_id: int = None, item_path: str = None): + """ + Read item table + + :param item_id: unique ID for item within database + :param item_path: item_path for Item within the database + + :return: Data for the selected item + :rtype: tuple + """ + + columns_entries = ('id', 'name', 'time', 'val_str', 'val_num', 'val_bool', 'changed') + columns = ", ".join(columns_entries) + + if item_id is None and item_path is None: + return + + if item_id: + query = f"SELECT {columns} FROM item WHERE id = {item_id}" + else: + query = f"SELECT {columns} FROM item WHERE name = '{item_path}'" + + return self._fetchone(query) + + def _get_db_version(self) -> str: + """ + Query the database version and provide result + """ + + query = 'SELECT sqlite_version()' if self.db_driver.lower() == 'sqlite3' else 'SELECT VERSION()' + return self._fetchone(query)[0] + + def _get_db_connect_timeout(self) -> list: + """ + Query database timeout + """ + + query = "SHOW GLOBAL VARIABLES LIKE 'connect_timeout'" + return self._fetchone(query) + + def _get_db_net_read_timeout(self) -> list: + """ + Query database timeout net_read_timeout + """ + + query = "SHOW GLOBAL VARIABLES LIKE 'net_read_timeout'" + return self._fetchone(query) + + ############################## + # Database Queries + ############################## + + def _execute(self, query: str, params: dict = None, cur=None) -> list: + if params is None: + params = {} + + return self._query(self._db.execute, query, params, cur) + + def _fetchone(self, query: str, params: dict = None, cur=None) -> list: + if params is None: + params = {} + + return self._query(self._db.fetchone, query, params, cur) + + def _fetchall(self, query: str, params: dict = None, cur=None) -> list: + if params is None: + params = {} + + return self._query(self._db.fetchall, query, params, cur) + + def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: + if params is None: + params = {} + + if self.sql_debug: + self.logger.debug(f"_query: Called with {query=}, {params=}, {cur=}") + + if not self._initialize_db(): + return None + + if cur is None: + if self._db.verify(5) == 0: + self.logger.error("_query: Connection to database not recovered.") + return None + # if not self._db.lock(300): + # self.logger.error("_query: Can't query due to fail to acquire lock.") + # return None + + query_readable = re.sub(r':([a-z_]+)', r'{\1}', query).format(**params) + + try: + tuples = fetch(query, params, cur=cur) + except Exception as e: + self.logger.error(f"_query: Error for query '{query_readable}': {e}") + else: + if self.sql_debug: + self.logger.debug(f"_query: Result of '{query_readable}': {tuples}") + return tuples + # finally: + # if cur is None: + # self._db.release() + + +############################## +# Helper functions +############################## + + +def params_to_dict(string: str) -> Union[dict, None]: + """ + Parse a string with named arguments and comma separation to dict; 
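    numeric values are cast to int and surrounding single or double quotes are stripped from the values. Expected input format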
(e.g. string = 'year=2022, month=12') + """ + + try: + res_dict = dict((a.strip(), b.strip()) for a, b in (element.split('=') for element in string.split(', '))) + except Exception: + return None + else: + # convert to int and remove possible double quotes + for key in res_dict: + if isinstance(res_dict[key], str): + res_dict[key] = res_dict[key].replace('"', '') + res_dict[key] = res_dict[key].replace("'", "") + if res_dict[key].isdigit(): + res_dict[key] = int(float(res_dict[key])) + + # check correctness if known key values (func=str, item, timeframe=str, start=int, end=int, count=int, group=str, group2=str, year=int, month=int): + for key in res_dict: + if key in ('func', 'timeframe', 'group', 'group2') and not isinstance(res_dict[key], str): + return None + elif key in ('start', 'end', 'count') and not isinstance(res_dict[key], int): + return None + elif key in 'year': + if not valid_year(res_dict[key]): + return None + elif key in 'month': + if not valid_month(res_dict[key]): + return None + return res_dict + + +def valid_year(year: Union[int, str]) -> bool: + """ + Check if given year is digit and within allowed range + """ + + if ((isinstance(year, int) or (isinstance(year, str) and year.isdigit())) and ( + 1980 <= int(year) <= datetime.date.today().year)) or (isinstance(year, str) and year == 'current'): + return True + else: + return False + + +def valid_month(month: Union[int, str]) -> bool: + """ + Check if given month is digit and within allowed range + """ + + if (isinstance(month, int) or (isinstance(month, str) and month.isdigit())) and (1 <= int(month) <= 12): + return True + else: + return False + + +def timestamp_to_timestring(timestamp: int) -> str: + """ + Parse timestamp from db query to string representing date and time + """ + + return datetime.datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') + + +def convert_timeframe(timeframe: str) -> str: + """ + Convert timeframe + + """ + + convertion = { + 'tag': 'day', + 'heute': 'day', + 'woche': 'week', + 'monat': 'month', + 'jahr': 'year', + 'vorjahreszeitraum': 'day', + 'jahreszeitraum': 'day', + 'h': 'hour', + 'd': 'day', + 'w': 'week', + 'm': 'month', + 'y': 'year' + } + + return convertion.get(timeframe) + + +def convert_duration(timeframe: str, window_dur: str) -> int: + """ + Convert duration + + """ + + _d_in_y = 365 + _d_in_w = 7 + _m_in_y = 12 + _w_in_y = _d_in_y / _d_in_w + _w_in_m = _w_in_y / _m_in_y + _d_in_m = _d_in_y / _m_in_y + + conversion = { + 'day': {'day': 1, + 'week': _d_in_w, + 'month': _d_in_m, + 'year': _d_in_y, + }, + 'week': {'day': 1 / _d_in_w, + 'week': 1, + 'month': _w_in_m, + 'year': _w_in_y + }, + 'month': {'day': 1 / _d_in_m, + 'week': 1 / _w_in_m, + 'month': 1, + 'year': _m_in_y + }, + 'year': {'day': 1 / _d_in_y, + 'week': 1 / _w_in_y, + 'month': 1 / _m_in_y, + 'year': 1 + } + } + + return round(int(conversion[timeframe][window_dur]), 0) + + +def count_to_start(count: int = 0, end: int = 0): + """ + Converts given count and end ot start and end + """ + + return end + count, end + + +def get_start_end_as_timestamp(timeframe: str, start: int, end: int) -> tuple: + """ + Provides start and end as timestamp in microseconds from timeframe with start and end + + :param timeframe: timeframe as week, month, year + :param start: beginning timeframe in x timeframes from now + :param end: end of timeframe in x timeframes from now + + :return: start time in timestamp in microseconds, end time in timestamp in microseconds + + """ + + return 
datetime_to_timestamp(get_start(timeframe, start)) * 1000, datetime_to_timestamp(get_end(timeframe, end)) * 1000 + + +def get_start(timeframe: str, start: int) -> datetime: + """ + Provides start as datetime + + :param timeframe: timeframe as week, month, year + :param start: beginning timeframe in x timeframes from now + + """ + + if start is None: + start = 0 + + if timeframe == 'week': + _dt_start = week_beginning(start) + elif timeframe == 'month': + _dt_start = month_beginning(start) + elif timeframe == 'year': + _dt_start = year_beginning(start) + else: + _dt_start = day_beginning(start) + + return _dt_start + + +def get_end(timeframe: str, end: int) -> datetime: + """ + Provides end as datetime + + :param timeframe: timeframe as week, month, year + :param end: end of timeframe in x timeframes from now + + """ + + if timeframe == 'week': + _dt_end = week_end(end) + elif timeframe == 'month': + _dt_end = month_end(end) + elif timeframe == 'year': + _dt_end = year_end(end) + else: + _dt_end = day_end(end) + + return _dt_end + + +def year_beginning(delta: int = 0) -> datetime: + """ + provides datetime of beginning of year of today minus x years + """ + + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt.replace(month=1, day=1) - relativedelta(years=delta) + + +def year_end(delta: int = 0) -> datetime: + """ + provides datetime of end of year of today minus x years + """ + + return year_beginning(delta) + relativedelta(years=1) + + +def month_beginning(delta: int = 0) -> datetime: + """ + provides datetime of beginning of month minus x month + """ + + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt.replace(day=1) - relativedelta(months=delta) + + +def month_end(delta: int = 0) -> datetime: + """ + provides datetime of end of month minus x month + """ + + return month_beginning(delta) + relativedelta(months=1) + + +def week_beginning(delta: int = 0) -> datetime: + """ + provides datetime of beginning of week minus x weeks + """ + + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) + + +def week_end(delta: int = 0) -> datetime: + """ + provides datetime of end of week minus x weeks + """ + + return week_beginning(delta) + relativedelta(days=6) + + +def day_beginning(delta: int = 0) -> datetime: + """ + provides datetime of beginning of today minus x days + """ + + return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) + + +def day_end(delta: int = 0) -> datetime: + """ + provides datetime of end of today minus x days + """ + + return day_beginning(delta) + relativedelta(days=1) + + +def datetime_to_timestamp(dt: datetime) -> int: + """ + Provides timestamp from given datetime + """ + + return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp()) + + +def to_int(arg) -> Union[int, None]: + try: + return int(arg) + except (ValueError, TypeError): + return None + + +ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] +ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] +ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] +ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 
'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] +ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] +ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] +ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] +ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 
'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] +ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] +ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] +ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] +ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] +ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] + + +""" + 'serie_minmax_monat_min_15m': {'func': 'min', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_monat_max_15m': {'func': 'max', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_monat_avg_15m': {'func': 'avg', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_woche_min_30w': {'func': 'min', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_woche_max_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_woche_avg_30w': {'func': 'avg', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_tag_min_30d': {'func': 'min', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_minmax_tag_max_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_minmax_tag_avg_30d': {'func': 'avg', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_verbrauch_tag_30d': {'func': 'diff_max', 'timeframe': 'day', 'start': 30, 
'end': 0, 'group': 'day'}, + 'serie_verbrauch_woche_30w': {'func': 'diff_max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_verbrauch_monat_18m': {'func': 'diff_max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, + 'serie_zaehlerstand_tag_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_zaehlerstand_woche_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_zaehlerstand_monat_18m': {'func': 'max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, + 'serie_waermesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, + 'serie_kaeltesumme_monat_24m': {'func': 'sum_min_neg', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, + 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, + 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, + 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, + 'gts': {'func': 'max', 'timeframe': 'year', 'start': None, 'end': None, 'group': 'day'}, +""" diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 5e2761fa9..85d8d3c5f 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -1,1093 +1,1093 @@ -# Metadata for the plugin -plugin: - # Global plugin attributes - type: system # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Add-On für das database Plugin zur Datenauswertung' - en: 'Add-On for the database plugin for data evaluation' - maintainer: sisamiwe - tester: bmx, onkelandy # Who tests this plugin? - state: ready # change to ready when done with development -# keywords: iot xyz -# documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page - support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.1.0 # Plugin version (must match the version specified in __init__.py) - sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) - py_minversion: 3.8 # minimum Python version to use for this plugin -# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) - multi_instance: false # plugin supports multi instance - restartable: unknown - classname: DatabaseAddOn # class containing the plugin - -parameters: - database_plugin_config: - type: str - default: 'database' - description: - de: "Konfiguration des Plugin 'Database', für die das Plugin 'DatabaseAddOn' verwendet wird" - en: "Config of Plugin 'Database, for which the Plugin 'DatabaseAddOn' should be active" - - startup_run_delay: - type: int - default: 60 - description: - de: 'Zeitlicher Abstand in Sekunden, mit der die Berechnungen bei Startup ausgeführt werden sollen' - en: 'Delay in seconds, after which the startup calculations will be run' - - ignore_0: - type: list - default: [] - description: - de: "Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. - Beispieleintrag: temp | hum" - en: "At items having a entry of that list in path, val_num=0 will be ignored for database queries. 
- Example: temp | hum" - - use_oldest_entry: - type: bool - default: False - description: - de: "True: Verwendung des ältesten Eintrags des Items in der Datenbank, falls der Start des Abfragezeitraums zeitlich vor diesem Eintrag liegt - False: Abbruch der Datenbankabfrage" - en: "True: Use of oldest entry of item in database, if start of query is prior to oldest entry - False: Cancel query" - -item_attributes: - db_addon_fct: - type: str - description: - de: 'Auswertefunktion des DB-Addon Plugins' - en: 'Evaluation Function of DB-Addon Plugins' - valid_list: - - 'verbrauch_heute' - - 'verbrauch_woche' - - 'verbrauch_monat' - - 'verbrauch_jahr' - - 'verbrauch_heute_minus1' - - 'verbrauch_heute_minus2' - - 'verbrauch_heute_minus3' - - 'verbrauch_heute_minus4' - - 'verbrauch_heute_minus5' - - 'verbrauch_heute_minus6' - - 'verbrauch_heute_minus7' - - 'verbrauch_woche_minus1' - - 'verbrauch_woche_minus2' - - 'verbrauch_woche_minus3' - - 'verbrauch_woche_minus4' - - 'verbrauch_monat_minus1' - - 'verbrauch_monat_minus2' - - 'verbrauch_monat_minus3' - - 'verbrauch_monat_minus4' - - 'verbrauch_monat_minus12' - - 'verbrauch_jahr_minus1' - - 'verbrauch_jahr_minus2' - - 'verbrauch_rolling_12m_heute_minus1' - - 'verbrauch_rolling_12m_woche_minus1' - - 'verbrauch_rolling_12m_monat_minus1' - - 'verbrauch_rolling_12m_jahr_minus1' - - 'verbrauch_jahreszeitraum_minus1' - - 'verbrauch_jahreszeitraum_minus2' - - 'verbrauch_jahreszeitraum_minus3' - - 'zaehlerstand_heute_minus1' - - 'zaehlerstand_heute_minus2' - - 'zaehlerstand_heute_minus3' - - 'zaehlerstand_woche_minus1' - - 'zaehlerstand_woche_minus2' - - 'zaehlerstand_woche_minus3' - - 'zaehlerstand_monat_minus1' - - 'zaehlerstand_monat_minus2' - - 'zaehlerstand_monat_minus3' - - 'zaehlerstand_jahr_minus1' - - 'zaehlerstand_jahr_minus2' - - 'zaehlerstand_jahr_minus3' - - 'minmax_last_24h_min' - - 'minmax_last_24h_max' - - 'minmax_last_24h_avg' - - 'minmax_last_7d_min' - - 'minmax_last_7d_max' - - 'minmax_last_7d_avg' - - 'minmax_heute_min' - - 'minmax_heute_max' - - 'minmax_heute_minus1_min' - - 'minmax_heute_minus1_max' - - 'minmax_heute_minus1_avg' - - 'minmax_heute_minus2_min' - - 'minmax_heute_minus2_max' - - 'minmax_heute_minus2_avg' - - 'minmax_heute_minus3_min' - - 'minmax_heute_minus3_max' - - 'minmax_heute_minus3_avg' - - 'minmax_woche_min' - - 'minmax_woche_max' - - 'minmax_woche_minus1_min' - - 'minmax_woche_minus1_max' - - 'minmax_woche_minus1_avg' - - 'minmax_woche_minus2_min' - - 'minmax_woche_minus2_max' - - 'minmax_woche_minus2_avg' - - 'minmax_monat_min' - - 'minmax_monat_max' - - 'minmax_monat_minus1_min' - - 'minmax_monat_minus1_max' - - 'minmax_monat_minus1_avg' - - 'minmax_monat_minus2_min' - - 'minmax_monat_minus2_max' - - 'minmax_monat_minus2_avg' - - 'minmax_jahr_min' - - 'minmax_jahr_max' - - 'minmax_jahr_minus1_min' - - 'minmax_jahr_minus1_max' - - 'minmax_jahr_minus1_avg' - - 'tagesmitteltemperatur_heute' - - 'tagesmitteltemperatur_heute_minus1' - - 'tagesmitteltemperatur_heute_minus2' - - 'tagesmitteltemperatur_heute_minus3' - - 'serie_minmax_monat_min_15m' - - 'serie_minmax_monat_max_15m' - - 'serie_minmax_monat_avg_15m' - - 'serie_minmax_woche_min_30w' - - 'serie_minmax_woche_max_30w' - - 'serie_minmax_woche_avg_30w' - - 'serie_minmax_tag_min_30d' - - 'serie_minmax_tag_max_30d' - - 'serie_minmax_tag_avg_30d' - - 'serie_verbrauch_tag_30d' - - 'serie_verbrauch_woche_30w' - - 'serie_verbrauch_monat_18m' - - 'serie_zaehlerstand_tag_30d' - - 'serie_zaehlerstand_woche_30w' - - 'serie_zaehlerstand_monat_18m' - - 
'serie_waermesumme_monat_24m' - - 'serie_kaeltesumme_monat_24m' - - 'serie_tagesmittelwert_stunde_0d' - - 'serie_tagesmittelwert_tag_stunde_30d' - - 'general_oldest_value' - - 'general_oldest_log' - - 'kaeltesumme' - - 'waermesumme' - - 'gruenlandtempsumme' - - 'tagesmitteltemperatur' - - 'wachstumsgradtage' - - 'db_request' - valid_list_description: - - 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)' - - 'Verbrauch in der aktuellen Woche' - - 'Verbrauch im aktuellen Monat' - - 'Verbrauch im aktuellen Jahr' - - 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)' - - 'Verbrauch vorgestern (heute -2 Tage)' - - 'Verbrauch heute -3 Tage' - - 'Verbrauch heute -4 Tage' - - 'Verbrauch heute -5 Tage' - - 'Verbrauch heute -6 Tage' - - 'Verbrauch heute -7 Tage' - - 'Verbrauch Vorwoche (aktuelle Woche -1)' - - 'Verbrauch aktuelle Woche -2 Wochen' - - 'Verbrauch aktuelle Woche -3 Wochen' - - 'Verbrauch aktuelle Woche -4 Wochen' - - 'Verbrauch Vormonat (aktueller Monat -1)' - - 'Verbrauch aktueller Monat -2 Monate' - - 'Verbrauch aktueller Monat -3 Monate' - - 'Verbrauch aktueller Monat -4 Monate' - - 'Verbrauch aktueller Monat -12 Monate' - - 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Verbrauch aktuelles Jahr -2 Jahre' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren' - - 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren' - - 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)' - - 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)' - - 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)' - - 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)' - - 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)' - - 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen' - - 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)' - - 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)' - - 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate' - - 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)' - - 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)' - - 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre' - - 'minimaler Wert der letzten 24h' - - 'maximaler Wert der letzten 24h' - - 'durchschnittlicher Wert der letzten 24h' - - 'minimaler Wert der letzten 7 Tage' - - 'maximaler Wert der letzten 7 Tage' - - 'durchschnittlicher Wert der letzten 7 Tage' - - 'Minimalwert seit Tagesbeginn' - - 'Maximalwert seit Tagesbeginn' - - 'Minimalwert gestern (heute -1 Tag)' - - 'Maximalwert gestern (heute -1 Tag)' - - 'Durchschnittswert gestern (heute -1 Tag)' - - 'Minimalwert vorgestern (heute -2 Tage)' - - 'Maximalwert vorgestern (heute -2 Tage)' - - 'Durchschnittswert vorgestern (heute -2 Tage)' - - 'Minimalwert heute vor 3 Tagen' - - 'Maximalwert heute vor 3 Tagen' - - 'Durchschnittswert heute vor 3 Tagen' - - 'Minimalwert seit Wochenbeginn' - - 'Maximalwert seit Wochenbeginn' - - 'Minimalwert Vorwoche (aktuelle Woche -1)' - - 'Maximalwert Vorwoche (aktuelle Woche -1)' - - 'Durchschnittswert Vorwoche (aktuelle Woche -1)' - - 'Minimalwert aktuelle Woche -2 Wochen' - - 'Maximalwert aktuelle Woche -2 Wochen' - - 'Durchschnittswert aktuelle Woche -2 Wochen' - - 'Minimalwert seit Monatsbeginn' - - 'Maximalwert seit Monatsbeginn' - - 'Minimalwert Vormonat (aktueller Monat -1)' - - 'Maximalwert Vormonat (aktueller Monat -1)' - - 'Durchschnittswert Vormonat (aktueller Monat -1)' - - 'Minimalwert aktueller Monat -2 Monate' - - 'Maximalwert aktueller Monat -2 Monate' - - 'Durchschnittswert aktueller Monat -2 Monate' - - 'Minimalwert seit Jahresbeginn' - - 'Maximalwert seit Jahresbeginn' - - 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Tagesmitteltemperatur heute' - - 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)' - - 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)' - - 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)' - - 'monatlicher Minimalwert der letzten 15 Monate (gleitend)' - - 'monatlicher Maximalwert der letzten 15 Monate (gleitend)' - - 'monatlicher Mittelwert der letzten 15 Monate (gleitend)' - - 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)' - - 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)' - - 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)' - - 'täglicher Minimalwert der letzten 30 Tage (gleitend)' - - 'täglicher Maximalwert der letzten 30 Tage (gleitend)' - - 'täglicher Mittelwert der letzten 30 Tage (gleitend)' - - 'Verbrauch pro Tag der letzten 30 Tage' - - 'Verbrauch pro Woche der letzten 30 Wochen' - - 'Verbrauch pro Monat der letzten 18 Monate' - - 'Zählerstand am Tagesende der letzten 30 Tage' - - 
'Zählerstand am Wochenende der letzten 30 Wochen' - - 'Zählerstand am Monatsende der letzten 18 Monate' - - 'monatliche Wärmesumme der letzten 24 Monate' - - 'monatliche Kältesumme der letzten 24 Monate' - - 'Stundenmittelwert für den aktuellen Tag' - - 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde' - - 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut' - - 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut' - - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str)' - - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str, threshold=optional: int)' - - 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)' - - 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)' - - 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (year=Jahr: int, method=0/1: int, threshold=Schwellentemperatur: int)' - - 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)' - valid_list_item_type: - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'num ' - - 'list' - - 'num' - - 'num' - - 'num' - - 'list' - - 'num' - - 'list' - valid_list_calculation: - - 'onchange' - - 'onchange' - - 'onchange' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'yearly' - - 'yearly' - - 'daily' - - 'weekly' - - 'monthly' - - 'yearly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'weekly' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'yearly' - - 'yearly' - - 'yearly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'onchange' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'onchange' - - 'onchange' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 
'onchange' - - 'onchange' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'onchange' - - 'onchange' - - 'yearly' - - 'yearly' - - 'yearly' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'monthly' - - 'monthly' - - 'monthly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'monthly' - - 'daily' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'daily' - - 'daily' - - 'False' - - 'False' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'group' - - db_addon_info: - type: str - description: - de: 'Info-Funktion des DB-Addon Plugins' - en: 'Info-Function of DB-Addon Plugins' - valid_list: - - 'db_version' - valid_list_description: - - 'Version der verbundenen Datenbank' - valid_list_item_type: - - 'str' - - db_addon_admin: - type: str - description: - de: 'Admin-Funktion des DB-Addon Plugins' - en: 'Admin-Function of DB-Addon Plugins' - valid_list: - - 'suspend' - - 'recalc_all' - - 'clean_cache_values' - valid_list_description: - - 'unterbricht die Aktivitäten des Plugin -> bool' - - 'Startet einen Neuberechnungslauf aller on-demand items -> bool' - - 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte -> bool' - valid_list_item_type: - - 'bool' - - 'bool' - - 'bool' - - db_addon_params: - type: str - description: - de: "Parameter für eine Auswertefunktion des DB-Addon Plugins im Format 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" - en: "Parameters of a DB-Addon Plugin evaluation function. Need to have format of 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" - - db_addon_startup: - type: bool - description: - de: 'Ausführen der Berechnung bei Plugin Start (mit zeitlichem Abstand, wie in den Plugin Parametern definiert)' - en: 'Run function in startup of plugin (with delay, set in plugin parameters)' - - db_addon_ignore_value: - type: num - description: - de: 'Wert der bei Abfrage bzw. 
Auswertung der Datenbank für diese Item ignoriert werden soll' - en: 'Value which will be ignored at database query' - - db_addon_database_item: - type: str - description: - de: 'Optional: Pfad des zu verwendenden Items mit Database Attribut' - en: 'Optional: Path of item with database attribut to be used' - -item_structs: - verbrauch_1: - name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 1) - verbrauch_heute: - name: Verbrauch heute - db_addon_fct: verbrauch_heute - type: num - visu_acl: ro - # cache: yes - - verbrauch_woche: - name: Verbrauch seit Wochenbeginn - db_addon_fct: verbrauch_woche - type: num - visu_acl: ro - # cache: yes - - verbrauch_monat: - name: Verbrauch seit Monatsbeginn - db_addon_fct: verbrauch_monat - type: num - visu_acl: ro - # cache: yes - - verbrauch_jahr: - name: Verbrauch seit Jahresbeginn - db_addon_fct: verbrauch_jahr - type: num - visu_acl: ro - # cache: yes - - verbrauch_rolling_12m: - name: Verbrauch innerhalb der letzten 12 Monate ausgehend von gestern - db_addon_fct: verbrauch_rolling_12m_heute_minus1 - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern: - name: Verbrauch gestern - db_addon_fct: verbrauch_heute_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus1: - name: Verbrauch vorgestern - db_addon_fct: verbrauch_heute_minus2 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus2: - name: Verbrauch vor 3 Tagen - db_addon_fct: verbrauch_heute_minus3 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_vorwoche: - name: Verbrauch in der Vorwoche - db_addon_fct: verbrauch_woche_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_vorwoche_minus1: - name: Verbrauch vor 2 Wochen - db_addon_fct: verbrauch_woche_minus2 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_vormonat: - name: Verbrauch im Vormonat - db_addon_fct: verbrauch_monat_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_vormonat_minus12: - name: Verbrauch vor 12 Monaten - db_addon_fct: verbrauch_monat_minus12 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_vorjahreszeitraum: - name: Verbrauch im Jahreszeitraum 1.1. 
bis jetzt vor einem Jahr - db_addon_fct: verbrauch_jahreszeitraum_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - verbrauch_2: - name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 2) - verbrauch_gestern_minus3: - name: Verbrauch vor 3 Tagen - db_addon_fct: verbrauch_heute_minus3 - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus4: - name: Verbrauch vor 4 Tagen - db_addon_fct: verbrauch_heute_minus4 - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus5: - name: Verbrauch vor 5 Tagen - db_addon_fct: verbrauch_heute_minus5 - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus6: - name: Verbrauch vor 6 Tagen - db_addon_fct: verbrauch_heute_minus6 - type: num - visu_acl: ro - # cache: yes - - verbrauch_gestern_minus7: - name: Verbrauch vor 7 Tagen - db_addon_fct: verbrauch_heute_minus7 - type: num - visu_acl: ro - # cache: yes - - verbrauch_vorwoche_minus2: - name: Verbrauch vor 3 Wochen - db_addon_fct: verbrauch_woche_minus3 - type: num - visu_acl: ro - # cache: yes - - verbrauch_vorwoche_minus3: - name: Verbrauch vor 4 Wochen - db_addon_fct: verbrauch_woche_minus4 - type: num - visu_acl: ro - # cache: yes - - verbrauch_vormonat_minus1: - name: Verbrauch vor 2 Monaten - db_addon_fct: verbrauch_monat_minus2 - type: num - visu_acl: ro - # cache: yes - - verbrauch_vormonat_minus2: - name: Verbrauch vor 3 Monaten - db_addon_fct: verbrauch_monat_minus3 - type: num - visu_acl: ro - # cache: yes - - verbrauch_vormonat_minus3: - name: Verbrauch vor 4 Monaten - db_addon_fct: verbrauch_monat_minus4 - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_1: - name: Struct für die Erfassung von Zählerständen zu bestimmten Zeitpunkten bei Zählern mit stetig ansteigendem Zählerstand - zaehlerstand_gestern: - name: Zählerstand zum Ende des gestrigen Tages - db_addon_fct: zaehlerstand_heute_minus1 - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_vorwoche: - name: Zählerstand zum Ende der vorigen Woche - db_addon_fct: zaehlerstand_woche_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_vormonat: - name: Zählerstand zum Ende des Vormonates - db_addon_fct: zaehlerstand_monat_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_vormonat_minus1: - name: Zählerstand zum Monatsende vor 2 Monaten - db_addon_fct: zaehlerstand_monat_minus2 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_vormonat_minus2: - name: Zählerstand zum Monatsende vor 3 Monaten - db_addon_fct: zaehlerstand_monat_minus3 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - zaehlerstand_vorjahr: - name: Zählerstand am Ende des vorigen Jahres - db_addon_fct: zaehlerstand_jahr_minus1 - db_addon_startup: yes - type: num - visu_acl: ro - # cache: yes - - minmax_1: - name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 1) - - heute_min: - name: Minimaler Wert seit Tagesbeginn - db_addon_fct: minmax_heute_min - db_addon_ignore_value: 0 - type: num - # cache: yes - - heute_max: - name: Maximaler Wert seit Tagesbeginn - db_addon_fct: minmax_heute_max - type: num - # cache: yes - - last24h_min: - name: Minimaler Wert in den letzten 24h (gleitend) - db_addon_fct: minmax_last_24h_min - type: num - # cache: yes - - last24h_max: - name: Maximaler Wert in den letzten 24h (gleitend) - db_addon_fct: minmax_last_24h_max - type: num - # cache: yes - - woche_min: - name: Minimaler Wert seit Wochenbeginn - db_addon_fct: minmax_woche_min - type: num - # cache: yes - - woche_max: - name: Maximaler Wert seit Wochenbeginn - db_addon_fct: minmax_woche_max - type: num - # cache: yes - - monat_min: - name: Minimaler Wert seit Monatsbeginn - db_addon_fct: minmax_monat_min - type: num - # cache: yes - - monat_max: - name: Maximaler Wert seit Monatsbeginn - db_addon_fct: minmax_monat_max - type: num - # cache: yes - - jahr_min: - name: Minimaler Wert seit Jahresbeginn - db_addon_fct: minmax_jahr_min - type: num - # cache: yes - - jahr_max: - name: Maximaler Wert seit Jahresbeginn - db_addon_fct: minmax_jahr_max - type: num - # cache: yes - - gestern_min: - name: Minimaler Wert gestern - db_addon_fct: minmax_heute_minus1_min - db_addon_startup: yes - type: num - # cache: yes - - gestern_max: - name: Maximaler Wert gestern - db_addon_fct: minmax_heute_minus1_max - db_addon_startup: yes - type: num - # cache: yes - - gestern_avg: - name: Durchschnittlicher Wert gestern - db_addon_fct: minmax_heute_minus1_avg - db_addon_startup: yes - type: num - # cache: yes - - vorwoche_min: - name: Minimaler Wert in der Vorwoche - db_addon_fct: minmax_woche_minus1_min - db_addon_startup: yes - type: num - # cache: yes - - vorwoche_max: - name: Maximaler Wert in der Vorwoche - db_addon_fct: minmax_woche_minus1_max - db_addon_startup: yes - type: num - # cache: yes - - vorwoche_avg: - name: Durchschnittlicher Wert in der Vorwoche - db_addon_fct: minmax_woche_minus1_avg - db_addon_startup: yes - type: num - # cache: yes - - vormonat_min: - name: Minimaler Wert im Vormonat - db_addon_fct: minmax_monat_minus1_min - db_addon_startup: yes - type: num - # cache: yes - - vormonat_max: - name: Maximaler Wert im Vormonat - db_addon_fct: minmax_monat_minus1_max - db_addon_startup: yes - type: num - # cache: yes - - vormonat_avg: - name: Durchschnittlicher Wert im Vormonat - db_addon_fct: minmax_monat_minus1_avg - db_addon_startup: yes - type: num - # cache: yes - - vorjahr_min: - name: Minimaler Wert im Vorjahr - db_addon_fct: minmax_jahr_minus1_min - db_addon_startup: yes - type: num - # cache: yes - - vorjahr_max: - name: Maximaler Wert im Vorjahr - db_addon_fct: minmax_jahr_minus1_max - db_addon_startup: yes - type: num - # cache: yes - - minmax_2: - name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 2) - - gestern_minus1_min: - name: Minimaler Wert vorgestern - db_addon_fct: minmax_heute_minus2_min - type: num - # cache: yes - - gestern_minus1_max: - name: Maximaler Wert vorgestern - db_addon_fct: minmax_heute_minus2_max - type: num - # cache: yes - - gestern_minus1_avg: - name: Durchschnittlicher Wert vorgestern - db_addon_fct: minmax_heute_minus2_avg - type: num - # cache: yes - - gestern_minus2_min: - name: Minimaler Wert vor 3 Tagen - db_addon_fct: minmax_heute_minus3_min - type: num - # cache: yes - - gestern_minus2_max: - name: Maximaler Wert vor 3 Tagen - db_addon_fct: minmax_heute_minus3_max - type: num - # cache: yes - - gestern_minus2_avg: - name: Durchschnittlicher Wert vor 3 Tagen - db_addon_fct: minmax_heute_minus3_avg - type: num - # cache: yes - - vorwoche_minus1_min: - name: Minimaler Wert in der Woche vor 2 Wochen - db_addon_fct: minmax_woche_minus2_min - type: num - # cache: yes - - vorwoche_minus1_max: - name: Maximaler Wert in der Woche vor 2 Wochen - db_addon_fct: minmax_woche_minus2_max - type: num - # cache: yes - - vorwoche_minus1_avg: - name: Durchschnittlicher Wert in der Woche vor 2 Wochen - db_addon_fct: minmax_woche_minus2_avg - type: num - # cache: yes - - vormonat_minus1_min: - name: Minimaler Wert im Monat vor 2 Monaten - db_addon_fct: minmax_monat_minus2_min - type: num - # cache: yes - - vormonat_minus1_max: - name: Maximaler Wert im Monat vor 2 Monaten - db_addon_fct: minmax_monat_minus2_max - type: num - # cache: yes - - vormonat_minus1_avg: - name: Durchschnittlicher Wert im Monat vor 2 Monaten - db_addon_fct: minmax_monat_minus2_avg - type: num - # cache: yes - -item_attribute_prefixes: NONE - -plugin_functions: - fetch_log: - type: list - description: - de: 'Liefert für das angegebene Item und die Parameter das Abfrageergebnis zurück' - en: 'Return the database request result for the given item and parameters' - # mit dieser Funktion ist es möglich, eine Liste der "func" Werte pro "group" / "group2" eines "item" von "start""timespan" bis "end""timespan" oder von "start""timespan" bis "count" ausgegeben zu lassen - # bspw. minimale Tagestemperatur vom Item "outdoor.temp" der letzten 10 Tage startend von gestern davor --> func=min, item=outdoor.temp, timespan=day, start=1, count=10, group=day - # bspw. 
maximal Tagestemperatur vom Item "outdoor.temp" von jetzt bis 2 Monate davor --> func=max, item=outdoor.temp, timeframe=month, start=0, end=2, group=day - parameters: - func: - type: str - description: - de: "zu verwendende Abfragefunktion" - en: "database function to be used" - mandatory: True - valid_list: - - min # Minimalwerte - - max # Maximalwerte - - sum # Summe - - on - - integrate - - sum_max - - sum_avg - - sum_min_neg - - diff_max - item: - type: foo - description: - de: "Das Item-Objekt oder die Item_ID der DB" - en: "An item object" - mandatory: True - timeframe: - type: str - description: - de: "Zeitinkrement für die DB-Abfrage" - en: "time increment for db-request" - mandatory: True - valid_list: - - day - - week - - month - - year - start: - type: int - description: - de: "Zeitlicher Beginn der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" - en: "start point in time for db-request; x time increments from now into the past" - end: - type: int - description: - de: "Zeitliches Ende der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" - en: "end point in time for db-request; x time increments from now into the past" - count: - type: int - description: - de: "Anzahl der Zeitinkremente, vom Start in die Vergangenheit abzufragen sind. Alternative zu 'end'" - en: "number of time increments from start point in time into the past. can be used alternativly to 'end'" - group: - type: str - description: - de: "erste Gruppierung der DB-Abfrage" - en: "first grouping for the db-request" - valid_list: - - day - - week - - month - - year - group2: - type: str - description: - de: "zweite Gruppierung der DB-Abfrage" - en: "second grouping for the db-request" - valid_list: - - day - - week - - month - - year - - db_version: - type: str - description: - de: 'Liefer die verwendete Version der Datenbank' - en: 'Return the database version' - - suspend: - type: bool - description: - de: 'Pausiert die Berechnungen des Plugins' - en: 'Suspends value evaluation of plugin' - -logic_parameters: NONE +# Metadata for the plugin +plugin: + # Global plugin attributes + type: system # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Add-On für das database Plugin zur Datenauswertung' + en: 'Add-On for the database plugin for data evaluation' + maintainer: sisamiwe + tester: bmx, onkelandy # Who tests this plugin? 
+ state: ready # change to ready when done with development +# keywords: iot xyz +# documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin + version: 1.1.0 # Plugin version (must match the version specified in __init__.py) + sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin +# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) + py_minversion: 3.8 # minimum Python version to use for this plugin +# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) + multi_instance: false # plugin supports multi instance + restartable: unknown + classname: DatabaseAddOn # class containing the plugin + +parameters: + database_plugin_config: + type: str + default: 'database' + description: + de: "Konfiguration des Plugin 'Database', für die das Plugin 'DatabaseAddOn' verwendet wird" + en: "Config of Plugin 'Database, for which the Plugin 'DatabaseAddOn' should be active" + + startup_run_delay: + type: int + default: 60 + description: + de: 'Zeitlicher Abstand in Sekunden, mit der die Berechnungen bei Startup ausgeführt werden sollen' + en: 'Delay in seconds, after which the startup calculations will be run' + + ignore_0: + type: list + default: [] + description: + de: "Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. + Beispieleintrag: temp | hum" + en: "At items having a entry of that list in path, val_num=0 will be ignored for database queries. + Example: temp | hum" + + use_oldest_entry: + type: bool + default: False + description: + de: "True: Verwendung des ältesten Eintrags des Items in der Datenbank, falls der Start des Abfragezeitraums zeitlich vor diesem Eintrag liegt + False: Abbruch der Datenbankabfrage" + en: "True: Use of oldest entry of item in database, if start of query is prior to oldest entry + False: Cancel query" + +item_attributes: + db_addon_fct: + type: str + description: + de: 'Auswertefunktion des DB-Addon Plugins' + en: 'Evaluation Function of DB-Addon Plugins' + valid_list: + - 'verbrauch_heute' + - 'verbrauch_woche' + - 'verbrauch_monat' + - 'verbrauch_jahr' + - 'verbrauch_heute_minus1' + - 'verbrauch_heute_minus2' + - 'verbrauch_heute_minus3' + - 'verbrauch_heute_minus4' + - 'verbrauch_heute_minus5' + - 'verbrauch_heute_minus6' + - 'verbrauch_heute_minus7' + - 'verbrauch_woche_minus1' + - 'verbrauch_woche_minus2' + - 'verbrauch_woche_minus3' + - 'verbrauch_woche_minus4' + - 'verbrauch_monat_minus1' + - 'verbrauch_monat_minus2' + - 'verbrauch_monat_minus3' + - 'verbrauch_monat_minus4' + - 'verbrauch_monat_minus12' + - 'verbrauch_jahr_minus1' + - 'verbrauch_jahr_minus2' + - 'verbrauch_rolling_12m_heute_minus1' + - 'verbrauch_rolling_12m_woche_minus1' + - 'verbrauch_rolling_12m_monat_minus1' + - 'verbrauch_rolling_12m_jahr_minus1' + - 'verbrauch_jahreszeitraum_minus1' + - 'verbrauch_jahreszeitraum_minus2' + - 'verbrauch_jahreszeitraum_minus3' + - 'zaehlerstand_heute_minus1' + - 'zaehlerstand_heute_minus2' + - 'zaehlerstand_heute_minus3' + - 'zaehlerstand_woche_minus1' + - 'zaehlerstand_woche_minus2' + - 'zaehlerstand_woche_minus3' + - 'zaehlerstand_monat_minus1' + - 'zaehlerstand_monat_minus2' + - 'zaehlerstand_monat_minus3' + - 'zaehlerstand_jahr_minus1' + - 'zaehlerstand_jahr_minus2' + - 'zaehlerstand_jahr_minus3' + - 'minmax_last_24h_min' + - 
'minmax_last_24h_max' + - 'minmax_last_24h_avg' + - 'minmax_last_7d_min' + - 'minmax_last_7d_max' + - 'minmax_last_7d_avg' + - 'minmax_heute_min' + - 'minmax_heute_max' + - 'minmax_heute_minus1_min' + - 'minmax_heute_minus1_max' + - 'minmax_heute_minus1_avg' + - 'minmax_heute_minus2_min' + - 'minmax_heute_minus2_max' + - 'minmax_heute_minus2_avg' + - 'minmax_heute_minus3_min' + - 'minmax_heute_minus3_max' + - 'minmax_heute_minus3_avg' + - 'minmax_woche_min' + - 'minmax_woche_max' + - 'minmax_woche_minus1_min' + - 'minmax_woche_minus1_max' + - 'minmax_woche_minus1_avg' + - 'minmax_woche_minus2_min' + - 'minmax_woche_minus2_max' + - 'minmax_woche_minus2_avg' + - 'minmax_monat_min' + - 'minmax_monat_max' + - 'minmax_monat_minus1_min' + - 'minmax_monat_minus1_max' + - 'minmax_monat_minus1_avg' + - 'minmax_monat_minus2_min' + - 'minmax_monat_minus2_max' + - 'minmax_monat_minus2_avg' + - 'minmax_jahr_min' + - 'minmax_jahr_max' + - 'minmax_jahr_minus1_min' + - 'minmax_jahr_minus1_max' + - 'minmax_jahr_minus1_avg' + - 'tagesmitteltemperatur_heute' + - 'tagesmitteltemperatur_heute_minus1' + - 'tagesmitteltemperatur_heute_minus2' + - 'tagesmitteltemperatur_heute_minus3' + - 'serie_minmax_monat_min_15m' + - 'serie_minmax_monat_max_15m' + - 'serie_minmax_monat_avg_15m' + - 'serie_minmax_woche_min_30w' + - 'serie_minmax_woche_max_30w' + - 'serie_minmax_woche_avg_30w' + - 'serie_minmax_tag_min_30d' + - 'serie_minmax_tag_max_30d' + - 'serie_minmax_tag_avg_30d' + - 'serie_verbrauch_tag_30d' + - 'serie_verbrauch_woche_30w' + - 'serie_verbrauch_monat_18m' + - 'serie_zaehlerstand_tag_30d' + - 'serie_zaehlerstand_woche_30w' + - 'serie_zaehlerstand_monat_18m' + - 'serie_waermesumme_monat_24m' + - 'serie_kaeltesumme_monat_24m' + - 'serie_tagesmittelwert_stunde_0d' + - 'serie_tagesmittelwert_tag_stunde_30d' + - 'general_oldest_value' + - 'general_oldest_log' + - 'kaeltesumme' + - 'waermesumme' + - 'gruenlandtempsumme' + - 'tagesmitteltemperatur' + - 'wachstumsgradtage' + - 'db_request' + valid_list_description: + - 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)' + - 'Verbrauch in der aktuellen Woche' + - 'Verbrauch im aktuellen Monat' + - 'Verbrauch im aktuellen Jahr' + - 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)' + - 'Verbrauch vorgestern (heute -2 Tage)' + - 'Verbrauch heute -3 Tage' + - 'Verbrauch heute -4 Tage' + - 'Verbrauch heute -5 Tage' + - 'Verbrauch heute -6 Tage' + - 'Verbrauch heute -7 Tage' + - 'Verbrauch Vorwoche (aktuelle Woche -1)' + - 'Verbrauch aktuelle Woche -2 Wochen' + - 'Verbrauch aktuelle Woche -3 Wochen' + - 'Verbrauch aktuelle Woche -4 Wochen' + - 'Verbrauch Vormonat (aktueller Monat -1)' + - 'Verbrauch aktueller Monat -2 Monate' + - 'Verbrauch aktueller Monat -3 Monate' + - 'Verbrauch aktueller Monat -4 Monate' + - 'Verbrauch aktueller Monat -12 Monate' + - 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Verbrauch aktuelles Jahr -2 Jahre' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres' + - 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres' + - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren' + - 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren' + - 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)' + - 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)' + - 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)' + - 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)' + - 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)' + - 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen' + - 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)' + - 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)' + - 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate' + - 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)' + - 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)' + - 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre' + - 'minimaler Wert der letzten 24h' + - 'maximaler Wert der letzten 24h' + - 'durchschnittlicher Wert der letzten 24h' + - 'minimaler Wert der letzten 7 Tage' + - 'maximaler Wert der letzten 7 Tage' + - 'durchschnittlicher Wert der letzten 7 Tage' + - 'Minimalwert seit Tagesbeginn' + - 'Maximalwert seit Tagesbeginn' + - 'Minimalwert gestern (heute -1 Tag)' + - 'Maximalwert gestern (heute -1 Tag)' + - 'Durchschnittswert gestern (heute -1 Tag)' + - 'Minimalwert vorgestern (heute -2 Tage)' + - 'Maximalwert vorgestern (heute -2 Tage)' + - 'Durchschnittswert vorgestern (heute -2 Tage)' + - 'Minimalwert heute vor 3 Tagen' + - 'Maximalwert heute vor 3 Tagen' + - 'Durchschnittswert heute vor 3 Tagen' + - 'Minimalwert seit Wochenbeginn' + - 'Maximalwert seit Wochenbeginn' + - 'Minimalwert Vorwoche (aktuelle Woche -1)' + - 'Maximalwert Vorwoche (aktuelle Woche -1)' + - 'Durchschnittswert Vorwoche (aktuelle Woche -1)' + - 'Minimalwert aktuelle Woche -2 Wochen' + - 'Maximalwert aktuelle Woche -2 Wochen' + - 'Durchschnittswert aktuelle Woche -2 Wochen' + - 'Minimalwert seit Monatsbeginn' + - 'Maximalwert seit Monatsbeginn' + - 'Minimalwert Vormonat (aktueller Monat -1)' + - 'Maximalwert Vormonat (aktueller Monat -1)' + - 'Durchschnittswert Vormonat (aktueller Monat -1)' + - 'Minimalwert aktueller Monat -2 Monate' + - 'Maximalwert aktueller Monat -2 Monate' + - 'Durchschnittswert aktueller Monat -2 Monate' + - 'Minimalwert seit Jahresbeginn' + - 'Maximalwert seit Jahresbeginn' + - 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Tagesmitteltemperatur heute' + - 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)' + - 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)' + - 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)' + - 'monatlicher Minimalwert der letzten 15 Monate (gleitend)' + - 'monatlicher Maximalwert der letzten 15 Monate (gleitend)' + - 'monatlicher Mittelwert der letzten 15 Monate (gleitend)' + - 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)' + - 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)' + - 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)' + - 'täglicher Minimalwert der letzten 30 Tage (gleitend)' + - 'täglicher Maximalwert der letzten 30 Tage (gleitend)' + - 'täglicher Mittelwert der letzten 30 Tage (gleitend)' + - 'Verbrauch pro Tag der letzten 30 Tage' + - 'Verbrauch pro Woche der letzten 30 Wochen' + - 'Verbrauch pro Monat der letzten 18 Monate' + - 'Zählerstand am Tagesende der letzten 30 Tage' + - 
'Zählerstand am Wochenende der letzten 30 Wochen' + - 'Zählerstand am Monatsende der letzten 18 Monate' + - 'monatliche Wärmesumme der letzten 24 Monate' + - 'monatliche Kältesumme der letzten 24 Monate' + - 'Stundenmittelwert für den aktuellen Tag' + - 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde' + - 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut' + - 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut' + - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str)' + - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str, threshold=optional: int)' + - 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)' + - 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)' + - 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (year=Jahr: int, method=0/1: int, threshold=Schwellentemperatur: int)' + - 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)' + valid_list_item_type: + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'num ' + - 'list' + - 'num' + - 'num' + - 'num' + - 'list' + - 'num' + - 'list' + valid_list_calculation: + - 'onchange' + - 'onchange' + - 'onchange' + - 'onchange' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'yearly' + - 'yearly' + - 'daily' + - 'weekly' + - 'monthly' + - 'yearly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'weekly' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'yearly' + - 'yearly' + - 'yearly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'onchange' + - 'onchange' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'onchange' + - 'onchange' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 
'onchange' + - 'onchange' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'onchange' + - 'onchange' + - 'yearly' + - 'yearly' + - 'yearly' + - 'onchange' + - 'daily' + - 'daily' + - 'daily' + - 'monthly' + - 'monthly' + - 'monthly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'monthly' + - 'daily' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'daily' + - 'daily' + - 'False' + - 'False' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'group' + + db_addon_info: + type: str + description: + de: 'Info-Funktion des DB-Addon Plugins' + en: 'Info-Function of DB-Addon Plugins' + valid_list: + - 'db_version' + valid_list_description: + - 'Version der verbundenen Datenbank' + valid_list_item_type: + - 'str' + + db_addon_admin: + type: str + description: + de: 'Admin-Funktion des DB-Addon Plugins' + en: 'Admin-Function of DB-Addon Plugins' + valid_list: + - 'suspend' + - 'recalc_all' + - 'clean_cache_values' + valid_list_description: + - 'unterbricht die Aktivitäten des Plugin -> bool' + - 'Startet einen Neuberechnungslauf aller on-demand items -> bool' + - 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte -> bool' + valid_list_item_type: + - 'bool' + - 'bool' + - 'bool' + + db_addon_params: + type: str + description: + de: "Parameter für eine Auswertefunktion des DB-Addon Plugins im Format 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" + en: "Parameters of a DB-Addon Plugin evaluation function. Need to have format of 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" + + db_addon_startup: + type: bool + description: + de: 'Ausführen der Berechnung bei Plugin Start (mit zeitlichem Abstand, wie in den Plugin Parametern definiert)' + en: 'Run function in startup of plugin (with delay, set in plugin parameters)' + + db_addon_ignore_value: + type: num + description: + de: 'Wert der bei Abfrage bzw. 
Auswertung der Datenbank für diese Item ignoriert werden soll' + en: 'Value which will be ignored at database query' + + db_addon_database_item: + type: str + description: + de: 'Optional: Pfad des zu verwendenden Items mit Database Attribut' + en: 'Optional: Path of item with database attribut to be used' + +item_structs: + verbrauch_1: + name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 1) + verbrauch_heute: + name: Verbrauch heute + db_addon_fct: verbrauch_heute + type: num + visu_acl: ro + # cache: yes + + verbrauch_woche: + name: Verbrauch seit Wochenbeginn + db_addon_fct: verbrauch_woche + type: num + visu_acl: ro + # cache: yes + + verbrauch_monat: + name: Verbrauch seit Monatsbeginn + db_addon_fct: verbrauch_monat + type: num + visu_acl: ro + # cache: yes + + verbrauch_jahr: + name: Verbrauch seit Jahresbeginn + db_addon_fct: verbrauch_jahr + type: num + visu_acl: ro + # cache: yes + + verbrauch_rolling_12m: + name: Verbrauch innerhalb der letzten 12 Monate ausgehend von gestern + db_addon_fct: verbrauch_rolling_12m_heute_minus1 + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern: + name: Verbrauch gestern + db_addon_fct: verbrauch_heute_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus1: + name: Verbrauch vorgestern + db_addon_fct: verbrauch_heute_minus2 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus2: + name: Verbrauch vor 3 Tagen + db_addon_fct: verbrauch_heute_minus3 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_vorwoche: + name: Verbrauch in der Vorwoche + db_addon_fct: verbrauch_woche_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_vorwoche_minus1: + name: Verbrauch vor 2 Wochen + db_addon_fct: verbrauch_woche_minus2 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_vormonat: + name: Verbrauch im Vormonat + db_addon_fct: verbrauch_monat_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_vormonat_minus12: + name: Verbrauch vor 12 Monaten + db_addon_fct: verbrauch_monat_minus12 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_vorjahreszeitraum: + name: Verbrauch im Jahreszeitraum 1.1. 
bis jetzt vor einem Jahr + db_addon_fct: verbrauch_jahreszeitraum_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + verbrauch_2: + name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 2) + verbrauch_gestern_minus3: + name: Verbrauch vor 3 Tagen + db_addon_fct: verbrauch_heute_minus3 + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus4: + name: Verbrauch vor 4 Tagen + db_addon_fct: verbrauch_heute_minus4 + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus5: + name: Verbrauch vor 5 Tagen + db_addon_fct: verbrauch_heute_minus5 + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus6: + name: Verbrauch vor 6 Tagen + db_addon_fct: verbrauch_heute_minus6 + type: num + visu_acl: ro + # cache: yes + + verbrauch_gestern_minus7: + name: Verbrauch vor 7 Tagen + db_addon_fct: verbrauch_heute_minus7 + type: num + visu_acl: ro + # cache: yes + + verbrauch_vorwoche_minus2: + name: Verbrauch vor 3 Wochen + db_addon_fct: verbrauch_woche_minus3 + type: num + visu_acl: ro + # cache: yes + + verbrauch_vorwoche_minus3: + name: Verbrauch vor 4 Wochen + db_addon_fct: verbrauch_woche_minus4 + type: num + visu_acl: ro + # cache: yes + + verbrauch_vormonat_minus1: + name: Verbrauch vor 2 Monaten + db_addon_fct: verbrauch_monat_minus2 + type: num + visu_acl: ro + # cache: yes + + verbrauch_vormonat_minus2: + name: Verbrauch vor 3 Monaten + db_addon_fct: verbrauch_monat_minus3 + type: num + visu_acl: ro + # cache: yes + + verbrauch_vormonat_minus3: + name: Verbrauch vor 4 Monaten + db_addon_fct: verbrauch_monat_minus4 + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_1: + name: Struct für die Erfassung von Zählerständen zu bestimmten Zeitpunkten bei Zählern mit stetig ansteigendem Zählerstand + zaehlerstand_gestern: + name: Zählerstand zum Ende des gestrigen Tages + db_addon_fct: zaehlerstand_heute_minus1 + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_vorwoche: + name: Zählerstand zum Ende der vorigen Woche + db_addon_fct: zaehlerstand_woche_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_vormonat: + name: Zählerstand zum Ende des Vormonates + db_addon_fct: zaehlerstand_monat_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_vormonat_minus1: + name: Zählerstand zum Monatsende vor 2 Monaten + db_addon_fct: zaehlerstand_monat_minus2 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_vormonat_minus2: + name: Zählerstand zum Monatsende vor 3 Monaten + db_addon_fct: zaehlerstand_monat_minus3 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + zaehlerstand_vorjahr: + name: Zählerstand am Ende des vorigen Jahres + db_addon_fct: zaehlerstand_jahr_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + + minmax_1: + name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 1) + + heute_min: + name: Minimaler Wert seit Tagesbeginn + db_addon_fct: minmax_heute_min + db_addon_ignore_value: 0 + type: num + # cache: yes + + heute_max: + name: Maximaler Wert seit Tagesbeginn + db_addon_fct: minmax_heute_max + type: num + # cache: yes + + last24h_min: + name: Minimaler Wert in den letzten 24h (gleitend) + db_addon_fct: minmax_last_24h_min + type: num + # cache: yes + + last24h_max: + name: Maximaler Wert in den letzten 24h (gleitend) + db_addon_fct: minmax_last_24h_max + type: num + # cache: yes + + woche_min: + name: Minimaler Wert seit Wochenbeginn + db_addon_fct: minmax_woche_min + type: num + # cache: yes + + woche_max: + name: Maximaler Wert seit Wochenbeginn + db_addon_fct: minmax_woche_max + type: num + # cache: yes + + monat_min: + name: Minimaler Wert seit Monatsbeginn + db_addon_fct: minmax_monat_min + type: num + # cache: yes + + monat_max: + name: Maximaler Wert seit Monatsbeginn + db_addon_fct: minmax_monat_max + type: num + # cache: yes + + jahr_min: + name: Minimaler Wert seit Jahresbeginn + db_addon_fct: minmax_jahr_min + type: num + # cache: yes + + jahr_max: + name: Maximaler Wert seit Jahresbeginn + db_addon_fct: minmax_jahr_max + type: num + # cache: yes + + gestern_min: + name: Minimaler Wert gestern + db_addon_fct: minmax_heute_minus1_min + db_addon_startup: yes + type: num + # cache: yes + + gestern_max: + name: Maximaler Wert gestern + db_addon_fct: minmax_heute_minus1_max + db_addon_startup: yes + type: num + # cache: yes + + gestern_avg: + name: Durchschnittlicher Wert gestern + db_addon_fct: minmax_heute_minus1_avg + db_addon_startup: yes + type: num + # cache: yes + + vorwoche_min: + name: Minimaler Wert in der Vorwoche + db_addon_fct: minmax_woche_minus1_min + db_addon_startup: yes + type: num + # cache: yes + + vorwoche_max: + name: Maximaler Wert in der Vorwoche + db_addon_fct: minmax_woche_minus1_max + db_addon_startup: yes + type: num + # cache: yes + + vorwoche_avg: + name: Durchschnittlicher Wert in der Vorwoche + db_addon_fct: minmax_woche_minus1_avg + db_addon_startup: yes + type: num + # cache: yes + + vormonat_min: + name: Minimaler Wert im Vormonat + db_addon_fct: minmax_monat_minus1_min + db_addon_startup: yes + type: num + # cache: yes + + vormonat_max: + name: Maximaler Wert im Vormonat + db_addon_fct: minmax_monat_minus1_max + db_addon_startup: yes + type: num + # cache: yes + + vormonat_avg: + name: Durchschnittlicher Wert im Vormonat + db_addon_fct: minmax_monat_minus1_avg + db_addon_startup: yes + type: num + # cache: yes + + vorjahr_min: + name: Minimaler Wert im Vorjahr + db_addon_fct: minmax_jahr_minus1_min + db_addon_startup: yes + type: num + # cache: yes + + vorjahr_max: + name: Maximaler Wert im Vorjahr + db_addon_fct: minmax_jahr_minus1_max + db_addon_startup: yes + type: num + # cache: yes + + minmax_2: + name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 2) + + gestern_minus1_min: + name: Minimaler Wert vorgestern + db_addon_fct: minmax_heute_minus2_min + type: num + # cache: yes + + gestern_minus1_max: + name: Maximaler Wert vorgestern + db_addon_fct: minmax_heute_minus2_max + type: num + # cache: yes + + gestern_minus1_avg: + name: Durchschnittlicher Wert vorgestern + db_addon_fct: minmax_heute_minus2_avg + type: num + # cache: yes + + gestern_minus2_min: + name: Minimaler Wert vor 3 Tagen + db_addon_fct: minmax_heute_minus3_min + type: num + # cache: yes + + gestern_minus2_max: + name: Maximaler Wert vor 3 Tagen + db_addon_fct: minmax_heute_minus3_max + type: num + # cache: yes + + gestern_minus2_avg: + name: Durchschnittlicher Wert vor 3 Tagen + db_addon_fct: minmax_heute_minus3_avg + type: num + # cache: yes + + vorwoche_minus1_min: + name: Minimaler Wert in der Woche vor 2 Wochen + db_addon_fct: minmax_woche_minus2_min + type: num + # cache: yes + + vorwoche_minus1_max: + name: Maximaler Wert in der Woche vor 2 Wochen + db_addon_fct: minmax_woche_minus2_max + type: num + # cache: yes + + vorwoche_minus1_avg: + name: Durchschnittlicher Wert in der Woche vor 2 Wochen + db_addon_fct: minmax_woche_minus2_avg + type: num + # cache: yes + + vormonat_minus1_min: + name: Minimaler Wert im Monat vor 2 Monaten + db_addon_fct: minmax_monat_minus2_min + type: num + # cache: yes + + vormonat_minus1_max: + name: Maximaler Wert im Monat vor 2 Monaten + db_addon_fct: minmax_monat_minus2_max + type: num + # cache: yes + + vormonat_minus1_avg: + name: Durchschnittlicher Wert im Monat vor 2 Monaten + db_addon_fct: minmax_monat_minus2_avg + type: num + # cache: yes + +item_attribute_prefixes: NONE + +plugin_functions: + fetch_log: + type: list + description: + de: 'Liefert für das angegebene Item und die Parameter das Abfrageergebnis zurück' + en: 'Return the database request result for the given item and parameters' + # mit dieser Funktion ist es möglich, eine Liste der "func" Werte pro "group" / "group2" eines "item" von "start""timespan" bis "end""timespan" oder von "start""timespan" bis "count" ausgegeben zu lassen + # bspw. minimale Tagestemperatur vom Item "outdoor.temp" der letzten 10 Tage startend von gestern davor --> func=min, item=outdoor.temp, timespan=day, start=1, count=10, group=day + # bspw. 
maximal Tagestemperatur vom Item "outdoor.temp" von jetzt bis 2 Monate davor --> func=max, item=outdoor.temp, timeframe=month, start=0, end=2, group=day + parameters: + func: + type: str + description: + de: "zu verwendende Abfragefunktion" + en: "database function to be used" + mandatory: True + valid_list: + - min # Minimalwerte + - max # Maximalwerte + - sum # Summe + - on + - integrate + - sum_max + - sum_avg + - sum_min_neg + - diff_max + item: + type: foo + description: + de: "Das Item-Objekt oder die Item_ID der DB" + en: "An item object" + mandatory: True + timeframe: + type: str + description: + de: "Zeitinkrement für die DB-Abfrage" + en: "time increment for db-request" + mandatory: True + valid_list: + - day + - week + - month + - year + start: + type: int + description: + de: "Zeitlicher Beginn der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" + en: "start point in time for db-request; x time increments from now into the past" + end: + type: int + description: + de: "Zeitliches Ende der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" + en: "end point in time for db-request; x time increments from now into the past" + count: + type: int + description: + de: "Anzahl der Zeitinkremente, vom Start in die Vergangenheit abzufragen sind. Alternative zu 'end'" + en: "number of time increments from start point in time into the past. can be used alternativly to 'end'" + group: + type: str + description: + de: "erste Gruppierung der DB-Abfrage" + en: "first grouping for the db-request" + valid_list: + - day + - week + - month + - year + group2: + type: str + description: + de: "zweite Gruppierung der DB-Abfrage" + en: "second grouping for the db-request" + valid_list: + - day + - week + - month + - year + + db_version: + type: str + description: + de: 'Liefer die verwendete Version der Datenbank' + en: 'Return the database version' + + suspend: + type: bool + description: + de: 'Pausiert die Berechnungen des Plugins' + en: 'Suspends value evaluation of plugin' + +logic_parameters: NONE diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst index 28bfb15b9..74c774c71 100644 --- a/db_addon/user_doc.rst +++ b/db_addon/user_doc.rst @@ -1,277 +1,277 @@ - -.. index:: Plugins; db_addon (Datenbank Unterstützung) -.. index:: db_addon - -======== -db_addon -======== - -.. image:: webif/static/img/plugin_logo.png - :alt: plugin logo - :width: 300px - :height: 300px - :scale: 50 % - :align: left - - -Das Plugin bietet eine Funktionserweiterung zum Database Plugin und ermöglicht die einfache Auswertung von Messdaten. -Basierend auf den Daten in der Datenbank können bspw. Auswertungen zu Verbrauch (heute, gestern, ...) oder zu Minimal- -und Maximalwerten gefahren werden. -Diese Auswertungen werden zyklisch zum Tageswechsel, Wochenwechsel, Monatswechsel oder Jahreswechsel, in Abhängigkeit -der Funktion erzeugt. -Um die Zugriffe auf die Datenbank zu minimieren, werden diverse Daten zwischengespeichert. - -Sind Items mit einem DatabaseAddon-Attribut im gleichen Pfad, wie das Item, für das das Database Attribut -konfiguriert ist, wird dieses Item automatisch ermittelt. Bedeutet: Sind die Items mit dem DatabaseAddon-Attribute Kinder -oder Kindeskinder oder Kindeskinderkinder des Items, für das das Database Attribut konfiguriert ist, wird dieses automatisch -ermittelt. - -Alternativ kann mit dem Attribute "db_addon_database_item" auch der absolute Pfad des Items angegeben werden, für das -das Database Attribut konfiguriert ist. - -Bsp: - - -.. 
code-block:: yaml - - temperatur: - type: bool - database: yes - - auswertung: - type: foo - - heute_min: - type: num - db_addon_fct: heute_min - - gestern_max: - type: num - db_addon_fct: heute_minus1_max - - - tagesmitteltemperatur_gestern: - type: num - db_addon_fct: heute_minus1_avg - db_addon_database_item: 'temperatur' - -| - -Anforderungen -============= - -Es muss das Database Plugin konfiguriert und aktiv sein. In den Plugin Parametern ist der Name der Konfiguration des -Database-Plugins anzugeben. Damit ist auch eine etwaig definierte Instanz des Database-Plugins definiert. -Die Konfiguration des DatabaseAddon-Plugin erfolgt automatisch bei Start. - - -Hinweis: Das Plugin selbst ist aktuell nicht multi-instance fähig. Das bedeutet, dass das Plugin aktuell nur eine Instanz -des Database-Plugin abgebunden werden kann. - -| - -Konfiguration -============= - -Diese Plugin Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind -unter :doc:`/plugins_doc/config/db_addon` beschrieben. - -mysql Datenbank ---------------- - -Bei Verwendung von mysql sollten einige Variablen der Datenbank angepasst werden, so dass die komplexeren Anfragen -ohne Fehler bearbeitet werden. - -Dazu folgenden Block am Ende der Datei */etc/mysql/my.cnf* einfügen bzw den existierenden ergänzen. - - -.. code-block:: bash - - [mysqld] - connect_timeout = 60 - net_read_timeout = 60 - wait_timeout = 28800 - interactive_timeout = 28800 - -| - -Hinweise -======== - - - Das Plugin startet die Berechnungen der Werte nach einer gewissen (konfigurierbaren) Zeit (Attribut `startup_run_delay`) nach dem Start von shNG, um den Startvorgang nicht zu beeinflussen. - - Bei Start werden automatisch nur die Items berechnet, für das das Attribute `db_addon_startup` gesetzt wurde. Alle anderen Items werden erst zu konfigurierten Zeit berechnet. Über das WebIF kann die Berechnung aller definierten Items ausgelöst werden. - - Für sogenannte `on_change` Items, also Items, deren Berechnung bis zum Jetzt (bspw. verbrauch-heute) gehen, wird die Berechnung immer bei eintreffen eines neuen Wertes gestartet. Zu Reduktion der Belastung auf die Datenbank werden die Werte für das Ende der letzten Periode gecached. - - Berechnungen werden nur ausgeführt, wenn für den kompletten abgefragten Zeitraum Werte in der Datenbank vorliegen. Wird bspw. der Verbrauch des letzten Monats abgefragt wobei erst Werte ab dem 3. des Monats in der Datenbank sind, wird die Berechnung abgebrochen. - Mit dem Attribut `use_oldest_entry` kann dieses Verhalten verändert werden. Ist das Attribut gesetzt, wird, wenn für den Beginn der Abfragezeitraums keinen Werte vorliegen, der älteste Eintrag der Datenbank genutzt. - - Für die Auswertung kann es nützlich sein, bestimmte Werte aus der Datenbank bei der Berechnung auszublenden. Hierfür stehen 2 Möglichkeiten zur Verfügung: - - Plugin-Attribut `ignore_0`: (list of strings) Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. Hat also das Attribut den Wert ['temp'] werden bei allen Items mit 'temp' im Pfadnamen die 0-Werte bei der Auswertung ignoriert. - - Item-Attribut `db_addon_ignore_value`: (num) Dieser Wert wird bei der Abfrage bzw. Auswertung der Datenbank für diese Item ignoriert. - - Das Plugin enthält sehr ausführliche Logginginformation. Bei unerwartetem Verhalten, den LogLevel entsprechend anpassen, um mehr information zu erhalten. - - Berechnungen des Plugins können im WebIF unterbrochen werden. 
Auch das gesamte Plugin kann pausiert werden. Dies kann be starker Systembelastung nützlich sein. - -| - -Beispiele -========= - -Verbrauch ---------- - -Soll bspw. der Verbrauch von Wasser ausgewertet werden, so ist dies wie folgt möglich: - -.. code-block:: yaml - - wasserzaehler: - zaehlerstand: - type: num - knx_dpt: 12 - knx_cache: 5/3/4 - eval: round(value/1000, 1) - database: init - struct: - - db_addon.verbrauch_1 - - db_addon.verbrauch_2 - - db_addon.zaehlerstand_1 - -Die Werte des Wasserzählerstandes werden in die Datenbank geschrieben und darauf basierend ausgewertet. Die structs -'db_addon.verbrauch_1' und 'db_addon.verbrauch_2' stellen entsprechende Items für die Verbrauchsauswerten zur Verfügung. - -minmax ------- - -Soll bspw. die minimalen und maximalen Temperaturen ausgewertet werden, kann dies so umgesetzt werden: - -.. code-block:: yaml - - temperature: - aussen: - nord: - name: Außentemp Nordseite - type: num - visu_acl: ro - knx_dpt: 9 - knx_cache: 6/5/1 - database: init - struct: - - db_addon.minmax_1 - - db_addon.minmax_2 - -Die Temperaturwerte werden in die Datenbank geschrieben und darauf basierend ausgewertet. Die structs -'db_addon.minmax_1' und 'db_addon.minmax_2' stellen entsprechende Items für die min/max Auswertung zur Verfügung. - -| - -Web Interface -============= - -Das WebIF stellt neben der Ansicht verbundener Items und deren Parameter und Werte auch Funktionen für die -Administration des Plugins bereit. - -Es stehen Button für: - -- Neuberechnung aller Items -- Abbruch eines aktiven Berechnungslaufes -- Pausieren des Plugins -- Wiederaufnahme des Plugins - -bereit. - -Achtung: Das Auslösen einer kompletten Neuberechnung aller Items kann zu einer starken Belastung der Datenbank -aufgrund vieler Leseanfragen führen. - - -db_addon Items --------------- - -Dieser Reiter des Webinterface zeigt die Items an, für die ein DatabaseAddon Attribut konfiguriert ist. - - -db_addon Maintenance --------------------- - -Das Webinterface zeigt detaillierte Informationen über die im Plugin verfügbaren Daten an. -Dies dient der Maintenance bzw. Fehlersuche. Dieser Tab ist nur bei Log-Level "Debug" verfügbar. - - -Erläuterungen zu Temperatursummen -================================= - - -Grünlandtemperatursumme ------------------------ - -Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. -Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. -Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. - -siehe: https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - -Folgende Parameter sind möglich / notwendig: - -.. code-block:: yaml - db_addon_params: "year=current" - -- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') - - -Wachstumsgradtag ----------------- -Der Begriff Wachstumsgradtage (WGT) ist ein Überbegriff für verschiedene Größen. -Gemeinsam ist ihnen, daß zur Berechnung eine Lufttemperatur von einem Schwellenwert subtrahiert wird. -Je nach Fragestellung und Pflanzenart werden der Schwellenwert unterschiedlich gewählt und die Temperatur unterschiedlich bestimmt. -Verfügbar sind die Berechnung über 0) "einfachen Durchschnitt der Tagestemperaturen", 1) "modifizierten Durchschnitt der Tagestemperaturen" -und 2) Anzahl der Tage, deren Mitteltempertatur oberhalb der Schwellentemperatur lag. 
- -siehe https://de.wikipedia.org/wiki/Wachstumsgradtag - -Folgende Parameter sind möglich / notwendig: - -.. code-block:: yaml - db_addon_params: "year=current, method=1, threshold=10" - -- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') -- method: 0-Berechnung über "einfachen Durchschnitt der Tagestemperaturen", 1-Berechnung über "modifizierten Durchschnitt (default: 0) -der Tagestemperaturen" 2-Anzahl der Tage, mit Mitteltempertatur oberhalb Schwellentemperatur// 10, 11 Ausgabe aus Zeitserie -- threshold: Schwellentemperatur in °C (int) (default: 10) - - -Wärmesumme ----------- - -Die Wärmesumme soll eine Aussage über den Sommer und die Pflanzenreife liefern. Es gibt keine eindeutige Definition der Größe "Wärmesumme". -Berechnet wird die Wärmesumme als Summe aller Tagesmitteltemperaturen über einem Schwellenwert ab dem 1.1. des Jahres. - -siehe https://de.wikipedia.org/wiki/W%C3%A4rmesumme - -Folgende Parameter sind möglich / notwendig: - -.. code-block:: yaml - db_addon_params: "year=current, month=1, threshold=10" - -- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') -- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) -- threshold: Schwellentemperatur in °C (int) (default: 10) - - -Kältesumme ----------- - -Die Kältesumme soll eine Aussage über die Härte des Winters liefern. -Berechnet wird die Kältesumme als Summe aller negativen Tagesmitteltemperaturen ab dem 21.9. des Jahres bis 31.3. des Folgejahres. - -siehe https://de.wikipedia.org/wiki/K%C3%A4ltesumme - -Folgende Parameter sind möglich / notwendig: - -.. code-block:: yaml - db_addon_params: "year=current, month=1" - -- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') -- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) - - -Tagesmitteltemperatur ---------------------- - -Die Tagesmitteltemperatur wird auf Basis der stündlichen Durchschnittswerte eines Tages (aller in der DB enthaltenen Datensätze) -für die angegebene Anzahl von Tagen (days=optional) berechnet. + +.. index:: Plugins; db_addon (Datenbank Unterstützung) +.. index:: db_addon + +======== +db_addon +======== + +.. image:: webif/static/img/plugin_logo.png + :alt: plugin logo + :width: 300px + :height: 300px + :scale: 50 % + :align: left + + +Das Plugin bietet eine Funktionserweiterung zum Database Plugin und ermöglicht die einfache Auswertung von Messdaten. +Basierend auf den Daten in der Datenbank können bspw. Auswertungen zu Verbrauch (heute, gestern, ...) oder zu Minimal- +und Maximalwerten gefahren werden. +Diese Auswertungen werden zyklisch zum Tageswechsel, Wochenwechsel, Monatswechsel oder Jahreswechsel, in Abhängigkeit +der Funktion erzeugt. +Um die Zugriffe auf die Datenbank zu minimieren, werden diverse Daten zwischengespeichert. + +Sind Items mit einem DatabaseAddon-Attribut im gleichen Pfad, wie das Item, für das das Database Attribut +konfiguriert ist, wird dieses Item automatisch ermittelt. Bedeutet: Sind die Items mit dem DatabaseAddon-Attribute Kinder +oder Kindeskinder oder Kindeskinderkinder des Items, für das das Database Attribut konfiguriert ist, wird dieses automatisch +ermittelt. 
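Ein minimales Beispiel (Skizze mit frei gewählten Item-Namen) für diese automatische Zuordnung über die Item-Hierarchie; die verwendeten Funktionsnamen stammen aus der valid_list des Attributs `db_addon_fct` in der plugin.yaml:

.. code-block:: yaml

    aussentemperatur:
        type: num
        database: yes

        auswertung:
            heute_min:
                type: num
                db_addon_fct: minmax_heute_min

            gestern_max:
                type: num
                db_addon_fct: minmax_heute_minus1_max

Da die beiden Auswerte-Items Kindeskinder des Items mit dem `database` Attribut sind, wird dieses automatisch als Datenquelle verwendet.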
+ +Alternativ kann mit dem Attribute "db_addon_database_item" auch der absolute Pfad des Items angegeben werden, für das +das Database Attribut konfiguriert ist. + +Bsp: + + +.. code-block:: yaml + + temperatur: + type: bool + database: yes + + auswertung: + type: foo + + heute_min: + type: num + db_addon_fct: heute_min + + gestern_max: + type: num + db_addon_fct: heute_minus1_max + + + tagesmitteltemperatur_gestern: + type: num + db_addon_fct: heute_minus1_avg + db_addon_database_item: 'temperatur' + +| + +Anforderungen +============= + +Es muss das Database Plugin konfiguriert und aktiv sein. In den Plugin Parametern ist der Name der Konfiguration des +Database-Plugins anzugeben. Damit ist auch eine etwaig definierte Instanz des Database-Plugins definiert. +Die Konfiguration des DatabaseAddon-Plugin erfolgt automatisch bei Start. + + +Hinweis: Das Plugin selbst ist aktuell nicht multi-instance fähig. Das bedeutet, dass das Plugin aktuell nur eine Instanz +des Database-Plugin abgebunden werden kann. + +| + +Konfiguration +============= + +Diese Plugin Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind +unter :doc:`/plugins_doc/config/db_addon` beschrieben. + +mysql Datenbank +--------------- + +Bei Verwendung von mysql sollten einige Variablen der Datenbank angepasst werden, so dass die komplexeren Anfragen +ohne Fehler bearbeitet werden. + +Dazu folgenden Block am Ende der Datei */etc/mysql/my.cnf* einfügen bzw den existierenden ergänzen. + + +.. code-block:: bash + + [mysqld] + connect_timeout = 60 + net_read_timeout = 60 + wait_timeout = 28800 + interactive_timeout = 28800 + +| + +Hinweise +======== + + - Das Plugin startet die Berechnungen der Werte nach einer gewissen (konfigurierbaren) Zeit (Attribut `startup_run_delay`) nach dem Start von shNG, um den Startvorgang nicht zu beeinflussen. + - Bei Start werden automatisch nur die Items berechnet, für das das Attribute `db_addon_startup` gesetzt wurde. Alle anderen Items werden erst zu konfigurierten Zeit berechnet. Über das WebIF kann die Berechnung aller definierten Items ausgelöst werden. + - Für sogenannte `on_change` Items, also Items, deren Berechnung bis zum Jetzt (bspw. verbrauch-heute) gehen, wird die Berechnung immer bei eintreffen eines neuen Wertes gestartet. Zu Reduktion der Belastung auf die Datenbank werden die Werte für das Ende der letzten Periode gecached. + - Berechnungen werden nur ausgeführt, wenn für den kompletten abgefragten Zeitraum Werte in der Datenbank vorliegen. Wird bspw. der Verbrauch des letzten Monats abgefragt wobei erst Werte ab dem 3. des Monats in der Datenbank sind, wird die Berechnung abgebrochen. + Mit dem Attribut `use_oldest_entry` kann dieses Verhalten verändert werden. Ist das Attribut gesetzt, wird, wenn für den Beginn der Abfragezeitraums keinen Werte vorliegen, der älteste Eintrag der Datenbank genutzt. + - Für die Auswertung kann es nützlich sein, bestimmte Werte aus der Datenbank bei der Berechnung auszublenden. Hierfür stehen 2 Möglichkeiten zur Verfügung: + - Plugin-Attribut `ignore_0`: (list of strings) Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. Hat also das Attribut den Wert ['temp'] werden bei allen Items mit 'temp' im Pfadnamen die 0-Werte bei der Auswertung ignoriert. + - Item-Attribut `db_addon_ignore_value`: (num) Dieser Wert wird bei der Abfrage bzw. Auswertung der Datenbank für diese Item ignoriert. 
+ - Das Plugin enthält sehr ausführliche Logginginformation. Bei unerwartetem Verhalten, den LogLevel entsprechend anpassen, um mehr information zu erhalten. + - Berechnungen des Plugins können im WebIF unterbrochen werden. Auch das gesamte Plugin kann pausiert werden. Dies kann be starker Systembelastung nützlich sein. + +| + +Beispiele +========= + +Verbrauch +--------- + +Soll bspw. der Verbrauch von Wasser ausgewertet werden, so ist dies wie folgt möglich: + +.. code-block:: yaml + + wasserzaehler: + zaehlerstand: + type: num + knx_dpt: 12 + knx_cache: 5/3/4 + eval: round(value/1000, 1) + database: init + struct: + - db_addon.verbrauch_1 + - db_addon.verbrauch_2 + - db_addon.zaehlerstand_1 + +Die Werte des Wasserzählerstandes werden in die Datenbank geschrieben und darauf basierend ausgewertet. Die structs +'db_addon.verbrauch_1' und 'db_addon.verbrauch_2' stellen entsprechende Items für die Verbrauchsauswerten zur Verfügung. + +minmax +------ + +Soll bspw. die minimalen und maximalen Temperaturen ausgewertet werden, kann dies so umgesetzt werden: + +.. code-block:: yaml + + temperature: + aussen: + nord: + name: Außentemp Nordseite + type: num + visu_acl: ro + knx_dpt: 9 + knx_cache: 6/5/1 + database: init + struct: + - db_addon.minmax_1 + - db_addon.minmax_2 + +Die Temperaturwerte werden in die Datenbank geschrieben und darauf basierend ausgewertet. Die structs +'db_addon.minmax_1' und 'db_addon.minmax_2' stellen entsprechende Items für die min/max Auswertung zur Verfügung. + +| + +Web Interface +============= + +Das WebIF stellt neben der Ansicht verbundener Items und deren Parameter und Werte auch Funktionen für die +Administration des Plugins bereit. + +Es stehen Button für: + +- Neuberechnung aller Items +- Abbruch eines aktiven Berechnungslaufes +- Pausieren des Plugins +- Wiederaufnahme des Plugins + +bereit. + +Achtung: Das Auslösen einer kompletten Neuberechnung aller Items kann zu einer starken Belastung der Datenbank +aufgrund vieler Leseanfragen führen. + + +db_addon Items +-------------- + +Dieser Reiter des Webinterface zeigt die Items an, für die ein DatabaseAddon Attribut konfiguriert ist. + + +db_addon Maintenance +-------------------- + +Das Webinterface zeigt detaillierte Informationen über die im Plugin verfügbaren Daten an. +Dies dient der Maintenance bzw. Fehlersuche. Dieser Tab ist nur bei Log-Level "Debug" verfügbar. + + +Erläuterungen zu Temperatursummen +================================= + + +Grünlandtemperatursumme +----------------------- + +Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. +Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. +Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. + +siehe: https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') + + +Wachstumsgradtag +---------------- +Der Begriff Wachstumsgradtage (WGT) ist ein Überbegriff für verschiedene Größen. +Gemeinsam ist ihnen, daß zur Berechnung eine Lufttemperatur von einem Schwellenwert subtrahiert wird. +Je nach Fragestellung und Pflanzenart werden der Schwellenwert unterschiedlich gewählt und die Temperatur unterschiedlich bestimmt. 
+Verfügbar sind die Berechnung über 0) "einfachen Durchschnitt der Tagestemperaturen", 1) "modifizierten Durchschnitt der Tagestemperaturen" +und 2) Anzahl der Tage, deren Mitteltempertatur oberhalb der Schwellentemperatur lag. + +siehe https://de.wikipedia.org/wiki/Wachstumsgradtag + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current, method=1, threshold=10" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- method: 0-Berechnung über "einfachen Durchschnitt der Tagestemperaturen", 1-Berechnung über "modifizierten Durchschnitt (default: 0) +der Tagestemperaturen" 2-Anzahl der Tage, mit Mitteltempertatur oberhalb Schwellentemperatur// 10, 11 Ausgabe aus Zeitserie +- threshold: Schwellentemperatur in °C (int) (default: 10) + + +Wärmesumme +---------- + +Die Wärmesumme soll eine Aussage über den Sommer und die Pflanzenreife liefern. Es gibt keine eindeutige Definition der Größe "Wärmesumme". +Berechnet wird die Wärmesumme als Summe aller Tagesmitteltemperaturen über einem Schwellenwert ab dem 1.1. des Jahres. + +siehe https://de.wikipedia.org/wiki/W%C3%A4rmesumme + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current, month=1, threshold=10" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) +- threshold: Schwellentemperatur in °C (int) (default: 10) + + +Kältesumme +---------- + +Die Kältesumme soll eine Aussage über die Härte des Winters liefern. +Berechnet wird die Kältesumme als Summe aller negativen Tagesmitteltemperaturen ab dem 21.9. des Jahres bis 31.3. des Folgejahres. + +siehe https://de.wikipedia.org/wiki/K%C3%A4ltesumme + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current, month=1" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) + + +Tagesmitteltemperatur +--------------------- + +Die Tagesmitteltemperatur wird auf Basis der stündlichen Durchschnittswerte eines Tages (aller in der DB enthaltenen Datensätze) +für die angegebene Anzahl von Tagen (days=optional) berechnet. diff --git a/db_addon/webif/__init__.py b/db_addon/webif/__init__.py index 95a601130..bd5c6c41f 100644 --- a/db_addon/webif/__init__.py +++ b/db_addon/webif/__init__.py @@ -1,138 +1,138 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2022- Michael Wenzel wenzel_michael@web.de -######################################################################### -# This file is part of SmartHomeNG. -# https://www.smarthomeNG.de -# https://knx-user-forum.de/forum/supportforen/smarthome-py -# -# This plugin provides additional functionality to mysql database -# connected via database plugin -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# -######################################################################### - -import json - -from lib.item import Items -from lib.model.smartplugin import SmartPluginWebIf - - -# ------------------------------------------ -# Webinterface of the plugin -# ------------------------------------------ - -import cherrypy -import csv -from jinja2 import Environment, FileSystemLoader - - -class WebInterface(SmartPluginWebIf): - - def __init__(self, webif_dir, plugin): - """ - Initialization of instance of class WebInterface - - :param webif_dir: directory where the webinterface of the plugin resides - :param plugin: instance of the plugin - :type webif_dir: str - :type plugin: object - """ - self.logger = plugin.logger - self.webif_dir = webif_dir - self.plugin = plugin - self.items = Items.get_instance() - - self.tplenv = self.init_template_environment() - - @cherrypy.expose - def index(self, reload=None): - """ - Build index.html for cherrypy - - Render the template and return the html file to be delivered to the browser - - :return: contents of the template after being rendered - """ - - tmpl = self.tplenv.get_template('index.html') - - return tmpl.render(p=self.plugin, - webif_pagelength=self.plugin.get_parameter_value('webif_pagelength'), - suspended='true' if self.plugin.suspended else 'false', - items=self.plugin.get_item_list('db_addon', 'function'), - item_count=len(self.plugin.get_item_list('db_addon', 'function')), - plugin_shortname=self.plugin.get_shortname(), - plugin_version=self.plugin.get_version(), - plugin_info=self.plugin.get_info(), - maintenance=True if self.plugin.log_level == 10 else False, - ) - - @cherrypy.expose - def get_data_html(self, dataSet=None): - """ - Return data to update the webpage - - For the standard update mechanism of the web interface, the dataSet to return the data for is None - - :param dataSet: Dataset for which the data should be returned (standard: None) - :return: dict with the data needed to update the web page. 
- """ - if dataSet is None: - # get the new data - data = dict() - data['items'] = {} - - for item in self.plugin.get_item_list('db_addon', 'function'): - data['items'][item.id()] = {} - data['items'][item.id()]['value'] = item.property.value - data['items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') - data['items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') - - data['plugin_suspended'] = self.plugin.suspended - data['maintenance'] = True if self.plugin.log_level == 10 else False - data['queue_length'] = self.plugin.queue_backlog() - data['active_queue_item'] = self.plugin.active_queue_item - - try: - return json.dumps(data, default=str) - except Exception as e: - self.logger.error(f"get_data_html exception: {e}") - - @cherrypy.expose - def recalc_all(self): - self.logger.debug(f"recalc_all called") - self.plugin.execute_all_items() - - @cherrypy.expose - def clean_cache_dicts(self): - self.logger.debug(f"_clean_cache_dicts called") - self.plugin._clean_cache_dicts() - - @cherrypy.expose - def clear_queue(self): - self.logger.debug(f"_clear_queue called") - self.plugin._clear_queue() - - @cherrypy.expose - def activate(self): - self.logger.debug(f"active called") - self.plugin.suspend(False) - - @cherrypy.expose - def suspend(self): - self.logger.debug(f"suspend called") - self.plugin.suspend(True) +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2022- Michael Wenzel wenzel_michael@web.de +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# This plugin provides additional functionality to mysql database +# connected via database plugin +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +import json + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +import cherrypy +import csv +from jinja2 import Environment, FileSystemLoader + + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after being rendered + """ + + tmpl = self.tplenv.get_template('index.html') + + return tmpl.render(p=self.plugin, + webif_pagelength=self.plugin.get_parameter_value('webif_pagelength'), + suspended='true' if self.plugin.suspended else 'false', + items=self.plugin.get_item_list('db_addon', 'function'), + item_count=len(self.plugin.get_item_list('db_addon', 'function')), + plugin_shortname=self.plugin.get_shortname(), + plugin_version=self.plugin.get_version(), + plugin_info=self.plugin.get_info(), + maintenance=True if self.plugin.log_level == 10 else False, + ) + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. 
+ """ + if dataSet is None: + # get the new data + data = dict() + data['items'] = {} + + for item in self.plugin.get_item_list('db_addon', 'function'): + data['items'][item.id()] = {} + data['items'][item.id()]['value'] = item.property.value + data['items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') + data['items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') + + data['plugin_suspended'] = self.plugin.suspended + data['maintenance'] = True if self.plugin.log_level == 10 else False + data['queue_length'] = self.plugin.queue_backlog() + data['active_queue_item'] = self.plugin.active_queue_item + + try: + return json.dumps(data, default=str) + except Exception as e: + self.logger.error(f"get_data_html exception: {e}") + + @cherrypy.expose + def recalc_all(self): + self.logger.debug(f"recalc_all called") + self.plugin.execute_all_items() + + @cherrypy.expose + def clean_cache_dicts(self): + self.logger.debug(f"_clean_cache_dicts called") + self.plugin._clean_cache_dicts() + + @cherrypy.expose + def clear_queue(self): + self.logger.debug(f"_clear_queue called") + self.plugin._clear_queue() + + @cherrypy.expose + def activate(self): + self.logger.debug(f"active called") + self.plugin.suspend(False) + + @cherrypy.expose + def suspend(self): + self.logger.debug(f"suspend called") + self.plugin.suspend(True) diff --git a/tasmota/__init__.py b/tasmota/__init__.py old mode 100644 new mode 100755 index 48ec19807..8266281f0 --- a/tasmota/__init__.py +++ b/tasmota/__init__.py @@ -1,1723 +1,1723 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2020- Martin Sinn m.sinn@gmx.de -# Copyright 2021- Michael Wenzel wenzel_michael@web.de -######################################################################### -# This file is part of SmartHomeNG. -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# -######################################################################### - -from datetime import datetime, timedelta - -from lib.model.mqttplugin import * -from .webif import WebInterface - - -class Tasmota(MqttPlugin): - """ - Main class of the Plugin. 
Does all plugin specific stuff and provides the update functions for the items - """ - - PLUGIN_VERSION = '1.4.0' - - LIGHT_MSG = ['HSBColor', 'Dimmer', 'Color', 'CT', 'Scheme', 'Fade', 'Speed', 'LedTable', 'White'] - - RF_MSG = ['RfSync', 'RfLow', 'RfHigh', 'RfCode'] - - ZIGBEE_BRIDGE_DEFAULT_OPTIONS = {'SetOption89': 'OFF', - 'SetOption101': 'OFF', - 'SetOption120': 'OFF', - 'SetOption83': 'ON', - 'SetOption112': 'OFF', - 'SetOption110': 'OFF', - 'SetOption119': 'OFF', - 'SetOption118': 'OFF', - 'SetOption125': 'ON', - } - TASMOTA_ATTR_R_W = ['relay', 'hsb', 'white', 'ct', 'rf_send', 'rf_key_send', 'zb_permit_join', 'zb_forget', 'zb_ping', 'rf_key'] - - TASMOTA_ZB_ATTR_R_W = ['power', 'hue', 'sat', 'ct', 'dimmer', 'ct_k'] - - ENERGY_SENSOR_KEYS = {'Voltage': 'item_voltage', - 'Current': 'item_current', - 'Power': 'item_power', - 'ApparentPower': 'item_apparent_power', - 'ReactivePower': 'item_reactive_power', - 'Factor': 'item_power_factor', - 'TotalStartTime': 'item_total_starttime', - 'Total': 'item_power_total', - 'Yesterday': 'item_power_yesterday', - 'Today': 'item_power_today'} - - ENV_SENSOR = ['DS18B20', 'AM2301', 'SHT3X', 'BMP280', 'DHT11'] - - ENV_SENSOR_KEYS = {'Temperature': 'item_temp', - 'Humidity': 'item_hum', - 'DewPoint': 'item_dewpoint', - 'Pressure': 'item_pressure', - 'Id': 'item_1wid'} - - ANALOG_SENSOR_KEYS = {'Temperature': 'item_analog_temp', - 'Temperature1': 'item_analog_temp1', - 'A0': 'item_analog_a0', - 'Range': 'item_analog_range'} - - ESP32_SENSOR_KEYS = {'Temperature': 'item_esp32_temp'} - - SENSORS = [*ENV_SENSOR, - 'ENERGY', - ] - - def __init__(self, sh): - """ - Initializes the plugin. - """ - - # Call init code of parent class (MqttPlugin) - super().__init__() - if not self._init_complete: - return - - # get the parameters for the plugin (as defined in metadata plugin.yaml): - self.telemetry_period = self.get_parameter_value('telemetry_period') - self.full_topic = self.get_parameter_value('full_topic').lower() - - # crate full_topic - if self.full_topic.find('%prefix%') == -1 or self.full_topic.find('%topic%') == -1: - self.full_topic = '%prefix%/%topic%/' - if self.full_topic[-1] != '/': - self.full_topic += '/' - - # Define properties - self.tasmota_devices = {} # to hold tasmota device information for web interface - self.tasmota_zigbee_devices = {} # to hold tasmota zigbee device information for web interface - self.tasmota_items = [] # to hold item information for web interface - self.topics_of_retained_messages = [] # to hold all topics of retained messages - - self.alive = None - - # Add subscription to get device discovery - self.add_subscription( 'tasmota/discovery/#', 'dict', callback=self.on_mqtt_discovery_message) - # Add subscription to get device LWT - self.add_tasmota_subscription('tele', '+', 'LWT', 'bool', bool_values=['Offline', 'Online'], callback=self.on_mqtt_lwt_message) - # Add subscription to get device status - self.add_tasmota_subscription('stat', '+', 'STATUS0', 'dict', callback=self.on_mqtt_status0_message) - # Add subscription to get device actions result - self.add_tasmota_subscription('stat', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) - - # Init WebIF - self.init_webinterface(WebInterface) - return - - def run(self): - """ - Run method for the plugin - """ - self.logger.debug("Run method called") - - # start subscription to all defined topics - self.start_subscriptions() - - self.logger.debug(f"Scheduler: 'check_online_status' created") - dt = self.shtime.now() + timedelta(seconds=(self.telemetry_period - 3)) - 
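-        # the first online check is thereby scheduled 3 seconds before the first telemetry
-        # period of the devices elapses and is then repeated every telemetry_period seconds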
self.scheduler_add('check_online_status', self.check_online_status, cycle=self.telemetry_period, next=dt) - - self.logger.debug(f"Scheduler: 'add_tasmota_subscriptions' created") - self.scheduler_add('add_tasmota_subscriptions', self.add_tasmota_subscriptions, cron='init+20') - - self.alive = True - - def stop(self): - """ - Stop method for the plugin - """ - self.alive = False - self.logger.debug("Stop method called") - self.scheduler_remove('check_online_status') - - # stop subscription to all topics - self.stop_subscriptions() - - def parse_item(self, item): - """ - Default plugin parse_item method. Is called when the plugin is initialized. - The plugin can, corresponding to its attribute keywords, decide what to do with - the item in the future, like adding it to an internal array for future reference - :param item: The item to process. - :return: If the plugin needs to be informed of an items change you should return a call back function - like the function update_item down below. An example when this is needed is the knx plugin - where parse_item returns the update_item function when the attribute knx_send is found. - This means that when the items value is about to be updated, the call back function is called - with the item, caller, source and dest as arguments and in case of the knx plugin the value - can be sent to the knx with a knx write function within the knx plugin. - """ - - if self.has_iattr(item.conf, 'tasmota_topic'): - tasmota_topic = self.get_iattr_value(item.conf, 'tasmota_topic') - self.logger.info(f"parsing item: {item.id()} with tasmota_topic={tasmota_topic}") - - tasmota_attr = self.get_iattr_value(item.conf, 'tasmota_attr') - tasmota_relay = self.get_iattr_value(item.conf, 'tasmota_relay') - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') - tasmota_zb_device = self.get_iattr_value(item.conf, 'tasmota_zb_device') - tasmota_zb_group = self.get_iattr_value(item.conf, 'tasmota_zb_group') - tasmota_zb_attr = self.get_iattr_value(item.conf, 'tasmota_zb_attr') - tasmota_zb_attr = tasmota_zb_attr.lower() if tasmota_zb_attr else None - tasmota_sml_device = self.get_iattr_value(item.conf, 'tasmota_sml_device') - tasmota_sml_attr = self.get_iattr_value(item.conf, 'tasmota_sml_attr') - tasmota_sml_attr = tasmota_sml_attr.lower() if tasmota_sml_attr else None - - # handle tasmota devices without zigbee - if tasmota_attr: - self.logger.info(f"Item={item.id()} identified for Tasmota with tasmota_attr={tasmota_attr}") - tasmota_attr = tasmota_attr.lower() - tasmota_relay = 1 if not tasmota_relay else tasmota_relay - - if tasmota_rf_details and '=' in tasmota_rf_details: - tasmota_rf_details, tasmota_rf_key_param = tasmota_rf_details.split('=') - - # handle tasmota zigbee devices - elif tasmota_zb_device and tasmota_zb_attr: - self.logger.info(f"Item={item.id()} identified for Tasmota Zigbee with tasmota_zb_device={tasmota_zb_device} and tasmota_zb_attr={tasmota_zb_attr}") - - # check if zigbee device short name has been used without parentheses; if so this will be normally parsed to a number and therefore mismatch with definition - try: - tasmota_zb_device = int(tasmota_zb_device) - self.logger.warning(f"Probably for item {item.id()} the device short name as been used for attribute 'tasmota_zb_device'. Trying to make that work but it will cause exceptions. 
To prevent this, the short name need to be defined as string by using parentheses") - tasmota_zb_device = str(hex(tasmota_zb_device)) - tasmota_zb_device = tasmota_zb_device[0:2] + tasmota_zb_device[2:len(tasmota_zb_device)].upper() - except Exception: - pass - - # handle tasmota zigbee groups - elif tasmota_zb_group and tasmota_zb_attr: - self.logger.info(f"Item={item.id()} identified for Tasmota Zigbee with tasmota_zb_group={tasmota_zb_group} and tasmota_zb_attr={tasmota_zb_attr}") - - # handle tasmota smartmeter devices - elif tasmota_sml_device and tasmota_sml_attr: - self.logger.info(f"Item={item.id()} identified for Tasmota SML with tasmota_sml_device={tasmota_sml_device} and tasmota_sml_attr={tasmota_sml_attr}") - - # handle everything else - else: - self.logger.info(f"Definition of attributes for item={item.id()} incomplete. Item will be ignored.") - return - - # setup dict for new device - if not self.tasmota_devices.get(tasmota_topic): - self._add_new_device_to_tasmota_devices(tasmota_topic) - self.tasmota_devices[tasmota_topic]['status'] = 'item.conf' - - # fill tasmota_device dict - self.tasmota_devices[tasmota_topic]['connected_to_item'] = True - if tasmota_attr == 'relay' and tasmota_relay: - item_type = f'item_{tasmota_attr}{tasmota_relay}' - elif tasmota_attr == 'rf_key' and tasmota_rf_details: - item_type = f'item_{tasmota_attr}{tasmota_rf_details}' - elif tasmota_zb_device and tasmota_zb_attr: - item_type = f'item_{tasmota_zb_device}.{tasmota_zb_attr}' - elif tasmota_sml_device and tasmota_sml_attr: - item_type = f'item_{tasmota_sml_device}.{tasmota_sml_attr}' - else: - item_type = f'item_{tasmota_attr}' - self.tasmota_devices[tasmota_topic]['connected_items'][item_type] = item - - # append to list used for web interface - if item not in self.tasmota_items: - self.tasmota_items.append(item) - - return self.update_item - - elif self.has_iattr(item.conf, 'tasmota_admin'): - self.logger.debug(f"parsing item: {item.id()} for tasmota admin attribute") - - return self.update_item - - def update_item(self, item, caller: str = None, source: str = None, dest: str = None): - """ - Item has been updated - - This method is called, if the value of an item has been updated by SmartHomeNG. - It should write the changed value out to the device (hardware/interface) that - is managed by this plugin. 
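-
-        Example (topic and item invented for illustration): an item carrying
-        tasmota_topic 'NXSM200_01' and tasmota_attr 'relay' that is set to True in
-        SmartHomeNG is published as payload 'ON' (mapped via bool_values ['OFF', 'ON'])
-        to the MQTT topic 'cmnd/NXSM200_01/Power'.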
- - :param item: item to be updated towards the plugin - :param caller: if given it represents the callers name - :param source: if given it represents the source - :param dest: if given it represents the dest - """ - - if self.alive and caller != self.get_shortname(): - # code to execute if the plugin is not stopped AND only, if the item has not been changed by this plugin: - - # get tasmota attributes of item - tasmota_admin = self.get_iattr_value(item.conf, 'tasmota_admin') - tasmota_topic = self.get_iattr_value(item.conf, 'tasmota_topic') - tasmota_attr = self.get_iattr_value(item.conf, 'tasmota_attr') - tasmota_relay = self.get_iattr_value(item.conf, 'tasmota_relay') - tasmota_relay = '1' if not tasmota_relay else None - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') - tasmota_zb_device = self.get_iattr_value(item.conf, 'tasmota_zb_device') - tasmota_zb_group = self.get_iattr_value(item.conf, 'tasmota_zb_group') - tasmota_zb_attr = self.get_iattr_value(item.conf, 'tasmota_zb_attr') - tasmota_zb_cluster = self.get_iattr_value(item.conf, 'tasmota_zb_cluster') - tasmota_zb_attr = tasmota_zb_attr.lower() if tasmota_zb_attr else None - - # handle tasmota_admin - if tasmota_admin: - if tasmota_admin == 'delete_retained_messages' and bool(item()): - self.clear_retained_messages() - item(False, self.get_shortname()) - - # handle tasmota_attr - elif tasmota_attr and tasmota_attr in self.TASMOTA_ATTR_R_W: - self.logger.info(f"update_item: {item.id()}, item has been changed in SmartHomeNG outside of this plugin in {caller} with value {item()}") - - value = item() - link = { - # 'attribute': (detail, data_type, bool_values, min_value, max_value) - 'relay': (f'Power', bool, ['OFF', 'ON'], None, None), - 'hsb': ('HsbColor', list, None, None, None), - 'white': ('White', int, None, 0, 120), - 'ct': ('CT', int, None, 153, 500), - 'rf_send': ('Backlog', dict, None, None, None), - 'rf_key_send': (f'RfKey', int, None, 1, 16), - 'rf_key': (f'RfKey', bool, None, None, None), - 'zb_permit_join': ('ZbPermitJoin', bool, ['0', '1'], None, None), - 'zb_forget': ('ZbForget', bool, ['0', '1'], None, None), - 'zb_ping': ('ZbPing', bool, ['0', '1'], None, None), - } - - if tasmota_attr not in link: - return - - (detail, data_type, bool_values, min_value, max_value) = link[tasmota_attr] - - # check data type - if not isinstance(value, data_type): - self.logger.warning(f"update_item: type of value {type(value)} for tasmota_attr={tasmota_attr} to be published, does not fit with expected type '{data_type}'. 
Abort publishing.") - return - - # check and correct if value is in allowed range - if min_value and value < min_value: - self.logger.info(f'Commanded value for {tasmota_attr} below min value; set to allowed min value.') - value = min_value - elif max_value and value > max_value: - self.logger.info(f'Commanded value for {tasmota_attr} above max value; set to allowed max value.') - value = max_value - - # do tasmota_attr specific checks and adaptations - if tasmota_attr == 'relay': - detail = f"{detail}{tasmota_relay}" if tasmota_relay > '1' else detail - - elif tasmota_attr == 'hsb': - if not len(value) == 3: - return - new_value = f"{value[0]},{value[1]},{value[2]}" - value = new_value - - elif tasmota_attr == 'rf_send': - # Input: {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'} / Output: "RfSync 12220; RfLow 440; RfHigh 1210; RfCode #F06104" - rf_cmd = {k.lower(): v for k, v in value.items()} - if all(k in rf_cmd for k in [x.lower() for x in self.RF_MSG]): - value = f"RfSync {value['rfsync']}; RfLow {value['rflow']}; RfHigh {value['rfhigh']}; RfCode #{value['rfcode']}" - else: - self.logger.debug(f"update_item: rf_send received but not with correct content; expected content is: {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'}") - return - - elif tasmota_attr == 'rf_key_send': - detail = f"{detail}{value}" - value = 1 - - elif tasmota_attr == 'rf_key': - if not tasmota_rf_details: - self.logger.warning(f"tasmota_rf_details not specified, no action taken.") - return - - if tasmota_rf_details and '=' in tasmota_rf_details: - tasmota_rf_details, tasmota_rf_key_param = tasmota_rf_details.split('=') - - detail = f"{detail}{tasmota_rf_details}" - value = 1 - - elif tasmota_attr == 'zb_forget': - if value not in self.tasmota_zigbee_devices: - self.logger.error(f"Device {value} not known by plugin, no action taken.") - return - - elif tasmota_attr == 'zb_ping': - if value not in self.tasmota_zigbee_devices: - self.logger.error(f"Device {value} not known by plugin, no action taken.") - return - - if value is not None: - self.publish_tasmota_topic('cmnd', tasmota_topic, detail, value, item, bool_values=bool_values) - - # handle tasmota_zb_attr - elif tasmota_zb_attr and tasmota_zb_attr in self.TASMOTA_ZB_ATTR_R_W: - self.logger.info(f"update_item: item={item.id()} with tasmota_zb_attr={tasmota_zb_attr} has been changed from {caller} with value={item()}") - self.logger.info(f"update_item: tasmota_zb_device={tasmota_zb_device}; tasmota_zb_group={tasmota_zb_group}") - - if tasmota_zb_device is None and tasmota_zb_group is None: - return - - value = int(item()) - detail = 'ZbSend' - link = { - # 'attribute': (send_cmd, bool_values, min_value, max_value, cluster, convert) - 'power': ('Power', ['OFF', 'ON'], None, None, '0x0006', None), - 'dimmer': ('Dimmer', None, 0, 100, '0x0008', _100_to_254), - 'hue': ('Hue', None, 0, 360, '0x0300', _360_to_254), - 'sat': ('Sat', None, 0, 100, '0x0300', _100_to_254), - 'ct': ('CT', None, 150, 500, '0x0300', None), - 'ct_k': ('CT', None, 2000, 6700, '0x0300', _kelvin_to_mired), - } - - if tasmota_zb_attr not in link: - return - - (send_cmd, bool_values, min_value, max_value, cluster, convert) = link[tasmota_zb_attr] - - # check and correct if value is in allowed range - if min_value and value < min_value: - self.logger.info(f'Commanded value for {tasmota_zb_attr} below min value; set to allowed min value.') - value = min_value - elif max_value and value > max_value: - self.logger.info(f'Commanded value for {tasmota_zb_attr} above 
max value; set to allowed max value.') - value = max_value - - # Konvertiere Wert - if convert: - value = convert(value) - - # build payload - payload = {'Device': tasmota_zb_device} if tasmota_zb_device else {'group': tasmota_zb_group} - payload['Send'] = {send_cmd: value} - if tasmota_zb_cluster: - payload['Cluster'] = cluster - - self.logger.debug(f"payload={payload}") - - # publish command - self.publish_tasmota_topic('cmnd', tasmota_topic, detail, payload, item, bool_values=bool_values) - - else: - self.logger.warning(f"update_item: {item.id()}, trying to change item in SmartHomeNG that is read only in tasmota device (by {caller})") - - ############################################################ - # Callbacks - ############################################################ - - # ToDo: 2023-01-20 17:21:04 ERROR modules.mqtt _on_log: Caught exception in on_message: 'ip' - - def on_mqtt_discovery_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received discovery messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - - """ - - self._handle_retained_message(topic, retain) - - try: - (tasmota, discovery, device_id, msg_type) = topic.split('/') - self.logger.info(f"on_mqtt_discovery_message: device_id={device_id}, type={msg_type}, payload={payload}") - except Exception as e: - self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - if msg_type == 'config': - """ - device_id = 2CF432CC2FC5 - - payload = - { - 'ip': '192.168.2.33', // IP address - 'dn': 'NXSM200_01', // Device name - 'fn': ['NXSM200_01', None, None, None, None, None, None, None], // List of friendly names - 'hn': 'NXSM200-01-4037', // Hostname - 'mac': '2CF432CC2FC5', // MAC Adresse ohne : - 'md': 'NXSM200', // Module - 'ty': 0, // Tuya - 'if': 0, // ifan - 'ofln': 'Offline', // LWT-offline - 'onln': 'Online', // LWT-online - 'state': ['OFF', 'ON', 'TOGGLE', 'HOLD'], // StateText[0..3] - 'sw': '12.1.1', // Firmware Version - 't': 'NXSM200_01', // Topic - 'ft': '%prefix%/%topic%/', // Full Topic - 'tp': ['cmnd', 'stat', 'tele'], // Topic [SUB_PREFIX, PUB_PREFIX, PUB_PREFIX2] - 'rl': [1, 0, 0, 0, 0, 0, 0, 0], // Relays, 0: disabled, 1: relay, 2.. future extension (fan, shutter?) 
- 'swc': [-1, -1, -1, -1, -1, -1, -1, -1], // SwitchMode - 'swn': [None, None, None, None, None, None, None, None], // SwitchName - 'btn': [0, 0, 0, 0, 0, 0, 0, 0], // Buttons - 'so': {'4': 0, '11': 0, '13': 0, '17': 0, '20': 0, '30': 0, '68': 0, '73': 0, '82': 0, '114': 0, '117': 0}, // SetOption needed by HA to map Tasmota devices to HA entities and triggers - 'lk': 0, // ctrgb - 'lt_st': 0, // Light subtype - 'sho': [0, 0, 0, 0], - 'sht': [[0, 0, 48], [0, 0, 46], [0, 0, 110], [0, 0, 108]], - 'ver': 1 // Discovery protocol version - } - """ - - tasmota_topic = payload['t'] - if tasmota_topic: - - device_name = payload['dn'] - self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and device_name={device_name}") - - # if device is unknown, add it to dict - if tasmota_topic not in self.tasmota_devices: - self.logger.info(f"New device based on Discovery Message found.") - self._add_new_device_to_tasmota_devices(tasmota_topic) - - # process decoding message and set device to status 'discovered' - self.tasmota_devices[tasmota_topic]['ip'] = payload['ip'] - self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['fn'][0] - self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['sw'] - self.tasmota_devices[tasmota_topic]['device_id'] = device_id - self.tasmota_devices[tasmota_topic]['module'] = payload['md'] - self.tasmota_devices[tasmota_topic]['mac'] = ':'.join(device_id[i:i + 2] for i in range(0, 12, 2)) - self.tasmota_devices[tasmota_topic]['discovery_config'] = self._rename_discovery_keys(payload) - self.tasmota_devices[tasmota_topic]['status'] = 'discovered' - - # start device interview - self._interview_device(tasmota_topic) - - if payload['ft'] != self.full_topic: - self.logger.warning(f"Device {device_name} discovered, but FullTopic of device does not match plugin setting!") - - # if zigbee bridge, process those - if 'zigbee_bridge' in device_name.lower(): - self.logger.info(f"Zigbee_Bridge discovered") - self.tasmota_devices[tasmota_topic]['zigbee']['status'] = 'discovered' - self._configure_zigbee_bridge_settings(tasmota_topic) - self._discover_zigbee_bridge_devices(tasmota_topic) - - elif msg_type == 'sensors': - """ - device_id = 2CF432CC2FC5 - - payload = {'sn': {'Time': '2022-11-19T13:35:59', - 'ENERGY': {'TotalStartTime': '2019-12-23T17:02:03', 'Total': 85.314, 'Yesterday': 0.0, - 'Today': 0.0, 'Power': 0, 'ApparentPower': 0, 'ReactivePower': 0, 'Factor': 0.0, - 'Voltage': 0, 'Current': 0.0}}, 'ver': 1} - """ - - # get payload with Sensor information - sensor_payload = payload['sn'] - if 'Time' in sensor_payload: - sensor_payload.pop('Time') - - # find matching tasmota_topic - tasmota_topic = None - for entry in self.tasmota_devices: - if self.tasmota_devices[entry].get('device_id') == device_id: - tasmota_topic = entry - break - - # hand over sensor information payload for parsing - if sensor_payload and tasmota_topic: - self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and SensorInformation") - self._handle_sensor(tasmota_topic, '', sensor_payload) - - def on_mqtt_lwt_message(self, topic: str, payload: bool, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received lwt messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - - """ - self._handle_retained_message(topic, retain) - - try: - (topic_type, tasmota_topic, info_topic) = topic.split('/') - except Exception as e: - 
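-            # the received topic did not have the expected <prefix>/<tasmota_topic>/<detail>
-            # form, so the three-way unpacking above raised an exception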
self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - self.logger.info(f"Received LWT Message for {tasmota_topic} with value={payload} and retain={retain}") - - if payload: - if tasmota_topic not in self.tasmota_devices: - self.logger.debug(f"New online device based on LWT Message discovered.") - self._handle_new_discovered_device(tasmota_topic) - self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) - - if tasmota_topic in self.tasmota_devices: - self.tasmota_devices[tasmota_topic]['online'] = payload - self._set_item_value(tasmota_topic, 'item_online', payload, info_topic) - - def on_mqtt_status0_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message - :param retain: retain flag for this message - - """ - - """ - Example payload - - payload = {'Status': {'Module': 75, 'DeviceName': 'ZIGBEE_Bridge01', 'FriendlyName': ['SONOFF_ZB1'], - 'Topic': 'SONOFF_ZB1', 'ButtonTopic': '0', 'Power': 0, 'PowerOnState': 3, 'LedState': 1, - 'LedMask': 'FFFF', 'SaveData': 1, 'SaveState': 1, 'SwitchTopic': '0', - 'SwitchMode': [0, 0, 0, 0, 0, 0, 0, 0], 'ButtonRetain': 0, 'SwitchRetain': 0, - 'SensorRetain': 0, 'PowerRetain': 0, 'InfoRetain': 0, 'StateRetain': 0}, - 'StatusPRM': {'Baudrate': 115200, 'SerialConfig': '8N1', 'GroupTopic': 'tasmotas', - 'OtaUrl': 'http://ota.tasmota.com/tasmota/release/tasmota-zbbridge.bin.gz', - 'RestartReason': 'Software/System restart', 'Uptime': '0T23:18:30', - 'StartupUTC': '2022-11-19T12:10:15', 'Sleep': 50, 'CfgHolder': 4617, 'BootCount': 116, - 'BCResetTime': '2021-04-28T08:32:10', 'SaveCount': 160, 'SaveAddress': '1FB000'}, - 'StatusFWR': {'Version': '12.1.1(zbbridge)', 'BuildDateTime': '2022-08-25T11:37:17', 'Boot': 31, - 'Core': '2_7_4_9', 'SDK': '2.2.2-dev(38a443e)', 'CpuFrequency': 160, - 'Hardware': 'ESP8266EX', 'CR': '372/699'}, - 'StatusLOG': {'SerialLog': 0, 'WebLog': 2, 'MqttLog': 0, 'SysLog': 0, 'LogHost': '', 'LogPort': 514, - 'SSId': ['WLAN-Access', ''], 'TelePeriod': 300, 'Resolution': '558180C0', - 'SetOption': ['00008009', '2805C80001000600003C5A0A002800000000', '00000080', - '40046002', '00004810', '00000000']}, - 'StatusMEM': {'ProgramSize': 685, 'Free': 1104, 'Heap': 25, 'ProgramFlashSize': 2048, - 'FlashSize': 2048, 'FlashChipId': '1540A1', 'FlashFrequency': 40, 'FlashMode': 3, - 'Features': ['00000809', '0F1007C6', '04400001', '00000003', '00000000', '00000000', - '00020080', '00200000', '04000000', '00000000'], - 'Drivers': '1,2,4,7,9,10,12,20,23,38,41,50,62', 'Sensors': '1'}, - 'StatusNET': {'Hostname': 'SONOFF-ZB1-6926', 'IPAddress': '192.168.2.24', 'Gateway': '192.168.2.1', - 'Subnetmask': '255.255.255.0', 'DNSServer1': '192.168.2.1', 'DNSServer2': '0.0.0.0', - 'Mac': '84:CC:A8:AA:1B:0E', 'Webserver': 2, 'HTTP_API': 1, 'WifiConfig': 0, - 'WifiPower': 17.0}, - 'StatusMQT': {'MqttHost': '192.168.2.12', 'MqttPort': 1883, 'MqttClientMask': 'DVES_%06X', - 'MqttClient': 'DVES_AA1B0E', 'MqttUser': 'DVES_USER', 'MqttCount': 1, - 'MAX_PACKET_SIZE': 1200, 'KEEPALIVE': 30, 'SOCKET_TIMEOUT': 4}, - 'StatusTIM': {'UTC': '2022-11-20T11:28:45', 'Local': '2022-11-20T12:28:45', - 'StartDST': '2022-03-27T02:00:00', 'EndDST': '2022-10-30T03:00:00', - 'Timezone': '+01:00', 'Sunrise': '08:07', 'Sunset': '17:04'}, - 'StatusSNS': {'Time': '2022-11-20T12:28:45'}, - 'StatusSTS': 
{'Time': '2022-11-20T12:28:45', 'Uptime': '0T23:18:30', 'UptimeSec': 83910, 'Vcc': 3.41, - 'Heap': 24, 'SleepMode': 'Dynamic', 'Sleep': 50, 'LoadAvg': 19, 'MqttCount': 1, - 'Wifi': {'AP': 1, 'SSId': 'WLAN-Access', 'BSSId': '38:10:D5:15:87:69', 'Channel': 1, - 'Mode': '11n', 'RSSI': 50, 'Signal': -75, 'LinkCount': 1, - 'Downtime': '0T00:00:03'}}} - - """ - - self._handle_retained_message(topic, retain) - - try: - (topic_type, tasmota_topic, info_topic) = topic.split('/') - self.logger.info(f"on_mqtt_status0_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: - self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - - else: - self.logger.info(f"Received Status0 Message for {tasmota_topic} with value={payload} and retain={retain}") - self.tasmota_devices[tasmota_topic]['status'] = 'interviewed' - - # handle teleperiod - self._handle_teleperiod(tasmota_topic, payload['StatusLOG']) - - if self.tasmota_devices[tasmota_topic]['status'] != 'interviewed': - if self.tasmota_devices[tasmota_topic]['status'] != 'discovered': - # friendly name - self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['Status']['FriendlyName'][0] - - # IP Address - ip = payload['StatusNET']['IPAddress'] - ip_eth = payload['StatusNET'].get('Ethernet', {}).get('IPAddress') - ip = ip_eth if ip == '0.0.0.0' else None - self.tasmota_devices[tasmota_topic]['ip'] = ip - - # Firmware - self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['StatusFWR']['Version'].split('(')[0] - - # MAC - self.tasmota_devices[tasmota_topic]['mac'] = payload['StatusNET']['Mac'] - - # Module No - self.tasmota_devices[tasmota_topic]['template'] = payload['Status']['Module'] - - # get detailed status using payload['StatusSTS'] - status_sts = payload['StatusSTS'] - - # Handling Lights and Dimmer - if any([i in status_sts for i in self.LIGHT_MSG]): - self._handle_lights(tasmota_topic, info_topic, status_sts) - - # Handling of Power - if any(item.startswith("POWER") for item in status_sts.keys()): - self._handle_power(tasmota_topic, info_topic, status_sts) - - # Handling of RF messages - if any(item.startswith("Rf") for item in status_sts.keys()): - self._handle_rf(tasmota_topic, info_topic, status_sts) - - # Handling of Wi-Fi - if 'Wifi' in status_sts: - self._handle_wifi(tasmota_topic, status_sts['Wifi']) - - # Handling of Uptime - if 'Uptime' in status_sts: - self._handle_uptime(tasmota_topic, status_sts['Uptime']) - - # Handling of UptimeSec - if 'UptimeSec' in status_sts: - self.logger.info(f"Received Message contains UptimeSec information.") - self._handle_uptime_sec(tasmota_topic, status_sts['UptimeSec']) - - def on_mqtt_info_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - - """ - - self._handle_retained_message(topic, retain) - - try: - (topic_type, tasmota_topic, info_topic) = topic.split('/') - self.logger.debug(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: - self.logger.error(f"received topic {topic} is not in correct format. 
Error was: {e}") - else: - if info_topic == 'INFO1': - # payload={'Info1': {'Module': 'Sonoff Basic', 'Version': '11.0.0(tasmota)', 'FallbackTopic': 'cmnd/DVES_2EB8AE_fb/', 'GroupTopic': 'cmnd/tasmotas/'}} - self.logger.debug(f"Received Message decoded as INFO1 message.") - self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['Info1']['Version'].split('(')[0] - self.tasmota_devices[tasmota_topic]['module_no'] = payload['Info1']['Module'] - - elif info_topic == 'INFO2': - # payload={'Info2': {'WebServerMode': 'Admin', 'Hostname': 'SONOFF-B1-6318', 'IPAddress': '192.168.2.25'}} - self.logger.debug(f"Received Message decoded as INFO2 message.") - self.tasmota_devices[tasmota_topic]['ip'] = payload['Info2']['IPAddress'] - - elif info_topic == 'INFO3': - # payload={'Info3': {'RestartReason': 'Software/System restart', 'BootCount': 1395}} - self.logger.debug(f"Received Message decoded as INFO3 message.") - restart_reason = payload['Info3']['RestartReason'] - self.logger.warning(f"Device {tasmota_topic} (IP={self.tasmota_devices[tasmota_topic]['ip']}) just startet. Reason={restart_reason}") - - def on_mqtt_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - - """ - - self._handle_retained_message(topic, retain) - - try: - (topic_type, tasmota_topic, info_topic) = topic.split('/') - self.logger.info(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: - self.logger.error(f"received topic {topic} is not in correct format. 
Error was: {e}") - else: - - # handle unknown device - if tasmota_topic not in self.tasmota_devices: - self._handle_new_discovered_device(tasmota_topic) - - # handle message - if isinstance(payload, dict) and info_topic in ['STATE', 'RESULT']: - - # Handling of TelePeriod - if 'TelePeriod' in payload: - self.logger.info(f"Received Message decoded as teleperiod message.") - self._handle_teleperiod(tasmota_topic, payload['TelePeriod']) - - elif 'Module' in payload: - self.logger.info(f"Received Message decoded as Module message.") - self._handle_module(tasmota_topic, payload['Module']) - - # Handling of Light messages - elif any([i in payload for i in self.LIGHT_MSG]): - self.logger.info(f"Received Message decoded as light message.") - self._handle_lights(tasmota_topic, info_topic, payload) - - # Handling of Power messages - elif any(item.startswith("POWER") for item in payload.keys()): - self.logger.info(f"Received Message decoded as power message.") - self._handle_power(tasmota_topic, info_topic, payload) - - # Handling of RF messages payload={'Time': '2022-11-21T11:22:55', 'RfReceived': {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'}} - elif 'RfReceived' in payload: - self.logger.info(f"Received Message decoded as RF message.") - self._handle_rf(tasmota_topic, info_topic, payload['RfReceived']) - - # Handling of Setting messages - elif next(iter(payload)).startswith("SetOption"): - # elif any(item.startswith("SetOption") for item in payload.keys()): - self.logger.info(f"Received Message decoded as Tasmota Setting message.") - self._handle_setting(tasmota_topic, payload) - - # Handling of Zigbee Bridge Config messages - elif 'ZbConfig' in payload: - self.logger.info(f"Received Message decoded as Zigbee Config message.") - self._handle_zbconfig(tasmota_topic, payload['ZbConfig']) - - # Handling of Zigbee Bridge Status messages - elif any(item.startswith("ZbStatus") for item in payload.keys()): - self.logger.info(f"Received Message decoded as Zigbee ZbStatus message.") - self._handle_zbstatus(tasmota_topic, payload) - - # Handling of Wi-Fi - if 'Wifi' in payload: - self.logger.info(f"Received Message contains Wifi information.") - self._handle_wifi(tasmota_topic, payload['Wifi']) - - # Handling of Uptime - if 'Uptime' in payload: - self.logger.info(f"Received Message contains Uptime information.") - self._handle_uptime(tasmota_topic, payload['Uptime']) - - # Handling of UptimeSec - if 'UptimeSec' in payload: - self.logger.info(f"Received Message contains UptimeSec information.") - self._handle_uptime_sec(tasmota_topic, payload['UptimeSec']) - - elif isinstance(payload, dict) and info_topic == 'SENSOR': - self.logger.info(f"Received Message contains sensor information.") - self._handle_sensor(tasmota_topic, info_topic, payload) - - else: - self.logger.warning(f"Received Message '{payload}' not handled within plugin.") - - # setting new online-timeout - self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) - - # setting online_item to True - self._set_item_value(tasmota_topic, 'item_online', True, info_topic) - - def on_mqtt_power_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: - """ - Callback function to handle received messages - - :param topic: MQTT topic - :param payload: MQTT message payload - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - - """ - - self._handle_retained_message(topic, retain) - - # 
check for retained message and handle it - if bool(retain): - if topic not in self.topics_of_retained_messages: - self.topics_of_retained_messages.append(topic) - else: - if topic in self.topics_of_retained_messages: - self.topics_of_retained_messages.remove(topic) - - # handle incoming message - try: - (topic_type, tasmota_topic, info_topic) = topic.split('/') - self.logger.info(f"on_mqtt_power_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") - except Exception as e: - self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") - else: - device = self.tasmota_devices.get(tasmota_topic, None) - if device: - if info_topic.startswith('POWER'): - tasmota_relay = str(info_topic[5:]) - tasmota_relay = '1' if not tasmota_relay else None - item_relay = f'item_relay{tasmota_relay}' - self._set_item_value(tasmota_topic, item_relay, payload == 'ON', info_topic) - self.tasmota_devices[tasmota_topic]['relais'][info_topic] = payload - - ############################################################ - # Parse detailed messages - ############################################################ - - def _handle_sensor(self, device: str, function: str, payload: dict) -> None: - """ - - :param device: - :param function: - :param payload: - :return: - """ - # Handling of Zigbee Device Messages - if 'ZbReceived' in payload: - self.logger.info(f"Received Message decoded as Zigbee Sensor message.") - self._handle_sensor_zigbee(device, function, payload['ZbReceived']) - - # Handling of Energy Sensors - elif 'ENERGY' in payload: - self.logger.info(f"Received Message decoded as Energy Sensor message.") - self._handle_sensor_energy(device, function, payload['ENERGY']) - - # Handling of Environmental Sensors - elif any([i in payload for i in self.ENV_SENSOR]): - self._handle_sensor_env(device, function, payload) - - # Handling of Analog Sensors - elif 'ANALOG' in payload: - self.logger.info(f"Received Message decoded as ANALOG Sensor message.") - self._handle_sensor_analog(device, function, payload['ANALOG']) - - # Handling of Sensors of ESP32 - elif 'ESP32' in payload: - self.logger.info(f"Received Message decoded as ESP32 Sensor message.") - self._handle_sensor_esp32(device, function, payload['ESP32']) - - # Handling of any other Sensor e.g. all SML devices - else: - if len(payload) == 2 and isinstance(payload[list(payload.keys())[1]], dict): # wenn payload 2 Einträge und der zweite Eintrag vom Typ dict - self.logger.info(f"Received Message decoded as other Sensor message (e.g. 
smartmeter).") - sensor = list(payload.keys())[1] - self._handle_sensor_other(device, sensor, function, payload[sensor]) - - def _handle_sensor_zigbee(self, device: str, function: str, payload: dict) -> None: - """ - Handles Zigbee Sensor information and set items - - :param payload: payload containing zigbee sensor infos - :return: - """ - - """ - payload = {'Fenster_01': {'Device': '0xD4F3', 'Name': 'Fenster_01', 'Contact': 0, 'Endpoint': 1, 'LinkQuality': 92}} - """ - - # self.logger.debug(f"_handle_sensor_zigbee: {device=}, {function=}, {payload=}") - - for zigbee_device in payload: - if zigbee_device != '0x0000' and zigbee_device not in self.tasmota_zigbee_devices: - self.logger.info(f"New Zigbee Device '{zigbee_device}'based on {function}-Message from {device} discovered") - self.tasmota_zigbee_devices[zigbee_device] = {} - - # Make all keys of Zigbee-Device Payload Dict lowercase to match itemtype from parse_item - zigbee_device_dict = {k.lower(): v for k, v in payload[zigbee_device].items()} - - # Korrigieren der Werte für (HSB) Dimmer (0-254 -> 0-100), Hue(0-254 -> 0-360), Saturation (0-254 -> 0-100) - if 'dimmer' in zigbee_device_dict: - zigbee_device_dict.update({'dimmer': _254_to_100(zigbee_device_dict['dimmer'])}) - if 'sat' in zigbee_device_dict: - zigbee_device_dict.update({'sat': _254_to_100(zigbee_device_dict['sat'])}) - if 'hue' in zigbee_device_dict: - zigbee_device_dict.update({'hue': _254_to_360(zigbee_device_dict['hue'])}) - if 'ct' in zigbee_device_dict: - zigbee_device_dict['ct_k'] = _mired_to_kelvin(zigbee_device_dict['ct']) - - # Korrektur des LastSeenEpoch von Timestamp zu datetime - if 'lastseenepoch' in zigbee_device_dict: - zigbee_device_dict.update({'lastseenepoch': datetime.fromtimestamp(zigbee_device_dict['lastseenepoch'])}) - if 'batterylastseenepoch' in zigbee_device_dict: - zigbee_device_dict.update({'batterylastseenepoch': datetime.fromtimestamp(zigbee_device_dict['batterylastseenepoch'])}) - - # Udpate des Sub-Dicts - self.tasmota_zigbee_devices[zigbee_device].update(zigbee_device_dict) - - # Iterate over payload and set corresponding items - for element in zigbee_device_dict: - itemtype = f"item_{zigbee_device}.{element.lower()}" - value = zigbee_device_dict[element] - self._set_item_value(device, itemtype, value, function) - - def _handle_sensor_energy(self, device: str, function: str, energy: dict): - """ - Handle Energy Sensor Information - :param device: - :param energy: - :param function: - """ - - if 'ENERGY' not in self.tasmota_devices[device]['sensors']: - self.tasmota_devices[device]['sensors']['ENERGY'] = {} - - self.tasmota_devices[device]['sensors']['ENERGY']['period'] = energy.get('Period', None) - - for key in self.ENERGY_SENSOR_KEYS: - if key in energy: - self.tasmota_devices[device]['sensors']['ENERGY'][key.lower()] = energy[key] - self._set_item_value(device, self.ENERGY_SENSOR_KEYS[key], energy[key], function) - - def _handle_sensor_env(self, device: str, function: str, payload: dict): - """ - Handle Environmental Sensor Information - :param device: - :param function: - :param payload: - """ - - for sensor in self.ENV_SENSOR: - data = payload.get(sensor) - - if data and isinstance(data, dict): - self.logger.debug(f"Received Message decoded as {sensor} Sensor message.") - if sensor not in self.tasmota_devices[device]['sensors']: - self.tasmota_devices[device]['sensors'][sensor] = {} - - for key in self.ENV_SENSOR_KEYS: - if key in data: - self.tasmota_devices[device]['sensors'][sensor][key.lower()] = data[key] - 
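-                        # hypothetical example: a DS18B20 payload of {'Temperature': 21.4, 'Id': '0316A279'}
-                        # arrives here with sensor='DS18B20'; 'Temperature' maps via ENV_SENSOR_KEYS
-                        # to the item registered as 'item_temp', 'Id' to 'item_1wid'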
self._set_item_value(device, self.ENV_SENSOR_KEYS[key], data[key], function) - - def _handle_sensor_analog(self, device: str, function: str, analog: dict): - """ - Handle Analog Sensor Information - :param device: - :param function: - :param analog: - """ - - if 'ANALOG' not in self.tasmota_devices[device]['sensors']: - self.tasmota_devices[device]['sensors']['ANALOG'] = {} - - for key in self.ANALOG_SENSOR_KEYS: - if key in analog: - self.tasmota_devices[device]['sensors']['ANALOG'][key.lower()] = analog[key] - self._set_item_value(device, self.ANALOG_SENSOR_KEYS[key], analog[key], function) - - def _handle_sensor_esp32(self, device: str, function: str, esp32: dict): - """ - Handle ESP32 Sensor Information - :param device: - :param function: - :param esp32: - """ - - if 'ESP32' not in self.tasmota_devices[device]['sensors']: - self.tasmota_devices[device]['sensors']['ESP32'] = {} - - for key in self.ESP32_SENSOR_KEYS: - if key in esp32: - self.tasmota_devices[device]['sensors']['ESP32'][key.lower()] = esp32[key] - self._set_item_value(device, self.ESP32_SENSOR_KEYS[key], esp32[key], function) - - def _handle_sensor_other(self, device: str, sensor: str, function: str, payload: dict): - """ - Handle Other Sensor Information - :param device: Tasmota Device - :param sensor: Sensor Device - :param function: Messages Information will be taken from - :param payload: dict with infos - """ - - self.logger.debug(f"Received Message decoded as {sensor} Sensor message with payload={payload}.") - - if sensor not in self.tasmota_devices[device]['sensors']: - self.tasmota_devices[device]['sensors'][sensor] = {} - - # Make all keys of SML-Device Payload Dict lowercase to match itemtype from parse_item - sensor_dict = {k.lower(): v for k, v in payload.items()} - - # Udpate des Sub-Dicts - self.tasmota_devices[device]['sensors'][sensor].update(sensor_dict) - - # Iterate over payload and set corresponding items - for element in sensor_dict: - itemtype = f"item_{sensor}.{element.lower()}" - value = sensor_dict[element] - self._set_item_value(device, itemtype, value, function) - - def _handle_lights(self, device: str, function: str, payload: dict) -> None: - """ - Extracts Light information out of payload and updates plugin dict - - :param device: Device, the Light information shall be handled (equals tasmota_topic) - :param function: Function of Device (equals info_topic) - :param payload: MQTT message payload - - """ - hsb = payload.get('HSBColor') - if hsb: - if hsb.count(',') == 2: - hsb = hsb.split(",") - try: - hsb = [int(element) for element in hsb] - except Exception as e: - self.logger.info(f"Received Data for HSBColor do not contain in values for HSB. Payload was {hsb}. Error was {e}.") - else: - self.logger.info(f"Received Data for HSBColor do not contain values for HSB. 
Payload was {hsb}.") - self.tasmota_devices[device]['lights']['hsb'] = hsb - self._set_item_value(device, 'item_hsb', hsb, function) - - dimmer = payload.get('Dimmer') - if dimmer: - self.tasmota_devices[device]['lights']['dimmer'] = dimmer - self._set_item_value(device, 'item_dimmer', dimmer, function) - - color = payload.get('Color') - if color: - self.tasmota_devices[device]['lights']['color'] = str(color) - - ct = payload.get('CT') - if ct: - self.tasmota_devices[device]['lights']['ct'] = ct - self._set_item_value(device, 'item_ct', ct, function) - - white = payload.get('White') - if white: - self.tasmota_devices[device]['lights']['white'] = white - self._set_item_value(device, 'item_white', white, function) - - scheme = payload.get('Scheme') - if scheme: - self.tasmota_devices[device]['lights']['scheme'] = scheme - - fade = payload.get('Fade') - if fade: - self.tasmota_devices[device]['lights']['fade'] = bool(fade) - - speed = payload.get('Speed') - if speed: - self.tasmota_devices[device]['lights']['speed'] = speed - - ledtable = payload.get('LedTable') - if ledtable: - self.tasmota_devices[device]['lights']['ledtable'] = bool(ledtable) - - def _handle_power(self, device: str, function: str, payload: dict) -> None: - """ - Extracts Power information out of payload and updates plugin dict - - :param device: Device, the Power information shall be handled (equals tasmota_topic) - :param function: Function of Device (equals info_topic) - :param payload: MQTT message payload - - """ - # payload = {"Time": "2022-11-21T12:56:34", "Uptime": "0T00:00:11", "UptimeSec": 11, "Heap": 27, "SleepMode": "Dynamic", "Sleep": 50, "LoadAvg": 19, "MqttCount": 0, "POWER1": "OFF", "POWER2": "OFF", "POWER3": "OFF", "POWER4": "OFF", "Wifi": {"AP": 1, "SSId": "WLAN-Access", "BSSId": "38:10:D5:15:87:69", "Channel": 1, "Mode": "11n", "RSSI": 82, "Signal": -59, "LinkCount": 1, "Downtime": "0T00:00:03"}} - - power_dict = {key: val for key, val in payload.items() if key.startswith('POWER')} - self.tasmota_devices[device]['relais'].update(power_dict) - for power in power_dict: - relay_index = 1 if len(power) == 5 else str(power[5:]) - item_relay = f'item_relay{relay_index}' - self._set_item_value(device, item_relay, power_dict[power], function) - - def _handle_module(self, device: str, payload: dict) -> None: - """ - Extracts Module information out of payload and updates plugin dict payload = {"0":"ZB-GW03-V1.3"}} - - :param device: Device, the Module information shall be handled - :param payload: MQTT message payload - - """ - template = next(iter(payload)) - module = payload[template] - self.tasmota_devices[device]['module'] = module - self.tasmota_devices[device]['tasmota_template'] = template - - def _handle_rf(self, device: str, function: str, payload: dict) -> None: - """ - Extracts RF information out of payload and updates plugin dict - - :param device: Device, the RF information shall be handled - :param function: Function of Device (equals info_topic) - :param payload: MQTT message payload - - """ - - # payload = {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'} - - self.logger.info(f"Received Message decoded as RF message.") - self.tasmota_devices[device]['rf']['rf_received'] = payload - self._set_item_value(device, 'item_rf_recv', payload['Data'], function) - - rf_key = 0 if payload["RfKey"] == 'None' else int(payload["RfKey"]) - self._set_item_value(device, 'item_rf_key_recv', rf_key, function) - self._set_item_value(device, f'item_rf_key{rf_key}', True, function) - - def 
_handle_zbconfig(self, device: str, payload: dict) -> None: - """ - Extracts ZigBee Config information out of payload and updates plugin dict - - :param device: Device, the Zigbee Config information shall be handled - :param payload: MQTT message payload - - """ - # stat/SONOFF_ZB1/RESULT = {"ZbConfig":{"Channel":11,"PanID":"0x0C84","ExtPanID":"0xCCCCCCCCAAA8CC84","KeyL":"0xAAA8CC841B1F40A1","KeyH":"0xAAA8CC841B1F40A1","TxRadio":20}} - self.tasmota_devices[device]['zigbee']['zbconfig'] = payload - - def _handle_zbstatus(self, device: str, payload: dict) -> None: - """ - Extracts ZigBee Status information out of payload and updates plugin dict - - :param device: Device, the Zigbee Status information shall be handled - :param payload: MQTT message payload - - """ - - zbstatus1 = payload.get('ZbStatus1') - if zbstatus1: - self.logger.info(f"Received Message decoded as Zigbee ZbStatus1 message for device {device}.") - self._handle_zbstatus1(device, zbstatus1) - - zbstatus23 = payload.get('ZbStatus2') - if not zbstatus23: - zbstatus23 = payload.get('ZbStatus3') - - if zbstatus23: - self.logger.info(f"Received Message decoded as Zigbee ZbStatus2 or ZbStatus3 message for device {device}.") - self._handle_zbstatus23(device, zbstatus23) - - def _handle_zbstatus1(self, device: str, zbstatus1: list) -> None: - """ - Extracts ZigBee Status1 information out of payload and updates plugin dict - - :param device: Device, the Zigbee Status information shall be handled - :param zbstatus1: List of status information out mqtt payload - - """ - """ - zbstatus1 = [{'Device': '0x676D', 'Name': 'SNZB-02_01'}, - {'Device': '0xD4F3', 'Name': 'Fenster_01'} - ] - """ - - for element in zbstatus1: - zigbee_device = element.get('Name') - if not zigbee_device: - zigbee_device = element['Device'] - - if zigbee_device != '0x0000' and zigbee_device not in self.tasmota_zigbee_devices: - self.logger.info(f"New Zigbee Device '{zigbee_device}'based on 'ZbStatus1'-Message from {device} discovered") - self.tasmota_zigbee_devices[zigbee_device] = {} - - # request detailed information of all discovered zigbee devices - self._poll_zigbee_devices(device) - - def _handle_zbstatus23(self, device: str, zbstatus23: dict) -> None: - """ - Extracts ZigBee Status 2 and 3 information out of payload and updates plugin dict - - :param device: Device, the Zigbee Status information shall be handled - :param zbstatus23: ZbStatus2 or ZbStatus 3 part of MQTT message payload - - """ - - """ - zbstatus23 = [{'Device': '0xD4F3', 'Name': 'Fenster_01', 'IEEEAddr': '0x00158D0007005B59', - 'ModelId': 'lumi.sensor_magnet.aq2', 'Manufacturer': 'LUMI', 'Endpoints': [1], - 'Config': ['A01'], 'ZoneStatus': 29697, 'Reachable': True, 'BatteryPercentage': 100, - 'BatteryLastSeenEpoch': 1668953504, 'LastSeen': 238, 'LastSeenEpoch': 1668953504, - 'LinkQuality': 81}] - - zbstatus23 = [{'Device': '0x676D', 'Name': 'SNZB-02_01', 'IEEEAddr': '0x00124B00231E45B8', - 'ModelId': 'TH01', 'Manufacturer': 'eWeLink', 'Endpoints': [1], 'Config': ['T01'], - 'Temperature': 19.27, 'Humidity': 58.12, 'Reachable': True, 'BatteryPercentage': 73, - 'BatteryLastSeenEpoch': 1668953064, 'LastSeen': 610, 'LastSeenEpoch': 1668953064, 'LinkQuality': 66}] - - zbstatus23 = [{'Device': '0x0A22', 'IEEEAddr': '0xF0D1B800001571C5', 'ModelId': 'CLA60 RGBW Z3', - 'Manufacturer': 'LEDVANCE', 'Endpoints': [1], 'Config': ['L01', 'O01'], 'Dimmer': 100, - 'Hue': 200, 'Sat': 254, 'X': 1, 'Y': 1, 'CT': 350, 'ColorMode': 0, 'RGB': 'B600FF', - 'RGBb': '480064', 'Power': 1, 'Reachable': False, 'LastSeen': 
30837743, - 'LastSeenEpoch': 1638132192, 'LinkQuality': 13}] - """ - - for element in zbstatus23: - zigbee_device = element.get('Name') - if not zigbee_device: - zigbee_device = element['Device'] - - payload = dict() - payload[zigbee_device] = element - - self._handle_sensor_zigbee(device, 'ZbStatus', payload) - - def _handle_wifi(self, device: str, payload: dict) -> None: - """ - Extracts Wi-Fi information out of payload and updates plugin dict - - :param device: Device, the Zigbee Status information shall be handled - :param payload: MQTT message payload - - """ - self.logger.debug(f"_handle_wifi: received payload={payload}") - wifi_signal = payload.get('Signal') - if wifi_signal: - if isinstance(wifi_signal, str) and wifi_signal.isdigit(): - wifi_signal = int(wifi_signal) - self.tasmota_devices[device]['wifi_signal'] = wifi_signal - - def _handle_setting(self, device: str, payload: dict) -> None: - """ - Extracts Zigbee Bridge Setting information out of payload and updates dict - :param device: - :param payload: MQTT message payload - """ - - # handle Setting listed in Zigbee Bridge Settings (wenn erster Key des Payload-Dict in Zigbee_Bridge_Default_Setting...) - if next(iter(payload)) in self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS: - if not self.tasmota_devices[device]['zigbee'].get('setting'): - self.tasmota_devices[device]['zigbee']['setting'] = {} - self.tasmota_devices[device]['zigbee']['setting'].update(payload) - - if self.tasmota_devices[device]['zigbee']['setting'] == self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS: - self.tasmota_devices[device]['zigbee']['status'] = 'set' - self.logger.info(f'_handle_setting: Setting of Tasmota Zigbee Bridge successful.') - - def _handle_teleperiod(self, tasmota_topic: str, teleperiod: dict) -> None: - - self.tasmota_devices[tasmota_topic]['teleperiod'] = teleperiod - if teleperiod != self.telemetry_period: - self._set_telemetry_period(tasmota_topic) - - def _handle_uptime(self, tasmota_topic: str, uptime: str) -> None: - self.logger.debug(f"Received Message contains Uptime information. uptime={uptime}") - self.tasmota_devices[tasmota_topic]['uptime'] = uptime - - def _handle_uptime_sec(self, tasmota_topic: str, uptime_sec: int) -> None: - self.logger.debug(f"Received Message contains UptimeSec information. 
uptime={uptime_sec}") - self.tasmota_devices[tasmota_topic]['uptime_sec'] = int(uptime_sec) - - ############################################################ - # MQTT Settings & Config - ############################################################ - - def add_tasmota_subscriptions(self): - self.logger.info(f"Further tasmota_subscriptions for regular/cyclic messages will be added") - - self.add_tasmota_subscription('tele', '+', 'STATE', 'dict', callback=self.on_mqtt_message) - self.add_tasmota_subscription('tele', '+', 'SENSOR', 'dict', callback=self.on_mqtt_message) - self.add_tasmota_subscription('tele', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) - # self.add_tasmota_subscription('tele', '+', 'INFO1', 'dict', callback=self.on_mqtt_message) - # self.add_tasmota_subscription('tele', '+', 'INFO2', 'dict', callback=self.on_mqtt_message) - self.add_tasmota_subscription('tele', '+', 'INFO3', 'dict', callback=self.on_mqtt_info_message) - self.add_tasmota_subscription('stat', '+', 'POWER', 'num', callback=self.on_mqtt_power_message) - self.add_tasmota_subscription('stat', '+', 'POWER1', 'num', callback=self.on_mqtt_power_message) - self.add_tasmota_subscription('stat', '+', 'POWER2', 'num', callback=self.on_mqtt_power_message) - self.add_tasmota_subscription('stat', '+', 'POWER3', 'num', callback=self.on_mqtt_power_message) - self.add_tasmota_subscription('stat', '+', 'POWER4', 'num', callback=self.on_mqtt_power_message) - - def check_online_status(self): - """ - checks all tasmota topics, if last message is with telemetry period. If not set tasmota_topic offline - - """ - - self.logger.info("check_online_status: Checking online status of connected devices") - for tasmota_topic in self.tasmota_devices: - if self.tasmota_devices[tasmota_topic].get('online') is True and self.tasmota_devices[tasmota_topic].get('online_timeout'): - if self.tasmota_devices[tasmota_topic]['online_timeout'] < datetime.now(): - self._set_device_offline(tasmota_topic) - else: - self.logger.debug(f'check_online_status: Checking online status of {tasmota_topic} successful') - - def add_tasmota_subscription(self, prefix: str, topic: str, detail: str, payload_type: str, bool_values: list = None, item=None, callback=None) -> None: - """ - build the topic in Tasmota style and add the subscription to mqtt - - :param prefix: prefix of topic to subscribe to - :param topic: unique part of topic to subscribe to - :param detail: detail of topic to subscribe to - :param payload_type: payload type of the topic (for this subscription to the topic) - :param bool_values: bool values (for this subscription to the topic) - :param item: item that should receive the payload as value. 
Used by the standard handler (if no callback function is specified) - :param callback: a plugin can provide an own callback function, if special handling of the payload is needed - - """ - - tpc = self.full_topic.replace("%prefix%", prefix) - tpc = tpc.replace("%topic%", topic) - tpc += detail - self.add_subscription(tpc, payload_type, bool_values=bool_values, callback=callback) - - def publish_tasmota_topic(self, prefix: str, topic: str, detail: str, payload, item=None, qos: int = None, retain: bool = False, bool_values: list = None) -> None: - """ - build the topic in Tasmota style and publish to mqtt - - :param prefix: prefix of topic to publish - :param topic: unique part of topic to publish - :param detail: detail of topic to publish - :param payload: payload to publish - :param item: item (if relevant) - :param qos: qos for this message (optional) - :param retain: retain flag for this message (optional) - :param bool_values: bool values (for publishing this topic, optional) - - """ - tpc = self.full_topic.replace("%prefix%", prefix) - tpc = tpc.replace("%topic%", topic) - tpc += detail - - self.publish_topic(tpc, payload, item, qos, retain, bool_values) - - def interview_all_devices(self): - - """ - Interview known Tasmota Devices (defined in item.yaml and self discovered) - """ - - self.logger.info(f"Interview of all known tasmota devices started.") - - tasmota_device_list = list(set(list(self.tasmota_device + self.discovered_device))) - - for device in tasmota_device_list: - self.logger.debug(f"Interview {device}.") - self._interview_device(device) - self.logger.debug(f"Set Telemetry period for {device}.") - self._set_telemetry_period(device) - - def clear_retained_messages(self, retained_msg=None): - """ - Method to clear all retained messages - """ - - if not retained_msg: - retained_msg = self.topics_of_retained_messages - - for topic in retained_msg: - try: - self.logger.warning(f"Clearing retained message for topic={topic}") - self.publish_topic(topic=topic, payload="", retain=True) - except Exception as e: - self.logger.warning(f"Clearing retained message for topic={topic}, caused error {e}") - pass - - def _interview_device(self, topic: str) -> None: - """ - ask for status info of each known tasmota_topic - - :param topic: tasmota Topic - """ - - # self.logger.debug(f"run: publishing 'cmnd/{topic}/Status0'") - self.publish_tasmota_topic('cmnd', topic, 'Status0', '') - - # self.logger.debug(f"run: publishing 'cmnd/{topic}/State'") - # self.publish_tasmota_topic('cmnd', topic, 'State', '') - - # self.logger.debug(f"run: publishing 'cmnd/{topic}/Module'") - # self.publish_tasmota_topic('cmnd', topic, 'Module', '') - - def _set_telemetry_period(self, topic: str) -> None: - """ - sets telemetry period for given topic/device - - :param topic: tasmota Topic - """ - - self.logger.info(f"run: Setting telemetry period to {self.telemetry_period} seconds") - self.publish_tasmota_topic('cmnd', topic, 'teleperiod', self.telemetry_period) - - ############################################################ - # Helper - ############################################################ - - def _set_item_value(self, tasmota_topic: str, itemtype: str, value, info_topic: str = '') -> None: - """ - Sets item value - - :param tasmota_topic: MQTT message payload - :param itemtype: itemtype to be set - :param value: value to be set - :param info_topic: MQTT info_topic - """ - - if tasmota_topic in self.tasmota_devices: - - # create source of item value - src = f"{tasmota_topic}:{info_topic}" if info_topic 
!= '' else f"{tasmota_topic}" - - if itemtype in self.tasmota_devices[tasmota_topic]['connected_items']: - # get item to be set - item = self.tasmota_devices[tasmota_topic]['connected_items'][itemtype] - - tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') - if tasmota_rf_details and '=' in tasmota_rf_details: - tasmota_rf_key, tasmota_rf_key_param = tasmota_rf_details.split('=') - - if tasmota_rf_key_param.lower() == 'true': - value = True - elif tasmota_rf_key_param.lower() == 'false': - value = True - elif tasmota_rf_key_param.lower() == 'toggle': - value = not(item()) - else: - self.logger.warning(f"Paramater of tasmota_rf_key unknown, Need to be True, False, Toggle") - return - - # set item value - self.logger.info(f"{tasmota_topic}: Item '{item.id()}' via itemtype '{itemtype}' set to value '{value}' provided by '{src}'.") - item(value, self.get_shortname(), src) - - else: - self.logger.debug(f"{tasmota_topic}: No item for itemtype '{itemtype}' defined to set to '{value}' provided by '{src}'.") - else: - self.logger.debug(f"{tasmota_topic} unknown.") - - def _handle_new_discovered_device(self, tasmota_topic): - - self._add_new_device_to_tasmota_devices(tasmota_topic) - self.tasmota_devices[tasmota_topic]['status'] = 'discovered' - self._interview_device(tasmota_topic) - - def _add_new_device_to_tasmota_devices(self, tasmota_topic): - self.tasmota_devices[tasmota_topic] = self._get_device_dict_1_template() - self.tasmota_devices[tasmota_topic].update(self._get_device_dict_2_template()) - - def _set_device_offline(self, tasmota_topic): - - self.tasmota_devices[tasmota_topic]['online'] = False - self._set_item_value(tasmota_topic, 'item_online', False, 'check_online_status') - self.logger.info(f"{tasmota_topic} is not online any more - online_timeout={self.tasmota_devices[tasmota_topic]['online_timeout']}, now={datetime.now()}") - - # clean data from dict to show correct status - self.tasmota_devices[tasmota_topic].update(self._get_device_dict_2_template()) - - @staticmethod - def _rename_discovery_keys(payload: dict) -> dict: - - link = {'ip': 'IP', - 'dn': 'DeviceName', - 'fn': 'FriendlyNames', # list - 'hn': 'HostName', - 'mac': 'MAC', - 'md': 'Module', - 'ty': 'Tuya', - 'if': 'ifan', - 'ofln': 'LWT-offline', - 'onln': 'LWT-online', - 'state': 'StateText', # [0..3] - 'sw': 'FirmwareVersion', - 't': 'Topic', - 'ft': 'FullTopic', - 'tp': 'Prefix', - 'rl': 'Relays', # 0: disabled, 1: relay, 2.. future extension (fan, shutter?) 
- 'swc': 'SwitchMode', - 'swn': 'SwitchName', - 'btn': 'Buttons', - 'so': 'SetOption', # needed by HA to map Tasmota devices to HA entities and triggers - 'lk': 'ctrgb', - 'lt_st': 'LightSubtype', - 'sho': 'sho', - 'sht': 'sht', - 'ver': 'ProtocolVersion', - } - - new_payload = {} - for k_old in payload: - k_new = link.get(k_old) - if k_new: - new_payload[k_new] = payload[k_old] - - return new_payload - - @staticmethod - def _get_device_dict_1_template(): - return {'connected_to_item': False, - 'online': False, - 'status': None, - 'connected_items': {}, - 'uptime': '-', - } - - @staticmethod - def _get_device_dict_2_template(): - return {'lights': {}, - 'rf': {}, - 'sensors': {}, - 'relais': {}, - 'zigbee': {}, - 'sml': {}, - } - - ############################################################ - # Zigbee - ############################################################ - - def _poll_zigbee_devices(self, device: str) -> None: - """ - Polls information of all discovered zigbee devices from dedicated Zigbee bridge - - :param device: Zigbee bridge, where all Zigbee Devices shall be polled (equal to tasmota_topic) - - """ - self.logger.info(f"_poll_zigbee_devices: Polling information of all discovered Zigbee devices for zigbee_bridge {device}") - for zigbee_device in self.tasmota_zigbee_devices: - # self.logger.debug(f"_poll_zigbee_devices: publishing 'cmnd/{device}/ZbStatus3 {zigbee_device}'") - self.publish_tasmota_topic('cmnd', device, 'ZbStatus3', zigbee_device) - - def _configure_zigbee_bridge_settings(self, device: str) -> None: - """ - Configures Zigbee Bridge settings - - :param device: Zigbee bridge to be set to get MQTT Messages in right format") - """ - - self.logger.info(f"_configure_zigbee_bridge_settings: Do settings of ZigbeeBridge {device}") - bridge_setting_backlog = '; '.join(f"{key} {value}" for key, value in self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS.items()) - self.publish_tasmota_topic('cmnd', device, 'Backlog', bridge_setting_backlog) - - def _request_zigbee_bridge_config(self, device: str) -> None: - """ - Request Zigbee Bridge configuration - - :param device: Zigbee bridge to be requested (equal to tasmota_topic) - """ - - self.logger.info(f"_request_zigbee_bridge_config: Request configuration of Zigbee bridge {device}") - # self.logger.debug(f"_discover_zigbee_bridge: publishing 'cmnd/{device}/ZbConfig'") - self.publish_tasmota_topic('cmnd', device, 'ZbConfig', '') - - def _discover_zigbee_bridge_devices(self, device: str) -> None: - """ - Discovers all connected Zigbee devices - - :param device: Zigbee bridge where connected devices shall be discovered (equal to tasmota_topic) - """ - - self.logger.info(f"_discover_zigbee_bridge_devices: Discover all connected Zigbee devices for ZigbeeBridge {device}") - self.publish_tasmota_topic('cmnd', device, 'ZbStatus1', '') - - def _handle_retained_message(self, topic: str, retain: bool) -> None: - """ - check for retained message and handle it - - :param topic: - :param retain: - """ - - if bool(retain): - if topic not in self.topics_of_retained_messages: - self.topics_of_retained_messages.append(topic) - else: - if topic in self.topics_of_retained_messages: - self.topics_of_retained_messages.remove(topic) - - ############################################################ - # Plugin Properties - ############################################################ - - @property - def log_level(self): - return self.logger.getEffectiveLevel() - - @property - def retained_msg_count(self): - return self._broker.retained_messages - - @property - def 
tasmota_device(self): - return list(self.tasmota_devices.keys()) - - @property - def has_zigbee(self): - for tasmota_topic in self.tasmota_devices: - if self.tasmota_devices[tasmota_topic]['zigbee']: - return True - return False - - @property - def has_lights(self): - for tasmota_topic in self.tasmota_devices: - if self.tasmota_devices[tasmota_topic]['lights']: - return True - return False - - @property - def has_rf(self): - for tasmota_topic in self.tasmota_devices: - if self.tasmota_devices[tasmota_topic]['rf']: - return True - return False - - @property - def has_relais(self): - for tasmota_topic in self.tasmota_devices: - if self.tasmota_devices[tasmota_topic]['relais']: - return True - return False - - @property - def has_energy_sensor(self): - for tasmota_topic in self.tasmota_devices: - if 'ENERGY' in self.tasmota_devices[tasmota_topic]['sensors']: - return True - return False - - @property - def has_env_sensor(self): - for tasmota_topic in self.tasmota_devices: - if any([i in self.tasmota_devices[tasmota_topic]['sensors'] for i in self.ENV_SENSOR]): - return True - return False - - @property - def has_ds18b20_sensor(self): - for tasmota_topic in self.tasmota_devices: - if 'DS18B20' in self.tasmota_devices[tasmota_topic]['sensors']: - return True - return False - - @property - def has_am2301_sensor(self): - for tasmota_topic in self.tasmota_devices: - if 'AM2301' in self.tasmota_devices[tasmota_topic]['sensors']: - return True - return False - - @property - def has_sht3x_sensor(self): - for tasmota_topic in self.tasmota_devices: - if 'SHT3X' in self.tasmota_devices[tasmota_topic]['sensors']: - return True - return False - - @property - def has_other_sensor(self): - for tasmota_topic in self.tasmota_devices: - for sensor in self.tasmota_devices[tasmota_topic]['sensors']: - if sensor not in self.SENSORS: - return True - return False - -################################################################## -# Utilities -################################################################## - - -def _254_to_100(value): - return int(round(value * 100 / 254, 0)) - - -def _254_to_360(value): - return int(round(value * 360 / 254, 0)) - - -def _100_to_254(value): - return int(round(value * 254 / 100, 0)) - - -def _360_to_254(value): - return int(round(value * 254 / 360, 0)) - - -def _kelvin_to_mired(value): - """Umrechnung der Farbtemperatur von Kelvin auf "mired scale" (Reziproke Megakelvin)""" - return int(round(1000000 / value, 0)) - - -def _mired_to_kelvin(value): - """Umrechnung der Farbtemperatur von "mired scale" (Reziproke Megakelvin) auf Kelvin""" - return int(round(10000 / int(value), 0)) * 100 +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2020- Martin Sinn m.sinn@gmx.de +# Copyright 2021- Michael Wenzel wenzel_michael@web.de +######################################################################### +# This file is part of SmartHomeNG. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +from datetime import datetime, timedelta + +from lib.model.mqttplugin import * +from .webif import WebInterface + + +class Tasmota(MqttPlugin): + """ + Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items + """ + + PLUGIN_VERSION = '1.4.0' + + LIGHT_MSG = ['HSBColor', 'Dimmer', 'Color', 'CT', 'Scheme', 'Fade', 'Speed', 'LedTable', 'White'] + + RF_MSG = ['RfSync', 'RfLow', 'RfHigh', 'RfCode'] + + ZIGBEE_BRIDGE_DEFAULT_OPTIONS = {'SetOption89': 'OFF', + 'SetOption101': 'OFF', + 'SetOption120': 'OFF', + 'SetOption83': 'ON', + 'SetOption112': 'OFF', + 'SetOption110': 'OFF', + 'SetOption119': 'OFF', + 'SetOption118': 'OFF', + 'SetOption125': 'ON', + } + TASMOTA_ATTR_R_W = ['relay', 'hsb', 'white', 'ct', 'rf_send', 'rf_key_send', 'zb_permit_join', 'zb_forget', 'zb_ping', 'rf_key'] + + TASMOTA_ZB_ATTR_R_W = ['power', 'hue', 'sat', 'ct', 'dimmer', 'ct_k'] + + ENERGY_SENSOR_KEYS = {'Voltage': 'item_voltage', + 'Current': 'item_current', + 'Power': 'item_power', + 'ApparentPower': 'item_apparent_power', + 'ReactivePower': 'item_reactive_power', + 'Factor': 'item_power_factor', + 'TotalStartTime': 'item_total_starttime', + 'Total': 'item_power_total', + 'Yesterday': 'item_power_yesterday', + 'Today': 'item_power_today'} + + ENV_SENSOR = ['DS18B20', 'AM2301', 'SHT3X', 'BMP280', 'DHT11'] + + ENV_SENSOR_KEYS = {'Temperature': 'item_temp', + 'Humidity': 'item_hum', + 'DewPoint': 'item_dewpoint', + 'Pressure': 'item_pressure', + 'Id': 'item_1wid'} + + ANALOG_SENSOR_KEYS = {'Temperature': 'item_analog_temp', + 'Temperature1': 'item_analog_temp1', + 'A0': 'item_analog_a0', + 'Range': 'item_analog_range'} + + ESP32_SENSOR_KEYS = {'Temperature': 'item_esp32_temp'} + + SENSORS = [*ENV_SENSOR, + 'ENERGY', + ] + + def __init__(self, sh): + """ + Initializes the plugin. 
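For orientation, the ENERGY_SENSOR_KEYS table above is what later turns a tele/.../SENSOR energy payload into item updates. A minimal standalone sketch of that mapping, using only a subset of the keys; map_energy_payload is an illustrative helper, not part of the plugin:

    ENERGY_SENSOR_KEYS = {'Voltage': 'item_voltage', 'Power': 'item_power', 'Total': 'item_power_total'}

    def map_energy_payload(energy: dict):
        # yields (itemtype, value) pairs the way _handle_sensor_energy hands them to _set_item_value
        for key, itemtype in ENERGY_SENSOR_KEYS.items():
            if key in energy:
                yield itemtype, energy[key]

    print(dict(map_energy_payload({'Voltage': 230, 'Power': 12, 'Total': 85.314})))
    # -> {'item_voltage': 230, 'item_power': 12, 'item_power_total': 85.314}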
+ """ + + # Call init code of parent class (MqttPlugin) + super().__init__() + if not self._init_complete: + return + + # get the parameters for the plugin (as defined in metadata plugin.yaml): + self.telemetry_period = self.get_parameter_value('telemetry_period') + self.full_topic = self.get_parameter_value('full_topic').lower() + + # crate full_topic + if self.full_topic.find('%prefix%') == -1 or self.full_topic.find('%topic%') == -1: + self.full_topic = '%prefix%/%topic%/' + if self.full_topic[-1] != '/': + self.full_topic += '/' + + # Define properties + self.tasmota_devices = {} # to hold tasmota device information for web interface + self.tasmota_zigbee_devices = {} # to hold tasmota zigbee device information for web interface + self.tasmota_items = [] # to hold item information for web interface + self.topics_of_retained_messages = [] # to hold all topics of retained messages + + self.alive = None + + # Add subscription to get device discovery + self.add_subscription( 'tasmota/discovery/#', 'dict', callback=self.on_mqtt_discovery_message) + # Add subscription to get device LWT + self.add_tasmota_subscription('tele', '+', 'LWT', 'bool', bool_values=['Offline', 'Online'], callback=self.on_mqtt_lwt_message) + # Add subscription to get device status + self.add_tasmota_subscription('stat', '+', 'STATUS0', 'dict', callback=self.on_mqtt_status0_message) + # Add subscription to get device actions result + self.add_tasmota_subscription('stat', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) + + # Init WebIF + self.init_webinterface(WebInterface) + return + + def run(self): + """ + Run method for the plugin + """ + self.logger.debug("Run method called") + + # start subscription to all defined topics + self.start_subscriptions() + + self.logger.debug(f"Scheduler: 'check_online_status' created") + dt = self.shtime.now() + timedelta(seconds=(self.telemetry_period - 3)) + self.scheduler_add('check_online_status', self.check_online_status, cycle=self.telemetry_period, next=dt) + + self.logger.debug(f"Scheduler: 'add_tasmota_subscriptions' created") + self.scheduler_add('add_tasmota_subscriptions', self.add_tasmota_subscriptions, cron='init+20') + + self.alive = True + + def stop(self): + """ + Stop method for the plugin + """ + self.alive = False + self.logger.debug("Stop method called") + self.scheduler_remove('check_online_status') + + # stop subscription to all topics + self.stop_subscriptions() + + def parse_item(self, item): + """ + Default plugin parse_item method. Is called when the plugin is initialized. + The plugin can, corresponding to its attribute keywords, decide what to do with + the item in the future, like adding it to an internal array for future reference + :param item: The item to process. + :return: If the plugin needs to be informed of an items change you should return a call back function + like the function update_item down below. An example when this is needed is the knx plugin + where parse_item returns the update_item function when the attribute knx_send is found. + This means that when the items value is about to be updated, the call back function is called + with the item, caller, source and dest as arguments and in case of the knx plugin the value + can be sent to the knx with a knx write function within the knx plugin. 
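A side note on the full_topic handling above: only templates containing both %prefix% and %topic% are accepted, and a trailing slash is enforced, so a subscription such as add_tasmota_subscription('tele', '+', 'LWT', ...) ends up on the MQTT topic tele/+/LWT. A minimal standalone sketch of that normalization, assuming the same placeholder convention as Tasmota's FullTopic setting; normalize_full_topic is an illustrative name, not part of the plugin:

    def normalize_full_topic(full_topic: str) -> str:
        # fall back to the Tasmota default if one of the placeholders is missing
        if full_topic.find('%prefix%') == -1 or full_topic.find('%topic%') == -1:
            full_topic = '%prefix%/%topic%/'
        if full_topic[-1] != '/':
            full_topic += '/'
        return full_topic

    # build the subscription topic the same way add_tasmota_subscription does
    tpc = normalize_full_topic('%prefix%/%topic%').replace('%prefix%', 'tele').replace('%topic%', '+') + 'LWT'
    print(tpc)  # -> tele/+/LWT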
+ """ + + if self.has_iattr(item.conf, 'tasmota_topic'): + tasmota_topic = self.get_iattr_value(item.conf, 'tasmota_topic') + self.logger.info(f"parsing item: {item.id()} with tasmota_topic={tasmota_topic}") + + tasmota_attr = self.get_iattr_value(item.conf, 'tasmota_attr') + tasmota_relay = self.get_iattr_value(item.conf, 'tasmota_relay') + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') + tasmota_zb_device = self.get_iattr_value(item.conf, 'tasmota_zb_device') + tasmota_zb_group = self.get_iattr_value(item.conf, 'tasmota_zb_group') + tasmota_zb_attr = self.get_iattr_value(item.conf, 'tasmota_zb_attr') + tasmota_zb_attr = tasmota_zb_attr.lower() if tasmota_zb_attr else None + tasmota_sml_device = self.get_iattr_value(item.conf, 'tasmota_sml_device') + tasmota_sml_attr = self.get_iattr_value(item.conf, 'tasmota_sml_attr') + tasmota_sml_attr = tasmota_sml_attr.lower() if tasmota_sml_attr else None + + # handle tasmota devices without zigbee + if tasmota_attr: + self.logger.info(f"Item={item.id()} identified for Tasmota with tasmota_attr={tasmota_attr}") + tasmota_attr = tasmota_attr.lower() + tasmota_relay = 1 if not tasmota_relay else tasmota_relay + + if tasmota_rf_details and '=' in tasmota_rf_details: + tasmota_rf_details, tasmota_rf_key_param = tasmota_rf_details.split('=') + + # handle tasmota zigbee devices + elif tasmota_zb_device and tasmota_zb_attr: + self.logger.info(f"Item={item.id()} identified for Tasmota Zigbee with tasmota_zb_device={tasmota_zb_device} and tasmota_zb_attr={tasmota_zb_attr}") + + # check if zigbee device short name has been used without parentheses; if so this will be normally parsed to a number and therefore mismatch with definition + try: + tasmota_zb_device = int(tasmota_zb_device) + self.logger.warning(f"Probably for item {item.id()} the device short name as been used for attribute 'tasmota_zb_device'. Trying to make that work but it will cause exceptions. To prevent this, the short name need to be defined as string by using parentheses") + tasmota_zb_device = str(hex(tasmota_zb_device)) + tasmota_zb_device = tasmota_zb_device[0:2] + tasmota_zb_device[2:len(tasmota_zb_device)].upper() + except Exception: + pass + + # handle tasmota zigbee groups + elif tasmota_zb_group and tasmota_zb_attr: + self.logger.info(f"Item={item.id()} identified for Tasmota Zigbee with tasmota_zb_group={tasmota_zb_group} and tasmota_zb_attr={tasmota_zb_attr}") + + # handle tasmota smartmeter devices + elif tasmota_sml_device and tasmota_sml_attr: + self.logger.info(f"Item={item.id()} identified for Tasmota SML with tasmota_sml_device={tasmota_sml_device} and tasmota_sml_attr={tasmota_sml_attr}") + + # handle everything else + else: + self.logger.info(f"Definition of attributes for item={item.id()} incomplete. 
Item will be ignored.") + return + + # setup dict for new device + if not self.tasmota_devices.get(tasmota_topic): + self._add_new_device_to_tasmota_devices(tasmota_topic) + self.tasmota_devices[tasmota_topic]['status'] = 'item.conf' + + # fill tasmota_device dict + self.tasmota_devices[tasmota_topic]['connected_to_item'] = True + if tasmota_attr == 'relay' and tasmota_relay: + item_type = f'item_{tasmota_attr}{tasmota_relay}' + elif tasmota_attr == 'rf_key' and tasmota_rf_details: + item_type = f'item_{tasmota_attr}{tasmota_rf_details}' + elif tasmota_zb_device and tasmota_zb_attr: + item_type = f'item_{tasmota_zb_device}.{tasmota_zb_attr}' + elif tasmota_sml_device and tasmota_sml_attr: + item_type = f'item_{tasmota_sml_device}.{tasmota_sml_attr}' + else: + item_type = f'item_{tasmota_attr}' + self.tasmota_devices[tasmota_topic]['connected_items'][item_type] = item + + # append to list used for web interface + if item not in self.tasmota_items: + self.tasmota_items.append(item) + + return self.update_item + + elif self.has_iattr(item.conf, 'tasmota_admin'): + self.logger.debug(f"parsing item: {item.id()} for tasmota admin attribute") + + return self.update_item + + def update_item(self, item, caller: str = None, source: str = None, dest: str = None): + """ + Item has been updated + + This method is called, if the value of an item has been updated by SmartHomeNG. + It should write the changed value out to the device (hardware/interface) that + is managed by this plugin. + + :param item: item to be updated towards the plugin + :param caller: if given it represents the callers name + :param source: if given it represents the source + :param dest: if given it represents the dest + """ + + if self.alive and caller != self.get_shortname(): + # code to execute if the plugin is not stopped AND only, if the item has not been changed by this plugin: + + # get tasmota attributes of item + tasmota_admin = self.get_iattr_value(item.conf, 'tasmota_admin') + tasmota_topic = self.get_iattr_value(item.conf, 'tasmota_topic') + tasmota_attr = self.get_iattr_value(item.conf, 'tasmota_attr') + tasmota_relay = self.get_iattr_value(item.conf, 'tasmota_relay') + tasmota_relay = '1' if not tasmota_relay else None + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_details') + tasmota_zb_device = self.get_iattr_value(item.conf, 'tasmota_zb_device') + tasmota_zb_group = self.get_iattr_value(item.conf, 'tasmota_zb_group') + tasmota_zb_attr = self.get_iattr_value(item.conf, 'tasmota_zb_attr') + tasmota_zb_cluster = self.get_iattr_value(item.conf, 'tasmota_zb_cluster') + tasmota_zb_attr = tasmota_zb_attr.lower() if tasmota_zb_attr else None + + # handle tasmota_admin + if tasmota_admin: + if tasmota_admin == 'delete_retained_messages' and bool(item()): + self.clear_retained_messages() + item(False, self.get_shortname()) + + # handle tasmota_attr + elif tasmota_attr and tasmota_attr in self.TASMOTA_ATTR_R_W: + self.logger.info(f"update_item: {item.id()}, item has been changed in SmartHomeNG outside of this plugin in {caller} with value {item()}") + + value = item() + link = { + # 'attribute': (detail, data_type, bool_values, min_value, max_value) + 'relay': (f'Power', bool, ['OFF', 'ON'], None, None), + 'hsb': ('HsbColor', list, None, None, None), + 'white': ('White', int, None, 0, 120), + 'ct': ('CT', int, None, 153, 500), + 'rf_send': ('Backlog', dict, None, None, None), + 'rf_key_send': (f'RfKey', int, None, 1, 16), + 'rf_key': (f'RfKey', bool, None, None, None), + 'zb_permit_join': ('ZbPermitJoin', 
bool, ['0', '1'], None, None), + 'zb_forget': ('ZbForget', bool, ['0', '1'], None, None), + 'zb_ping': ('ZbPing', bool, ['0', '1'], None, None), + } + + if tasmota_attr not in link: + return + + (detail, data_type, bool_values, min_value, max_value) = link[tasmota_attr] + + # check data type + if not isinstance(value, data_type): + self.logger.warning(f"update_item: type of value {type(value)} for tasmota_attr={tasmota_attr} to be published, does not fit with expected type '{data_type}'. Abort publishing.") + return + + # check and correct if value is in allowed range + if min_value and value < min_value: + self.logger.info(f'Commanded value for {tasmota_attr} below min value; set to allowed min value.') + value = min_value + elif max_value and value > max_value: + self.logger.info(f'Commanded value for {tasmota_attr} above max value; set to allowed max value.') + value = max_value + + # do tasmota_attr specific checks and adaptations + if tasmota_attr == 'relay': + detail = f"{detail}{tasmota_relay}" if tasmota_relay > '1' else detail + + elif tasmota_attr == 'hsb': + if not len(value) == 3: + return + new_value = f"{value[0]},{value[1]},{value[2]}" + value = new_value + + elif tasmota_attr == 'rf_send': + # Input: {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'} / Output: "RfSync 12220; RfLow 440; RfHigh 1210; RfCode #F06104" + rf_cmd = {k.lower(): v for k, v in value.items()} + if all(k in rf_cmd for k in [x.lower() for x in self.RF_MSG]): + value = f"RfSync {value['rfsync']}; RfLow {value['rflow']}; RfHigh {value['rfhigh']}; RfCode #{value['rfcode']}" + else: + self.logger.debug(f"update_item: rf_send received but not with correct content; expected content is: {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'}") + return + + elif tasmota_attr == 'rf_key_send': + detail = f"{detail}{value}" + value = 1 + + elif tasmota_attr == 'rf_key': + if not tasmota_rf_details: + self.logger.warning(f"tasmota_rf_details not specified, no action taken.") + return + + if tasmota_rf_details and '=' in tasmota_rf_details: + tasmota_rf_details, tasmota_rf_key_param = tasmota_rf_details.split('=') + + detail = f"{detail}{tasmota_rf_details}" + value = 1 + + elif tasmota_attr == 'zb_forget': + if value not in self.tasmota_zigbee_devices: + self.logger.error(f"Device {value} not known by plugin, no action taken.") + return + + elif tasmota_attr == 'zb_ping': + if value not in self.tasmota_zigbee_devices: + self.logger.error(f"Device {value} not known by plugin, no action taken.") + return + + if value is not None: + self.publish_tasmota_topic('cmnd', tasmota_topic, detail, value, item, bool_values=bool_values) + + # handle tasmota_zb_attr + elif tasmota_zb_attr and tasmota_zb_attr in self.TASMOTA_ZB_ATTR_R_W: + self.logger.info(f"update_item: item={item.id()} with tasmota_zb_attr={tasmota_zb_attr} has been changed from {caller} with value={item()}") + self.logger.info(f"update_item: tasmota_zb_device={tasmota_zb_device}; tasmota_zb_group={tasmota_zb_group}") + + if tasmota_zb_device is None and tasmota_zb_group is None: + return + + value = int(item()) + detail = 'ZbSend' + link = { + # 'attribute': (send_cmd, bool_values, min_value, max_value, cluster, convert) + 'power': ('Power', ['OFF', 'ON'], None, None, '0x0006', None), + 'dimmer': ('Dimmer', None, 0, 100, '0x0008', _100_to_254), + 'hue': ('Hue', None, 0, 360, '0x0300', _360_to_254), + 'sat': ('Sat', None, 0, 100, '0x0300', _100_to_254), + 'ct': ('CT', None, 150, 500, '0x0300', None), + 'ct_k': ('CT', None, 2000, 
6700, '0x0300', _kelvin_to_mired), + } + + if tasmota_zb_attr not in link: + return + + (send_cmd, bool_values, min_value, max_value, cluster, convert) = link[tasmota_zb_attr] + + # check and correct if value is in allowed range + if min_value and value < min_value: + self.logger.info(f'Commanded value for {tasmota_zb_attr} below min value; set to allowed min value.') + value = min_value + elif max_value and value > max_value: + self.logger.info(f'Commanded value for {tasmota_zb_attr} above max value; set to allowed max value.') + value = max_value + + # Konvertiere Wert + if convert: + value = convert(value) + + # build payload + payload = {'Device': tasmota_zb_device} if tasmota_zb_device else {'group': tasmota_zb_group} + payload['Send'] = {send_cmd: value} + if tasmota_zb_cluster: + payload['Cluster'] = cluster + + self.logger.debug(f"payload={payload}") + + # publish command + self.publish_tasmota_topic('cmnd', tasmota_topic, detail, payload, item, bool_values=bool_values) + + else: + self.logger.warning(f"update_item: {item.id()}, trying to change item in SmartHomeNG that is read only in tasmota device (by {caller})") + + ############################################################ + # Callbacks + ############################################################ + + # ToDo: 2023-01-20 17:21:04 ERROR modules.mqtt _on_log: Caught exception in on_message: 'ip' + + def on_mqtt_discovery_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: + """ + Callback function to handle received discovery messages + + :param topic: MQTT topic + :param payload: MQTT message payload + :param qos: qos for this message (optional) + :param retain: retain flag for this message (optional) + + """ + + self._handle_retained_message(topic, retain) + + try: + (tasmota, discovery, device_id, msg_type) = topic.split('/') + self.logger.info(f"on_mqtt_discovery_message: device_id={device_id}, type={msg_type}, payload={payload}") + except Exception as e: + self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") + else: + if msg_type == 'config': + """ + device_id = 2CF432CC2FC5 + + payload = + { + 'ip': '192.168.2.33', // IP address + 'dn': 'NXSM200_01', // Device name + 'fn': ['NXSM200_01', None, None, None, None, None, None, None], // List of friendly names + 'hn': 'NXSM200-01-4037', // Hostname + 'mac': '2CF432CC2FC5', // MAC Adresse ohne : + 'md': 'NXSM200', // Module + 'ty': 0, // Tuya + 'if': 0, // ifan + 'ofln': 'Offline', // LWT-offline + 'onln': 'Online', // LWT-online + 'state': ['OFF', 'ON', 'TOGGLE', 'HOLD'], // StateText[0..3] + 'sw': '12.1.1', // Firmware Version + 't': 'NXSM200_01', // Topic + 'ft': '%prefix%/%topic%/', // Full Topic + 'tp': ['cmnd', 'stat', 'tele'], // Topic [SUB_PREFIX, PUB_PREFIX, PUB_PREFIX2] + 'rl': [1, 0, 0, 0, 0, 0, 0, 0], // Relays, 0: disabled, 1: relay, 2.. future extension (fan, shutter?) 
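A rough illustration of the ZbSend flow above: the commanded value is clamped to its allowed range, converted where needed (percent to 0-254, Kelvin to mired) and wrapped into a ZbSend payload that is published to cmnd/<topic>/ZbSend. The sketch below is a simplified subset with no cluster handling; build_zbsend_payload and the inline lambdas are illustrative stand-ins for the plugin's link table and module-level converters, and the device id is only an example:

    def build_zbsend_payload(attr: str, value: int, zb_device: str) -> dict:
        # illustrative subset of the dispatch table: (send_cmd, min_value, max_value, convert)
        link = {
            'dimmer': ('Dimmer', 0, 100, lambda v: int(round(v * 254 / 100, 0))),
            'ct_k':   ('CT', 2000, 6700, lambda v: int(round(1000000 / v, 0))),
        }
        send_cmd, min_value, max_value, convert = link[attr]
        value = max(min_value, min(max_value, value))  # clamp to the allowed range
        if convert:
            value = convert(value)
        return {'Device': zb_device, 'Send': {send_cmd: value}}

    print(build_zbsend_payload('ct_k', 2700, '0x676D'))  # -> {'Device': '0x676D', 'Send': {'CT': 370}}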
+ 'swc': [-1, -1, -1, -1, -1, -1, -1, -1], // SwitchMode + 'swn': [None, None, None, None, None, None, None, None], // SwitchName + 'btn': [0, 0, 0, 0, 0, 0, 0, 0], // Buttons + 'so': {'4': 0, '11': 0, '13': 0, '17': 0, '20': 0, '30': 0, '68': 0, '73': 0, '82': 0, '114': 0, '117': 0}, // SetOption needed by HA to map Tasmota devices to HA entities and triggers + 'lk': 0, // ctrgb + 'lt_st': 0, // Light subtype + 'sho': [0, 0, 0, 0], + 'sht': [[0, 0, 48], [0, 0, 46], [0, 0, 110], [0, 0, 108]], + 'ver': 1 // Discovery protocol version + } + """ + + tasmota_topic = payload['t'] + if tasmota_topic: + + device_name = payload['dn'] + self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and device_name={device_name}") + + # if device is unknown, add it to dict + if tasmota_topic not in self.tasmota_devices: + self.logger.info(f"New device based on Discovery Message found.") + self._add_new_device_to_tasmota_devices(tasmota_topic) + + # process decoding message and set device to status 'discovered' + self.tasmota_devices[tasmota_topic]['ip'] = payload['ip'] + self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['fn'][0] + self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['sw'] + self.tasmota_devices[tasmota_topic]['device_id'] = device_id + self.tasmota_devices[tasmota_topic]['module'] = payload['md'] + self.tasmota_devices[tasmota_topic]['mac'] = ':'.join(device_id[i:i + 2] for i in range(0, 12, 2)) + self.tasmota_devices[tasmota_topic]['discovery_config'] = self._rename_discovery_keys(payload) + self.tasmota_devices[tasmota_topic]['status'] = 'discovered' + + # start device interview + self._interview_device(tasmota_topic) + + if payload['ft'] != self.full_topic: + self.logger.warning(f"Device {device_name} discovered, but FullTopic of device does not match plugin setting!") + + # if zigbee bridge, process those + if 'zigbee_bridge' in device_name.lower(): + self.logger.info(f"Zigbee_Bridge discovered") + self.tasmota_devices[tasmota_topic]['zigbee']['status'] = 'discovered' + self._configure_zigbee_bridge_settings(tasmota_topic) + self._discover_zigbee_bridge_devices(tasmota_topic) + + elif msg_type == 'sensors': + """ + device_id = 2CF432CC2FC5 + + payload = {'sn': {'Time': '2022-11-19T13:35:59', + 'ENERGY': {'TotalStartTime': '2019-12-23T17:02:03', 'Total': 85.314, 'Yesterday': 0.0, + 'Today': 0.0, 'Power': 0, 'ApparentPower': 0, 'ReactivePower': 0, 'Factor': 0.0, + 'Voltage': 0, 'Current': 0.0}}, 'ver': 1} + """ + + # get payload with Sensor information + sensor_payload = payload['sn'] + if 'Time' in sensor_payload: + sensor_payload.pop('Time') + + # find matching tasmota_topic + tasmota_topic = None + for entry in self.tasmota_devices: + if self.tasmota_devices[entry].get('device_id') == device_id: + tasmota_topic = entry + break + + # hand over sensor information payload for parsing + if sensor_payload and tasmota_topic: + self.logger.info(f"Discovered Tasmota Device with topic={tasmota_topic} and SensorInformation") + self._handle_sensor(tasmota_topic, '', sensor_payload) + + def on_mqtt_lwt_message(self, topic: str, payload: bool, qos: int = None, retain: bool = None) -> None: + """ + Callback function to handle received lwt messages + + :param topic: MQTT topic + :param payload: MQTT message payload + :param qos: qos for this message (optional) + :param retain: retain flag for this message (optional) + + """ + self._handle_retained_message(topic, retain) + + try: + (topic_type, tasmota_topic, info_topic) = topic.split('/') + except Exception as e: + 
self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") + else: + self.logger.info(f"Received LWT Message for {tasmota_topic} with value={payload} and retain={retain}") + + if payload: + if tasmota_topic not in self.tasmota_devices: + self.logger.debug(f"New online device based on LWT Message discovered.") + self._handle_new_discovered_device(tasmota_topic) + self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) + + if tasmota_topic in self.tasmota_devices: + self.tasmota_devices[tasmota_topic]['online'] = payload + self._set_item_value(tasmota_topic, 'item_online', payload, info_topic) + + def on_mqtt_status0_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: + """ + Callback function to handle received messages + + :param topic: MQTT topic + :param payload: MQTT message payload + :param qos: qos for this message + :param retain: retain flag for this message + + """ + + """ + Example payload + + payload = {'Status': {'Module': 75, 'DeviceName': 'ZIGBEE_Bridge01', 'FriendlyName': ['SONOFF_ZB1'], + 'Topic': 'SONOFF_ZB1', 'ButtonTopic': '0', 'Power': 0, 'PowerOnState': 3, 'LedState': 1, + 'LedMask': 'FFFF', 'SaveData': 1, 'SaveState': 1, 'SwitchTopic': '0', + 'SwitchMode': [0, 0, 0, 0, 0, 0, 0, 0], 'ButtonRetain': 0, 'SwitchRetain': 0, + 'SensorRetain': 0, 'PowerRetain': 0, 'InfoRetain': 0, 'StateRetain': 0}, + 'StatusPRM': {'Baudrate': 115200, 'SerialConfig': '8N1', 'GroupTopic': 'tasmotas', + 'OtaUrl': 'http://ota.tasmota.com/tasmota/release/tasmota-zbbridge.bin.gz', + 'RestartReason': 'Software/System restart', 'Uptime': '0T23:18:30', + 'StartupUTC': '2022-11-19T12:10:15', 'Sleep': 50, 'CfgHolder': 4617, 'BootCount': 116, + 'BCResetTime': '2021-04-28T08:32:10', 'SaveCount': 160, 'SaveAddress': '1FB000'}, + 'StatusFWR': {'Version': '12.1.1(zbbridge)', 'BuildDateTime': '2022-08-25T11:37:17', 'Boot': 31, + 'Core': '2_7_4_9', 'SDK': '2.2.2-dev(38a443e)', 'CpuFrequency': 160, + 'Hardware': 'ESP8266EX', 'CR': '372/699'}, + 'StatusLOG': {'SerialLog': 0, 'WebLog': 2, 'MqttLog': 0, 'SysLog': 0, 'LogHost': '', 'LogPort': 514, + 'SSId': ['WLAN-Access', ''], 'TelePeriod': 300, 'Resolution': '558180C0', + 'SetOption': ['00008009', '2805C80001000600003C5A0A002800000000', '00000080', + '40046002', '00004810', '00000000']}, + 'StatusMEM': {'ProgramSize': 685, 'Free': 1104, 'Heap': 25, 'ProgramFlashSize': 2048, + 'FlashSize': 2048, 'FlashChipId': '1540A1', 'FlashFrequency': 40, 'FlashMode': 3, + 'Features': ['00000809', '0F1007C6', '04400001', '00000003', '00000000', '00000000', + '00020080', '00200000', '04000000', '00000000'], + 'Drivers': '1,2,4,7,9,10,12,20,23,38,41,50,62', 'Sensors': '1'}, + 'StatusNET': {'Hostname': 'SONOFF-ZB1-6926', 'IPAddress': '192.168.2.24', 'Gateway': '192.168.2.1', + 'Subnetmask': '255.255.255.0', 'DNSServer1': '192.168.2.1', 'DNSServer2': '0.0.0.0', + 'Mac': '84:CC:A8:AA:1B:0E', 'Webserver': 2, 'HTTP_API': 1, 'WifiConfig': 0, + 'WifiPower': 17.0}, + 'StatusMQT': {'MqttHost': '192.168.2.12', 'MqttPort': 1883, 'MqttClientMask': 'DVES_%06X', + 'MqttClient': 'DVES_AA1B0E', 'MqttUser': 'DVES_USER', 'MqttCount': 1, + 'MAX_PACKET_SIZE': 1200, 'KEEPALIVE': 30, 'SOCKET_TIMEOUT': 4}, + 'StatusTIM': {'UTC': '2022-11-20T11:28:45', 'Local': '2022-11-20T12:28:45', + 'StartDST': '2022-03-27T02:00:00', 'EndDST': '2022-10-30T03:00:00', + 'Timezone': '+01:00', 'Sunrise': '08:07', 'Sunset': '17:04'}, + 'StatusSNS': {'Time': '2022-11-20T12:28:45'}, + 'StatusSTS': 
{'Time': '2022-11-20T12:28:45', 'Uptime': '0T23:18:30', 'UptimeSec': 83910, 'Vcc': 3.41, + 'Heap': 24, 'SleepMode': 'Dynamic', 'Sleep': 50, 'LoadAvg': 19, 'MqttCount': 1, + 'Wifi': {'AP': 1, 'SSId': 'WLAN-Access', 'BSSId': '38:10:D5:15:87:69', 'Channel': 1, + 'Mode': '11n', 'RSSI': 50, 'Signal': -75, 'LinkCount': 1, + 'Downtime': '0T00:00:03'}}} + + """ + + self._handle_retained_message(topic, retain) + + try: + (topic_type, tasmota_topic, info_topic) = topic.split('/') + self.logger.info(f"on_mqtt_status0_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") + except Exception as e: + self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") + + else: + self.logger.info(f"Received Status0 Message for {tasmota_topic} with value={payload} and retain={retain}") + self.tasmota_devices[tasmota_topic]['status'] = 'interviewed' + + # handle teleperiod + self._handle_teleperiod(tasmota_topic, payload['StatusLOG']) + + if self.tasmota_devices[tasmota_topic]['status'] != 'interviewed': + if self.tasmota_devices[tasmota_topic]['status'] != 'discovered': + # friendly name + self.tasmota_devices[tasmota_topic]['friendly_name'] = payload['Status']['FriendlyName'][0] + + # IP Address + ip = payload['StatusNET']['IPAddress'] + ip_eth = payload['StatusNET'].get('Ethernet', {}).get('IPAddress') + ip = ip_eth if ip == '0.0.0.0' else None + self.tasmota_devices[tasmota_topic]['ip'] = ip + + # Firmware + self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['StatusFWR']['Version'].split('(')[0] + + # MAC + self.tasmota_devices[tasmota_topic]['mac'] = payload['StatusNET']['Mac'] + + # Module No + self.tasmota_devices[tasmota_topic]['template'] = payload['Status']['Module'] + + # get detailed status using payload['StatusSTS'] + status_sts = payload['StatusSTS'] + + # Handling Lights and Dimmer + if any([i in status_sts for i in self.LIGHT_MSG]): + self._handle_lights(tasmota_topic, info_topic, status_sts) + + # Handling of Power + if any(item.startswith("POWER") for item in status_sts.keys()): + self._handle_power(tasmota_topic, info_topic, status_sts) + + # Handling of RF messages + if any(item.startswith("Rf") for item in status_sts.keys()): + self._handle_rf(tasmota_topic, info_topic, status_sts) + + # Handling of Wi-Fi + if 'Wifi' in status_sts: + self._handle_wifi(tasmota_topic, status_sts['Wifi']) + + # Handling of Uptime + if 'Uptime' in status_sts: + self._handle_uptime(tasmota_topic, status_sts['Uptime']) + + # Handling of UptimeSec + if 'UptimeSec' in status_sts: + self.logger.info(f"Received Message contains UptimeSec information.") + self._handle_uptime_sec(tasmota_topic, status_sts['UptimeSec']) + + def on_mqtt_info_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: + """ + Callback function to handle received messages + + :param topic: MQTT topic + :param payload: MQTT message payload + :param qos: qos for this message (optional) + :param retain: retain flag for this message (optional) + + """ + + self._handle_retained_message(topic, retain) + + try: + (topic_type, tasmota_topic, info_topic) = topic.split('/') + self.logger.debug(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") + except Exception as e: + self.logger.error(f"received topic {topic} is not in correct format. 
Error was: {e}")
+        else:
+            if info_topic == 'INFO1':
+                # payload={'Info1': {'Module': 'Sonoff Basic', 'Version': '11.0.0(tasmota)', 'FallbackTopic': 'cmnd/DVES_2EB8AE_fb/', 'GroupTopic': 'cmnd/tasmotas/'}}
+                self.logger.debug(f"Received Message decoded as INFO1 message.")
+                self.tasmota_devices[tasmota_topic]['fw_ver'] = payload['Info1']['Version'].split('(')[0]
+                self.tasmota_devices[tasmota_topic]['module_no'] = payload['Info1']['Module']
+
+            elif info_topic == 'INFO2':
+                # payload={'Info2': {'WebServerMode': 'Admin', 'Hostname': 'SONOFF-B1-6318', 'IPAddress': '192.168.2.25'}}
+                self.logger.debug(f"Received Message decoded as INFO2 message.")
+                self.tasmota_devices[tasmota_topic]['ip'] = payload['Info2']['IPAddress']
+
+            elif info_topic == 'INFO3':
+                # payload={'Info3': {'RestartReason': 'Software/System restart', 'BootCount': 1395}}
+                self.logger.debug(f"Received Message decoded as INFO3 message.")
+                restart_reason = payload['Info3']['RestartReason']
+                self.logger.warning(f"Device {tasmota_topic} (IP={self.tasmota_devices[tasmota_topic]['ip']}) just restarted. Reason={restart_reason}")
+
+    def on_mqtt_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None:
+        """
+        Callback function to handle received messages
+
+        :param topic: MQTT topic
+        :param payload: MQTT message payload
+        :param qos: qos for this message (optional)
+        :param retain: retain flag for this message (optional)
+
+        """
+
+        self._handle_retained_message(topic, retain)
+
+        try:
+            (topic_type, tasmota_topic, info_topic) = topic.split('/')
+            self.logger.info(f"on_mqtt_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}")
+        except Exception as e:
+            self.logger.error(f"received topic {topic} is not in correct format. 
Error was: {e}") + else: + + # handle unknown device + if tasmota_topic not in self.tasmota_devices: + self._handle_new_discovered_device(tasmota_topic) + + # handle message + if isinstance(payload, dict) and info_topic in ['STATE', 'RESULT']: + + # Handling of TelePeriod + if 'TelePeriod' in payload: + self.logger.info(f"Received Message decoded as teleperiod message.") + self._handle_teleperiod(tasmota_topic, payload['TelePeriod']) + + elif 'Module' in payload: + self.logger.info(f"Received Message decoded as Module message.") + self._handle_module(tasmota_topic, payload['Module']) + + # Handling of Light messages + elif any([i in payload for i in self.LIGHT_MSG]): + self.logger.info(f"Received Message decoded as light message.") + self._handle_lights(tasmota_topic, info_topic, payload) + + # Handling of Power messages + elif any(item.startswith("POWER") for item in payload.keys()): + self.logger.info(f"Received Message decoded as power message.") + self._handle_power(tasmota_topic, info_topic, payload) + + # Handling of RF messages payload={'Time': '2022-11-21T11:22:55', 'RfReceived': {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'}} + elif 'RfReceived' in payload: + self.logger.info(f"Received Message decoded as RF message.") + self._handle_rf(tasmota_topic, info_topic, payload['RfReceived']) + + # Handling of Setting messages + elif next(iter(payload)).startswith("SetOption"): + # elif any(item.startswith("SetOption") for item in payload.keys()): + self.logger.info(f"Received Message decoded as Tasmota Setting message.") + self._handle_setting(tasmota_topic, payload) + + # Handling of Zigbee Bridge Config messages + elif 'ZbConfig' in payload: + self.logger.info(f"Received Message decoded as Zigbee Config message.") + self._handle_zbconfig(tasmota_topic, payload['ZbConfig']) + + # Handling of Zigbee Bridge Status messages + elif any(item.startswith("ZbStatus") for item in payload.keys()): + self.logger.info(f"Received Message decoded as Zigbee ZbStatus message.") + self._handle_zbstatus(tasmota_topic, payload) + + # Handling of Wi-Fi + if 'Wifi' in payload: + self.logger.info(f"Received Message contains Wifi information.") + self._handle_wifi(tasmota_topic, payload['Wifi']) + + # Handling of Uptime + if 'Uptime' in payload: + self.logger.info(f"Received Message contains Uptime information.") + self._handle_uptime(tasmota_topic, payload['Uptime']) + + # Handling of UptimeSec + if 'UptimeSec' in payload: + self.logger.info(f"Received Message contains UptimeSec information.") + self._handle_uptime_sec(tasmota_topic, payload['UptimeSec']) + + elif isinstance(payload, dict) and info_topic == 'SENSOR': + self.logger.info(f"Received Message contains sensor information.") + self._handle_sensor(tasmota_topic, info_topic, payload) + + else: + self.logger.warning(f"Received Message '{payload}' not handled within plugin.") + + # setting new online-timeout + self.tasmota_devices[tasmota_topic]['online_timeout'] = datetime.now() + timedelta(seconds=self.telemetry_period + 5) + + # setting online_item to True + self._set_item_value(tasmota_topic, 'item_online', True, info_topic) + + def on_mqtt_power_message(self, topic: str, payload: dict, qos: int = None, retain: bool = None) -> None: + """ + Callback function to handle received messages + + :param topic: MQTT topic + :param payload: MQTT message payload + :param qos: qos for this message (optional) + :param retain: retain flag for this message (optional) + + """ + + self._handle_retained_message(topic, retain) + + # 
check for retained message and handle it + if bool(retain): + if topic not in self.topics_of_retained_messages: + self.topics_of_retained_messages.append(topic) + else: + if topic in self.topics_of_retained_messages: + self.topics_of_retained_messages.remove(topic) + + # handle incoming message + try: + (topic_type, tasmota_topic, info_topic) = topic.split('/') + self.logger.info(f"on_mqtt_power_message: topic_type={topic_type}, tasmota_topic={tasmota_topic}, info_topic={info_topic}, payload={payload}") + except Exception as e: + self.logger.error(f"received topic {topic} is not in correct format. Error was: {e}") + else: + device = self.tasmota_devices.get(tasmota_topic, None) + if device: + if info_topic.startswith('POWER'): + tasmota_relay = str(info_topic[5:]) + tasmota_relay = '1' if not tasmota_relay else None + item_relay = f'item_relay{tasmota_relay}' + self._set_item_value(tasmota_topic, item_relay, payload == 'ON', info_topic) + self.tasmota_devices[tasmota_topic]['relais'][info_topic] = payload + + ############################################################ + # Parse detailed messages + ############################################################ + + def _handle_sensor(self, device: str, function: str, payload: dict) -> None: + """ + + :param device: + :param function: + :param payload: + :return: + """ + # Handling of Zigbee Device Messages + if 'ZbReceived' in payload: + self.logger.info(f"Received Message decoded as Zigbee Sensor message.") + self._handle_sensor_zigbee(device, function, payload['ZbReceived']) + + # Handling of Energy Sensors + elif 'ENERGY' in payload: + self.logger.info(f"Received Message decoded as Energy Sensor message.") + self._handle_sensor_energy(device, function, payload['ENERGY']) + + # Handling of Environmental Sensors + elif any([i in payload for i in self.ENV_SENSOR]): + self._handle_sensor_env(device, function, payload) + + # Handling of Analog Sensors + elif 'ANALOG' in payload: + self.logger.info(f"Received Message decoded as ANALOG Sensor message.") + self._handle_sensor_analog(device, function, payload['ANALOG']) + + # Handling of Sensors of ESP32 + elif 'ESP32' in payload: + self.logger.info(f"Received Message decoded as ESP32 Sensor message.") + self._handle_sensor_esp32(device, function, payload['ESP32']) + + # Handling of any other Sensor e.g. all SML devices + else: + if len(payload) == 2 and isinstance(payload[list(payload.keys())[1]], dict): # wenn payload 2 Einträge und der zweite Eintrag vom Typ dict + self.logger.info(f"Received Message decoded as other Sensor message (e.g. 
smartmeter).")
+                sensor = list(payload.keys())[1]
+                self._handle_sensor_other(device, sensor, function, payload[sensor])
+
+    def _handle_sensor_zigbee(self, device: str, function: str, payload: dict) -> None:
+        """
+        Handles Zigbee Sensor information and sets items
+
+        :param device: Tasmota device (tasmota_topic) the message was received from
+        :param function: Function of Device (equals info_topic)
+        :param payload: payload containing zigbee sensor infos
+        :return:
+        """
+
+        """
+        payload = {'Fenster_01': {'Device': '0xD4F3', 'Name': 'Fenster_01', 'Contact': 0, 'Endpoint': 1, 'LinkQuality': 92}}
+        """
+
+        # self.logger.debug(f"_handle_sensor_zigbee: {device=}, {function=}, {payload=}")
+
+        for zigbee_device in payload:
+            if zigbee_device != '0x0000' and zigbee_device not in self.tasmota_zigbee_devices:
+                self.logger.info(f"New Zigbee Device '{zigbee_device}' based on {function}-Message from {device} discovered")
+                self.tasmota_zigbee_devices[zigbee_device] = {}
+
+            # Make all keys of Zigbee-Device Payload Dict lowercase to match itemtype from parse_item
+            zigbee_device_dict = {k.lower(): v for k, v in payload[zigbee_device].items()}
+
+            # Correct the values for (HSB) Dimmer (0-254 -> 0-100), Hue (0-254 -> 0-360) and Saturation (0-254 -> 0-100)
+            if 'dimmer' in zigbee_device_dict:
+                zigbee_device_dict.update({'dimmer': _254_to_100(zigbee_device_dict['dimmer'])})
+            if 'sat' in zigbee_device_dict:
+                zigbee_device_dict.update({'sat': _254_to_100(zigbee_device_dict['sat'])})
+            if 'hue' in zigbee_device_dict:
+                zigbee_device_dict.update({'hue': _254_to_360(zigbee_device_dict['hue'])})
+            if 'ct' in zigbee_device_dict:
+                zigbee_device_dict['ct_k'] = _mired_to_kelvin(zigbee_device_dict['ct'])
+
+            # Convert LastSeenEpoch / BatteryLastSeenEpoch from timestamp to datetime
+            if 'lastseenepoch' in zigbee_device_dict:
+                zigbee_device_dict.update({'lastseenepoch': datetime.fromtimestamp(zigbee_device_dict['lastseenepoch'])})
+            if 'batterylastseenepoch' in zigbee_device_dict:
+                zigbee_device_dict.update({'batterylastseenepoch': datetime.fromtimestamp(zigbee_device_dict['batterylastseenepoch'])})
+
+            # Update the sub-dict
+            self.tasmota_zigbee_devices[zigbee_device].update(zigbee_device_dict)
+
+            # Iterate over payload and set corresponding items
+            for element in zigbee_device_dict:
+                itemtype = f"item_{zigbee_device}.{element.lower()}"
+                value = zigbee_device_dict[element]
+                self._set_item_value(device, itemtype, value, function)
+
+    def _handle_sensor_energy(self, device: str, function: str, energy: dict):
+        """
+        Handle Energy Sensor Information
+        :param device:
+        :param energy:
+        :param function:
+        """
+
+        if 'ENERGY' not in self.tasmota_devices[device]['sensors']:
+            self.tasmota_devices[device]['sensors']['ENERGY'] = {}
+
+        self.tasmota_devices[device]['sensors']['ENERGY']['period'] = energy.get('Period', None)
+
+        for key in self.ENERGY_SENSOR_KEYS:
+            if key in energy:
+                self.tasmota_devices[device]['sensors']['ENERGY'][key.lower()] = energy[key]
+                self._set_item_value(device, self.ENERGY_SENSOR_KEYS[key], energy[key], function)
+
+    def _handle_sensor_env(self, device: str, function: str, payload: dict):
+        """
+        Handle Environmental Sensor Information
+        :param device:
+        :param function:
+        :param payload:
+        """
+
+        for sensor in self.ENV_SENSOR:
+            data = payload.get(sensor)
+
+            if data and isinstance(data, dict):
+                self.logger.debug(f"Received Message decoded as {sensor} Sensor message.")
+                if sensor not in self.tasmota_devices[device]['sensors']:
+                    self.tasmota_devices[device]['sensors'][sensor] = {}
+
+                for key in self.ENV_SENSOR_KEYS:
+                    if key in data:
+                        self.tasmota_devices[device]['sensors'][sensor][key.lower()] = data[key]
+                        self._set_item_value(device, self.ENV_SENSOR_KEYS[key], data[key], function)
+
+    def _handle_sensor_analog(self, device: str, function: str, analog: dict):
+        """
+        Handle Analog Sensor Information
+        :param device:
+        :param function:
+        :param analog:
+        """
+
+        if 'ANALOG' not in self.tasmota_devices[device]['sensors']:
+            self.tasmota_devices[device]['sensors']['ANALOG'] = {}
+
+        for key in self.ANALOG_SENSOR_KEYS:
+            if key in analog:
+                self.tasmota_devices[device]['sensors']['ANALOG'][key.lower()] = analog[key]
+                self._set_item_value(device, self.ANALOG_SENSOR_KEYS[key], analog[key], function)
+
+    def _handle_sensor_esp32(self, device: str, function: str, esp32: dict):
+        """
+        Handle ESP32 Sensor Information
+        :param device:
+        :param function:
+        :param esp32:
+        """
+
+        if 'ESP32' not in self.tasmota_devices[device]['sensors']:
+            self.tasmota_devices[device]['sensors']['ESP32'] = {}
+
+        for key in self.ESP32_SENSOR_KEYS:
+            if key in esp32:
+                self.tasmota_devices[device]['sensors']['ESP32'][key.lower()] = esp32[key]
+                self._set_item_value(device, self.ESP32_SENSOR_KEYS[key], esp32[key], function)
+
+    def _handle_sensor_other(self, device: str, sensor: str, function: str, payload: dict):
+        """
+        Handle Other Sensor Information
+        :param device: Tasmota Device
+        :param sensor: Sensor Device
+        :param function: Message type the information was taken from
+        :param payload: dict with infos
+        """
+
+        self.logger.debug(f"Received Message decoded as {sensor} Sensor message with payload={payload}.")
+
+        if sensor not in self.tasmota_devices[device]['sensors']:
+            self.tasmota_devices[device]['sensors'][sensor] = {}
+
+        # Make all keys of SML-Device Payload Dict lowercase to match itemtype from parse_item
+        sensor_dict = {k.lower(): v for k, v in payload.items()}
+
+        # Update the sub-dict
+        self.tasmota_devices[device]['sensors'][sensor].update(sensor_dict)
+
+        # Iterate over payload and set corresponding items
+        for element in sensor_dict:
+            itemtype = f"item_{sensor}.{element.lower()}"
+            value = sensor_dict[element]
+            self._set_item_value(device, itemtype, value, function)
+
+    def _handle_lights(self, device: str, function: str, payload: dict) -> None:
+        """
+        Extracts Light information out of payload and updates plugin dict
+
+        :param device: Device, the Light information shall be handled (equals tasmota_topic)
+        :param function: Function of Device (equals info_topic)
+        :param payload: MQTT message payload
+
+        """
+        hsb = payload.get('HSBColor')
+        if hsb:
+            if hsb.count(',') == 2:
+                hsb = hsb.split(",")
+                try:
+                    hsb = [int(element) for element in hsb]
+                except Exception as e:
+                    self.logger.info(f"Received data for HSBColor does not contain valid integer values for HSB. Payload was {hsb}. Error was {e}.")
+            else:
+                self.logger.info(f"Received data for HSBColor does not contain values for HSB. 
Payload was {hsb}.") + self.tasmota_devices[device]['lights']['hsb'] = hsb + self._set_item_value(device, 'item_hsb', hsb, function) + + dimmer = payload.get('Dimmer') + if dimmer: + self.tasmota_devices[device]['lights']['dimmer'] = dimmer + self._set_item_value(device, 'item_dimmer', dimmer, function) + + color = payload.get('Color') + if color: + self.tasmota_devices[device]['lights']['color'] = str(color) + + ct = payload.get('CT') + if ct: + self.tasmota_devices[device]['lights']['ct'] = ct + self._set_item_value(device, 'item_ct', ct, function) + + white = payload.get('White') + if white: + self.tasmota_devices[device]['lights']['white'] = white + self._set_item_value(device, 'item_white', white, function) + + scheme = payload.get('Scheme') + if scheme: + self.tasmota_devices[device]['lights']['scheme'] = scheme + + fade = payload.get('Fade') + if fade: + self.tasmota_devices[device]['lights']['fade'] = bool(fade) + + speed = payload.get('Speed') + if speed: + self.tasmota_devices[device]['lights']['speed'] = speed + + ledtable = payload.get('LedTable') + if ledtable: + self.tasmota_devices[device]['lights']['ledtable'] = bool(ledtable) + + def _handle_power(self, device: str, function: str, payload: dict) -> None: + """ + Extracts Power information out of payload and updates plugin dict + + :param device: Device, the Power information shall be handled (equals tasmota_topic) + :param function: Function of Device (equals info_topic) + :param payload: MQTT message payload + + """ + # payload = {"Time": "2022-11-21T12:56:34", "Uptime": "0T00:00:11", "UptimeSec": 11, "Heap": 27, "SleepMode": "Dynamic", "Sleep": 50, "LoadAvg": 19, "MqttCount": 0, "POWER1": "OFF", "POWER2": "OFF", "POWER3": "OFF", "POWER4": "OFF", "Wifi": {"AP": 1, "SSId": "WLAN-Access", "BSSId": "38:10:D5:15:87:69", "Channel": 1, "Mode": "11n", "RSSI": 82, "Signal": -59, "LinkCount": 1, "Downtime": "0T00:00:03"}} + + power_dict = {key: val for key, val in payload.items() if key.startswith('POWER')} + self.tasmota_devices[device]['relais'].update(power_dict) + for power in power_dict: + relay_index = 1 if len(power) == 5 else str(power[5:]) + item_relay = f'item_relay{relay_index}' + self._set_item_value(device, item_relay, power_dict[power], function) + + def _handle_module(self, device: str, payload: dict) -> None: + """ + Extracts Module information out of payload and updates plugin dict payload = {"0":"ZB-GW03-V1.3"}} + + :param device: Device, the Module information shall be handled + :param payload: MQTT message payload + + """ + template = next(iter(payload)) + module = payload[template] + self.tasmota_devices[device]['module'] = module + self.tasmota_devices[device]['tasmota_template'] = template + + def _handle_rf(self, device: str, function: str, payload: dict) -> None: + """ + Extracts RF information out of payload and updates plugin dict + + :param device: Device, the RF information shall be handled + :param function: Function of Device (equals info_topic) + :param payload: MQTT message payload + + """ + + # payload = {'Sync': 10120, 'Low': 330, 'High': 980, 'Data': '3602B8', 'RfKey': 'None'} + + self.logger.info(f"Received Message decoded as RF message.") + self.tasmota_devices[device]['rf']['rf_received'] = payload + self._set_item_value(device, 'item_rf_recv', payload['Data'], function) + + rf_key = 0 if payload["RfKey"] == 'None' else int(payload["RfKey"]) + self._set_item_value(device, 'item_rf_key_recv', rf_key, function) + self._set_item_value(device, f'item_rf_key{rf_key}', True, function) + + def 
_handle_zbconfig(self, device: str, payload: dict) -> None: + """ + Extracts ZigBee Config information out of payload and updates plugin dict + + :param device: Device, the Zigbee Config information shall be handled + :param payload: MQTT message payload + + """ + # stat/SONOFF_ZB1/RESULT = {"ZbConfig":{"Channel":11,"PanID":"0x0C84","ExtPanID":"0xCCCCCCCCAAA8CC84","KeyL":"0xAAA8CC841B1F40A1","KeyH":"0xAAA8CC841B1F40A1","TxRadio":20}} + self.tasmota_devices[device]['zigbee']['zbconfig'] = payload + + def _handle_zbstatus(self, device: str, payload: dict) -> None: + """ + Extracts ZigBee Status information out of payload and updates plugin dict + + :param device: Device, the Zigbee Status information shall be handled + :param payload: MQTT message payload + + """ + + zbstatus1 = payload.get('ZbStatus1') + if zbstatus1: + self.logger.info(f"Received Message decoded as Zigbee ZbStatus1 message for device {device}.") + self._handle_zbstatus1(device, zbstatus1) + + zbstatus23 = payload.get('ZbStatus2') + if not zbstatus23: + zbstatus23 = payload.get('ZbStatus3') + + if zbstatus23: + self.logger.info(f"Received Message decoded as Zigbee ZbStatus2 or ZbStatus3 message for device {device}.") + self._handle_zbstatus23(device, zbstatus23) + + def _handle_zbstatus1(self, device: str, zbstatus1: list) -> None: + """ + Extracts ZigBee Status1 information out of payload and updates plugin dict + + :param device: Device, the Zigbee Status information shall be handled + :param zbstatus1: List of status information out mqtt payload + + """ + """ + zbstatus1 = [{'Device': '0x676D', 'Name': 'SNZB-02_01'}, + {'Device': '0xD4F3', 'Name': 'Fenster_01'} + ] + """ + + for element in zbstatus1: + zigbee_device = element.get('Name') + if not zigbee_device: + zigbee_device = element['Device'] + + if zigbee_device != '0x0000' and zigbee_device not in self.tasmota_zigbee_devices: + self.logger.info(f"New Zigbee Device '{zigbee_device}'based on 'ZbStatus1'-Message from {device} discovered") + self.tasmota_zigbee_devices[zigbee_device] = {} + + # request detailed information of all discovered zigbee devices + self._poll_zigbee_devices(device) + + def _handle_zbstatus23(self, device: str, zbstatus23: dict) -> None: + """ + Extracts ZigBee Status 2 and 3 information out of payload and updates plugin dict + + :param device: Device, the Zigbee Status information shall be handled + :param zbstatus23: ZbStatus2 or ZbStatus 3 part of MQTT message payload + + """ + + """ + zbstatus23 = [{'Device': '0xD4F3', 'Name': 'Fenster_01', 'IEEEAddr': '0x00158D0007005B59', + 'ModelId': 'lumi.sensor_magnet.aq2', 'Manufacturer': 'LUMI', 'Endpoints': [1], + 'Config': ['A01'], 'ZoneStatus': 29697, 'Reachable': True, 'BatteryPercentage': 100, + 'BatteryLastSeenEpoch': 1668953504, 'LastSeen': 238, 'LastSeenEpoch': 1668953504, + 'LinkQuality': 81}] + + zbstatus23 = [{'Device': '0x676D', 'Name': 'SNZB-02_01', 'IEEEAddr': '0x00124B00231E45B8', + 'ModelId': 'TH01', 'Manufacturer': 'eWeLink', 'Endpoints': [1], 'Config': ['T01'], + 'Temperature': 19.27, 'Humidity': 58.12, 'Reachable': True, 'BatteryPercentage': 73, + 'BatteryLastSeenEpoch': 1668953064, 'LastSeen': 610, 'LastSeenEpoch': 1668953064, 'LinkQuality': 66}] + + zbstatus23 = [{'Device': '0x0A22', 'IEEEAddr': '0xF0D1B800001571C5', 'ModelId': 'CLA60 RGBW Z3', + 'Manufacturer': 'LEDVANCE', 'Endpoints': [1], 'Config': ['L01', 'O01'], 'Dimmer': 100, + 'Hue': 200, 'Sat': 254, 'X': 1, 'Y': 1, 'CT': 350, 'ColorMode': 0, 'RGB': 'B600FF', + 'RGBb': '480064', 'Power': 1, 'Reachable': False, 'LastSeen': 
30837743, + 'LastSeenEpoch': 1638132192, 'LinkQuality': 13}] + """ + + for element in zbstatus23: + zigbee_device = element.get('Name') + if not zigbee_device: + zigbee_device = element['Device'] + + payload = dict() + payload[zigbee_device] = element + + self._handle_sensor_zigbee(device, 'ZbStatus', payload) + + def _handle_wifi(self, device: str, payload: dict) -> None: + """ + Extracts Wi-Fi information out of payload and updates plugin dict + + :param device: Device, the Zigbee Status information shall be handled + :param payload: MQTT message payload + + """ + self.logger.debug(f"_handle_wifi: received payload={payload}") + wifi_signal = payload.get('Signal') + if wifi_signal: + if isinstance(wifi_signal, str) and wifi_signal.isdigit(): + wifi_signal = int(wifi_signal) + self.tasmota_devices[device]['wifi_signal'] = wifi_signal + + def _handle_setting(self, device: str, payload: dict) -> None: + """ + Extracts Zigbee Bridge Setting information out of payload and updates dict + :param device: + :param payload: MQTT message payload + """ + + # handle Setting listed in Zigbee Bridge Settings (wenn erster Key des Payload-Dict in Zigbee_Bridge_Default_Setting...) + if next(iter(payload)) in self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS: + if not self.tasmota_devices[device]['zigbee'].get('setting'): + self.tasmota_devices[device]['zigbee']['setting'] = {} + self.tasmota_devices[device]['zigbee']['setting'].update(payload) + + if self.tasmota_devices[device]['zigbee']['setting'] == self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS: + self.tasmota_devices[device]['zigbee']['status'] = 'set' + self.logger.info(f'_handle_setting: Setting of Tasmota Zigbee Bridge successful.') + + def _handle_teleperiod(self, tasmota_topic: str, teleperiod: dict) -> None: + + self.tasmota_devices[tasmota_topic]['teleperiod'] = teleperiod + if teleperiod != self.telemetry_period: + self._set_telemetry_period(tasmota_topic) + + def _handle_uptime(self, tasmota_topic: str, uptime: str) -> None: + self.logger.debug(f"Received Message contains Uptime information. uptime={uptime}") + self.tasmota_devices[tasmota_topic]['uptime'] = uptime + + def _handle_uptime_sec(self, tasmota_topic: str, uptime_sec: int) -> None: + self.logger.debug(f"Received Message contains UptimeSec information. 
uptime={uptime_sec}") + self.tasmota_devices[tasmota_topic]['uptime_sec'] = int(uptime_sec) + + ############################################################ + # MQTT Settings & Config + ############################################################ + + def add_tasmota_subscriptions(self): + self.logger.info(f"Further tasmota_subscriptions for regular/cyclic messages will be added") + + self.add_tasmota_subscription('tele', '+', 'STATE', 'dict', callback=self.on_mqtt_message) + self.add_tasmota_subscription('tele', '+', 'SENSOR', 'dict', callback=self.on_mqtt_message) + self.add_tasmota_subscription('tele', '+', 'RESULT', 'dict', callback=self.on_mqtt_message) + # self.add_tasmota_subscription('tele', '+', 'INFO1', 'dict', callback=self.on_mqtt_message) + # self.add_tasmota_subscription('tele', '+', 'INFO2', 'dict', callback=self.on_mqtt_message) + self.add_tasmota_subscription('tele', '+', 'INFO3', 'dict', callback=self.on_mqtt_info_message) + self.add_tasmota_subscription('stat', '+', 'POWER', 'num', callback=self.on_mqtt_power_message) + self.add_tasmota_subscription('stat', '+', 'POWER1', 'num', callback=self.on_mqtt_power_message) + self.add_tasmota_subscription('stat', '+', 'POWER2', 'num', callback=self.on_mqtt_power_message) + self.add_tasmota_subscription('stat', '+', 'POWER3', 'num', callback=self.on_mqtt_power_message) + self.add_tasmota_subscription('stat', '+', 'POWER4', 'num', callback=self.on_mqtt_power_message) + + def check_online_status(self): + """ + checks all tasmota topics, if last message is with telemetry period. If not set tasmota_topic offline + + """ + + self.logger.info("check_online_status: Checking online status of connected devices") + for tasmota_topic in self.tasmota_devices: + if self.tasmota_devices[tasmota_topic].get('online') is True and self.tasmota_devices[tasmota_topic].get('online_timeout'): + if self.tasmota_devices[tasmota_topic]['online_timeout'] < datetime.now(): + self._set_device_offline(tasmota_topic) + else: + self.logger.debug(f'check_online_status: Checking online status of {tasmota_topic} successful') + + def add_tasmota_subscription(self, prefix: str, topic: str, detail: str, payload_type: str, bool_values: list = None, item=None, callback=None) -> None: + """ + build the topic in Tasmota style and add the subscription to mqtt + + :param prefix: prefix of topic to subscribe to + :param topic: unique part of topic to subscribe to + :param detail: detail of topic to subscribe to + :param payload_type: payload type of the topic (for this subscription to the topic) + :param bool_values: bool values (for this subscription to the topic) + :param item: item that should receive the payload as value. 
Used by the standard handler (if no callback function is specified) + :param callback: a plugin can provide an own callback function, if special handling of the payload is needed + + """ + + tpc = self.full_topic.replace("%prefix%", prefix) + tpc = tpc.replace("%topic%", topic) + tpc += detail + self.add_subscription(tpc, payload_type, bool_values=bool_values, callback=callback) + + def publish_tasmota_topic(self, prefix: str, topic: str, detail: str, payload, item=None, qos: int = None, retain: bool = False, bool_values: list = None) -> None: + """ + build the topic in Tasmota style and publish to mqtt + + :param prefix: prefix of topic to publish + :param topic: unique part of topic to publish + :param detail: detail of topic to publish + :param payload: payload to publish + :param item: item (if relevant) + :param qos: qos for this message (optional) + :param retain: retain flag for this message (optional) + :param bool_values: bool values (for publishing this topic, optional) + + """ + tpc = self.full_topic.replace("%prefix%", prefix) + tpc = tpc.replace("%topic%", topic) + tpc += detail + + self.publish_topic(tpc, payload, item, qos, retain, bool_values) + + def interview_all_devices(self): + + """ + Interview known Tasmota Devices (defined in item.yaml and self discovered) + """ + + self.logger.info(f"Interview of all known tasmota devices started.") + + tasmota_device_list = list(set(list(self.tasmota_device + self.discovered_device))) + + for device in tasmota_device_list: + self.logger.debug(f"Interview {device}.") + self._interview_device(device) + self.logger.debug(f"Set Telemetry period for {device}.") + self._set_telemetry_period(device) + + def clear_retained_messages(self, retained_msg=None): + """ + Method to clear all retained messages + """ + + if not retained_msg: + retained_msg = self.topics_of_retained_messages + + for topic in retained_msg: + try: + self.logger.warning(f"Clearing retained message for topic={topic}") + self.publish_topic(topic=topic, payload="", retain=True) + except Exception as e: + self.logger.warning(f"Clearing retained message for topic={topic}, caused error {e}") + pass + + def _interview_device(self, topic: str) -> None: + """ + ask for status info of each known tasmota_topic + + :param topic: tasmota Topic + """ + + # self.logger.debug(f"run: publishing 'cmnd/{topic}/Status0'") + self.publish_tasmota_topic('cmnd', topic, 'Status0', '') + + # self.logger.debug(f"run: publishing 'cmnd/{topic}/State'") + # self.publish_tasmota_topic('cmnd', topic, 'State', '') + + # self.logger.debug(f"run: publishing 'cmnd/{topic}/Module'") + # self.publish_tasmota_topic('cmnd', topic, 'Module', '') + + def _set_telemetry_period(self, topic: str) -> None: + """ + sets telemetry period for given topic/device + + :param topic: tasmota Topic + """ + + self.logger.info(f"run: Setting telemetry period to {self.telemetry_period} seconds") + self.publish_tasmota_topic('cmnd', topic, 'teleperiod', self.telemetry_period) + + ############################################################ + # Helper + ############################################################ + + def _set_item_value(self, tasmota_topic: str, itemtype: str, value, info_topic: str = '') -> None: + """ + Sets item value + + :param tasmota_topic: MQTT message payload + :param itemtype: itemtype to be set + :param value: value to be set + :param info_topic: MQTT info_topic + """ + + if tasmota_topic in self.tasmota_devices: + + # create source of item value + src = f"{tasmota_topic}:{info_topic}" if info_topic 
!= '' else f"{tasmota_topic}" + + if itemtype in self.tasmota_devices[tasmota_topic]['connected_items']: + # get item to be set + item = self.tasmota_devices[tasmota_topic]['connected_items'][itemtype] + + tasmota_rf_details = self.get_iattr_value(item.conf, 'tasmota_rf_key') + if tasmota_rf_details and '=' in tasmota_rf_details: + tasmota_rf_key, tasmota_rf_key_param = tasmota_rf_details.split('=') + + if tasmota_rf_key_param.lower() == 'true': + value = True + elif tasmota_rf_key_param.lower() == 'false': + value = True + elif tasmota_rf_key_param.lower() == 'toggle': + value = not(item()) + else: + self.logger.warning(f"Paramater of tasmota_rf_key unknown, Need to be True, False, Toggle") + return + + # set item value + self.logger.info(f"{tasmota_topic}: Item '{item.id()}' via itemtype '{itemtype}' set to value '{value}' provided by '{src}'.") + item(value, self.get_shortname(), src) + + else: + self.logger.debug(f"{tasmota_topic}: No item for itemtype '{itemtype}' defined to set to '{value}' provided by '{src}'.") + else: + self.logger.debug(f"{tasmota_topic} unknown.") + + def _handle_new_discovered_device(self, tasmota_topic): + + self._add_new_device_to_tasmota_devices(tasmota_topic) + self.tasmota_devices[tasmota_topic]['status'] = 'discovered' + self._interview_device(tasmota_topic) + + def _add_new_device_to_tasmota_devices(self, tasmota_topic): + self.tasmota_devices[tasmota_topic] = self._get_device_dict_1_template() + self.tasmota_devices[tasmota_topic].update(self._get_device_dict_2_template()) + + def _set_device_offline(self, tasmota_topic): + + self.tasmota_devices[tasmota_topic]['online'] = False + self._set_item_value(tasmota_topic, 'item_online', False, 'check_online_status') + self.logger.info(f"{tasmota_topic} is not online any more - online_timeout={self.tasmota_devices[tasmota_topic]['online_timeout']}, now={datetime.now()}") + + # clean data from dict to show correct status + self.tasmota_devices[tasmota_topic].update(self._get_device_dict_2_template()) + + @staticmethod + def _rename_discovery_keys(payload: dict) -> dict: + + link = {'ip': 'IP', + 'dn': 'DeviceName', + 'fn': 'FriendlyNames', # list + 'hn': 'HostName', + 'mac': 'MAC', + 'md': 'Module', + 'ty': 'Tuya', + 'if': 'ifan', + 'ofln': 'LWT-offline', + 'onln': 'LWT-online', + 'state': 'StateText', # [0..3] + 'sw': 'FirmwareVersion', + 't': 'Topic', + 'ft': 'FullTopic', + 'tp': 'Prefix', + 'rl': 'Relays', # 0: disabled, 1: relay, 2.. future extension (fan, shutter?) 
+ 'swc': 'SwitchMode', + 'swn': 'SwitchName', + 'btn': 'Buttons', + 'so': 'SetOption', # needed by HA to map Tasmota devices to HA entities and triggers + 'lk': 'ctrgb', + 'lt_st': 'LightSubtype', + 'sho': 'sho', + 'sht': 'sht', + 'ver': 'ProtocolVersion', + } + + new_payload = {} + for k_old in payload: + k_new = link.get(k_old) + if k_new: + new_payload[k_new] = payload[k_old] + + return new_payload + + @staticmethod + def _get_device_dict_1_template(): + return {'connected_to_item': False, + 'online': False, + 'status': None, + 'connected_items': {}, + 'uptime': '-', + } + + @staticmethod + def _get_device_dict_2_template(): + return {'lights': {}, + 'rf': {}, + 'sensors': {}, + 'relais': {}, + 'zigbee': {}, + 'sml': {}, + } + + ############################################################ + # Zigbee + ############################################################ + + def _poll_zigbee_devices(self, device: str) -> None: + """ + Polls information of all discovered zigbee devices from dedicated Zigbee bridge + + :param device: Zigbee bridge, where all Zigbee Devices shall be polled (equal to tasmota_topic) + + """ + self.logger.info(f"_poll_zigbee_devices: Polling information of all discovered Zigbee devices for zigbee_bridge {device}") + for zigbee_device in self.tasmota_zigbee_devices: + # self.logger.debug(f"_poll_zigbee_devices: publishing 'cmnd/{device}/ZbStatus3 {zigbee_device}'") + self.publish_tasmota_topic('cmnd', device, 'ZbStatus3', zigbee_device) + + def _configure_zigbee_bridge_settings(self, device: str) -> None: + """ + Configures Zigbee Bridge settings + + :param device: Zigbee bridge to be set to get MQTT Messages in right format") + """ + + self.logger.info(f"_configure_zigbee_bridge_settings: Do settings of ZigbeeBridge {device}") + bridge_setting_backlog = '; '.join(f"{key} {value}" for key, value in self.ZIGBEE_BRIDGE_DEFAULT_OPTIONS.items()) + self.publish_tasmota_topic('cmnd', device, 'Backlog', bridge_setting_backlog) + + def _request_zigbee_bridge_config(self, device: str) -> None: + """ + Request Zigbee Bridge configuration + + :param device: Zigbee bridge to be requested (equal to tasmota_topic) + """ + + self.logger.info(f"_request_zigbee_bridge_config: Request configuration of Zigbee bridge {device}") + # self.logger.debug(f"_discover_zigbee_bridge: publishing 'cmnd/{device}/ZbConfig'") + self.publish_tasmota_topic('cmnd', device, 'ZbConfig', '') + + def _discover_zigbee_bridge_devices(self, device: str) -> None: + """ + Discovers all connected Zigbee devices + + :param device: Zigbee bridge where connected devices shall be discovered (equal to tasmota_topic) + """ + + self.logger.info(f"_discover_zigbee_bridge_devices: Discover all connected Zigbee devices for ZigbeeBridge {device}") + self.publish_tasmota_topic('cmnd', device, 'ZbStatus1', '') + + def _handle_retained_message(self, topic: str, retain: bool) -> None: + """ + check for retained message and handle it + + :param topic: + :param retain: + """ + + if bool(retain): + if topic not in self.topics_of_retained_messages: + self.topics_of_retained_messages.append(topic) + else: + if topic in self.topics_of_retained_messages: + self.topics_of_retained_messages.remove(topic) + + ############################################################ + # Plugin Properties + ############################################################ + + @property + def log_level(self): + return self.logger.getEffectiveLevel() + + @property + def retained_msg_count(self): + return self._broker.retained_messages + + @property + def 
tasmota_device(self): + return list(self.tasmota_devices.keys()) + + @property + def has_zigbee(self): + for tasmota_topic in self.tasmota_devices: + if self.tasmota_devices[tasmota_topic]['zigbee']: + return True + return False + + @property + def has_lights(self): + for tasmota_topic in self.tasmota_devices: + if self.tasmota_devices[tasmota_topic]['lights']: + return True + return False + + @property + def has_rf(self): + for tasmota_topic in self.tasmota_devices: + if self.tasmota_devices[tasmota_topic]['rf']: + return True + return False + + @property + def has_relais(self): + for tasmota_topic in self.tasmota_devices: + if self.tasmota_devices[tasmota_topic]['relais']: + return True + return False + + @property + def has_energy_sensor(self): + for tasmota_topic in self.tasmota_devices: + if 'ENERGY' in self.tasmota_devices[tasmota_topic]['sensors']: + return True + return False + + @property + def has_env_sensor(self): + for tasmota_topic in self.tasmota_devices: + if any([i in self.tasmota_devices[tasmota_topic]['sensors'] for i in self.ENV_SENSOR]): + return True + return False + + @property + def has_ds18b20_sensor(self): + for tasmota_topic in self.tasmota_devices: + if 'DS18B20' in self.tasmota_devices[tasmota_topic]['sensors']: + return True + return False + + @property + def has_am2301_sensor(self): + for tasmota_topic in self.tasmota_devices: + if 'AM2301' in self.tasmota_devices[tasmota_topic]['sensors']: + return True + return False + + @property + def has_sht3x_sensor(self): + for tasmota_topic in self.tasmota_devices: + if 'SHT3X' in self.tasmota_devices[tasmota_topic]['sensors']: + return True + return False + + @property + def has_other_sensor(self): + for tasmota_topic in self.tasmota_devices: + for sensor in self.tasmota_devices[tasmota_topic]['sensors']: + if sensor not in self.SENSORS: + return True + return False + +################################################################## +# Utilities +################################################################## + + +def _254_to_100(value): + return int(round(value * 100 / 254, 0)) + + +def _254_to_360(value): + return int(round(value * 360 / 254, 0)) + + +def _100_to_254(value): + return int(round(value * 254 / 100, 0)) + + +def _360_to_254(value): + return int(round(value * 254 / 360, 0)) + + +def _kelvin_to_mired(value): + """Umrechnung der Farbtemperatur von Kelvin auf "mired scale" (Reziproke Megakelvin)""" + return int(round(1000000 / value, 0)) + + +def _mired_to_kelvin(value): + """Umrechnung der Farbtemperatur von "mired scale" (Reziproke Megakelvin) auf Kelvin""" + return int(round(10000 / int(value), 0)) * 100 diff --git a/tasmota/plugin.yaml b/tasmota/plugin.yaml old mode 100644 new mode 100755 index d3e2d2004..fcef9cc87 --- a/tasmota/plugin.yaml +++ b/tasmota/plugin.yaml @@ -1,283 +1,283 @@ -# Metadata for the plugin -plugin: - # Global plugin attributes - type: gateway # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Plugin zur Steuerung von Switches, die mit Tasmota Firmware ausgestattet sind. Die Kommunikation erfolgt über das MQTT Module von SmartHomeNG.' - en: 'Plugin to control switches which are equipped with Tasmote firmware. Communication is handled through the MQTT module of SmartHomeNG.' - maintainer: sisamiwe - tester: msinn # Who tests this plugin? 
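A quick worked example for the scaling and colour-temperature helpers defined at the end of tasmota/__init__.py above; the input values are chosen only for illustration:

# the helpers map between percent/degree ranges and the 0..254 scale used by
# Tasmota/Zigbee, and between Kelvin and the mired scale used for colour temperature
assert _100_to_254(50) == 127           # dimmer: 50 %      -> 127 on the 0..254 scale
assert _254_to_360(254) == 360          # hue: full scale   -> 360 degrees
assert _kelvin_to_mired(4000) == 250    # 1_000_000 / 4000 K = 250 mired
assert _mired_to_kelvin(250) == 4000    # back-conversion, rounded to full 100 K steps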
- state: ready # change to ready when done with development - keywords: iot - documentation: http://smarthomeng.de/user/plugins/tasmota/user_doc.html - support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1520293-support-thread-für-das-tasmota-plugin - - version: 1.4.0 # Plugin version - sh_minversion: 1.9.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# py_minversion: # minimum Python version to use for this plugin - multi_instance: True # plugin supports multi instance - restartable: unknown - classname: Tasmota # class containing the plugin - -parameters: - # Definition of parameters to be configured in etc/plugin.yaml (enter 'parameters: NONE', if section should be empty) - full_topic: - type: str - default: '%prefix%/%topic%/' - description: - de: 'Vollständiges Topic (Prefix und Topic) zur Kommunikation mit den Tasmota Devices' - en: 'Full topic (prefix und topic) for communication with tasmota devices' - - telemetry_period: - type: int - default: 300 - valid_min: 10 - valid_max: 3600 - description: - de: 'Zeitabstand in Sekunden in dem die Tasmota Devices Telemetrie Daten senden sollen' - en: 'Timeperiod in seconds in which Tasmota devices shall send telemetry data' - - -item_attributes: - tasmota_topic: - type: str - description: - de: Topic um mit dem Tasmota Device zu kommunizieren (%topic%) - en: Topic to be used to communicate with the tasmota device (%topic%) - - tasmota_attr: - type: str - default: relay - description: - de: "Zu lesendes/schreibendes Attribut des Tasmota Devices. Achtung: Nicht jedes Attribut ist auf allen Device-Typen vorhanden." - en: "Attribute of Tasmota device that shall be read/written. Note: Not every attribute is available on all device types" - valid_list: - - relay - - online - - voltage - - current - - power - - power_total - - power_yesterday - - power_today - - temp - - hum - - dewpoint - - hsb - - white - - ct - - dimmer - - rf_recv - - rf_send - - rf_key_send - - rf_key_recv - - rf_key - - zb_permit_join - - zb_forget - - zb_ping - - power_total - - power_today - - power_yesterday - - analog_temp - - analog_temp1 - - analog_a0 - - analog_range - - esp32_temp - valid_list_description: - de: - - "Schalten des Relais -> bool, r/w" - - "Online Status des Tasmota Devices -> bool, r/o" - - "Spannung in Volt bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Strom in Ampere bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Leistung in Watt bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Verbrauch (gesamt) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Verbrauch (gestern) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Verbrauch (heute) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" - - "Temperatur in °C bei Tasmota Devices mit TEMP Sensor (DS18B20, AM2301) -> num, r/o" - - "Luftfeuchtigkeit in %rH bei Tasmota Devices mit HUM Sensor (AM2301) -> num, r/o" - - "Taupunkt in °C bei Tasmota Devices mit HUM und TEMP Sensor (AM2301) -> num, r/o" - - "Hue, Saturation, Brightness (HSB) bei RGBW Tasmota Devices (H801) -> list, r/w" - - "Color Temperature in Kelvin bei RGBW Tasmota Devices (H801) -> num, r/w" - - "Color Temperature in Kelvin bei RGBW Tasmota Devices (H801) -> num, r/w" - - "Dimmwert in % Tasmota Devices -> num, r/w" - - "Empfangene RF Daten bei Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> dict, r/o" - - "Zu sendende RF Daten bei Tasmota Device mit RF Sendemöglichkeit (SONOFF 
RF Bridge) -> dict {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'}, r/w" - - "Zu sendender RF-Key Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> num [1-16], r/w" - - "Zu empfangender RF-Key Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> num [1-16], r/w" - - 'RF Key' - - "Schaltet das Pairing an der ZigBee Bridge ein/aus -> bool, r/w" - - "Löscht das Zigbee-Gerät aus dem Item Wert aus der Liste bekannter Geräte in der Zigbee-Bridge -> str, r/w" - - "Sendet ein Ping zum Zigbee-Gerät aus dem Item Wert -> str, r/w" - - "Gemessener Gesamtenergieverbrauch" - - "Gemessener Energieverbrauch heute" - - "Gemessener Energieverbrauch gestern" - - "Temperatur am Analogeingang" - - "Temperatur am Analogeingang1" - - "ADC-Eingang eines ESPs" - - "ADC-Eingang eines ESPs" - - "Temperatur des ESP32" - - tasmota_relay: - type: int - default: 1 - valid_min: 1 - valid_max: 4 - description: - de: "Nummer des zu schaltenden Relais im Tasmota Device" - en: "Number of the relay in Tasmota device to use for switching command" - - tasmota_rf_details: - type: str - default: 1 - description: - de: "Nummer des auszulösenden RF Keys im Tasmota Device=Aktion bei Empfang" - en: "Number of rf keys to be used for sending command" - - tasmota_zb_device: - type: str - description: - de: "Friendly Name oder Kurzname des Zigbee Devices. ACHTUNG: Wird der Kurzname verwendet und beginnt dieser mit 0x, muss die Schreibweise '0x9CB9' verwendet werden" - en: "Friendly Name oder Short Name of Zigbee Devices" - - tasmota_zb_group: - type: num - description: - de: "Zigbee Control Group: Werte werden an diese Gruppe gesendet. Gruppennachrichten werden nicht empfangen. https://tasmota.github.io/docs/Device-Groups/#zigbee" - en: "Zigbee Control Group: Values will be sent to group. 
return messages will not be received" - - tasmota_zb_attr: - type: str - description: - de: "Schlüssel der Json-Dict, der vom Zigbee-Device bereitgestellt wird; Key aus dem dict des tasmota_zb_device" - en: "Dict Key of provided data; can be seen in Plugin WebIF" - valid_list_ci: - - device - - power - - dimmer - - hue - - sat - - ct - - ct_k - - temperature - - humidity - - reachable - - batterypercentage - - batterylastseenepoch - - lastseen - - lastseenepoch - - linkquality - - ieeeaddr - - modelid - - manufacturer - - colormode - - zonestatus - - contact - - movement - - colortempstepup - - colortempstepdown - - dimmerstepup - - dimmerstepdown - - dimmermove - - aqaravibrationmode - - aqaravibration505 - - batteryvoltage - - shutterclose - - shutteropen - - endpoint - - huemove - - 0300!0a - - 0300!01 - - 0300!03 - - 0300!4c - - 0006!00 - - 0006!01 - - 0008!01 - - 0008!02 - - 0008!03 - - 0008!04 - - 0008!05 - - valid_list_description: - de: - - "Geräte_ID Kurzform -> str, r/o" - - "Schalter true/false -> bool, r/w" - - "Helligkeit 0-100 -> num, r/w" - - "Farbwert 0-360 -> num, r/w" - - "Sättigung 0-100 -> num, r/w" - - "Farbtemperatur (mired scale), 150-500 -> num, r/w" - - "Farbtemperatur (Kelvin), 2000-6700 -> num, r/w" - - "Temperatur -> num, r/o" - - "Feuchtigkeit -> num, r/o" - - "Erreichbarkeit -> bool, r/o" - - "Batteriefüllung in % -> num, r/o" - - "Letzte Batteriemeldung -> datetime, r/o" - - "Letzter Kontakt vor xx Sekunden -> num, r/o" - - "Letzter Kontakt -> datetime, r/o" - - "Verbindungsqualität -> num, r/o" - - "IEEE-Adresse -> str, r/o" - - "Model-ID -> str, r/o" - - "Hersteller -> str, r/o" - - "Farbmodus -> num, r/o" - - "Zonenstatus -> num, r/o" - - "Kontakt -> bool, r/o" - - "Bewegung -> bool, r/o" - - "Farbtemperatur +" - - "Farbtemperatur -" - - "Dimmer +" - - "Dimmer -" - - "Dimmer" - - "aqaravibrationmode" - - "aqaravibration505" - - "Batteriespannung" - - "Rollo schließen" - - "Rollo öffnen" - - "Endlage erreicht" - - "Farbbewegung Hue" - - "0300!0a" - - "0300!01" - - "0300!03" - - "0300!4c" - - "0006!00" - - "0006!01" - - "0008!01" - - "0008!02" - - "0008!03" - - "0008!04" - - "0008!05" - - tasmota_zb_cluster: - type: bool - default: False - description: - de: "Ergänzung des Sendebefehls um entsprechendes Zigbee-Cluster" - en: "Use zigbee cluster in send command additionally" - - tasmota_sml_device: - type: str - description: - de: "Name des Smartmeter (SML Device)" - en: "Name of smartmeter (SML Device)" - - tasmota_sml_attr: - type: str - description: - de: "Smartmeter Attribut; muss dem Key des Dictionary dem SML Devices entsprechen" - en: "Smartmeter attribute; need to be key of SML device dictionary" - - tasmota_admin: - type: str - default: delete_retained_messages - description: - de: "" - en: "" - valid_list: - - delete_retained_messages - -item_structs: NONE - -plugin_functions: NONE - -logic_parameters: NONE - - +# Metadata for the plugin +plugin: + # Global plugin attributes + type: gateway # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Plugin zur Steuerung von Switches, die mit Tasmota Firmware ausgestattet sind. Die Kommunikation erfolgt über das MQTT Module von SmartHomeNG.' + en: 'Plugin to control switches which are equipped with Tasmote firmware. Communication is handled through the MQTT module of SmartHomeNG.' + maintainer: sisamiwe + tester: msinn # Who tests this plugin? 
+ state: ready # change to ready when done with development + keywords: iot + documentation: http://smarthomeng.de/user/plugins/tasmota/user_doc.html + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1520293-support-thread-für-das-tasmota-plugin + + version: 1.4.0 # Plugin version + sh_minversion: 1.9.3 # minimum shNG version to use this plugin +# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) +# py_minversion: # minimum Python version to use for this plugin + multi_instance: True # plugin supports multi instance + restartable: unknown + classname: Tasmota # class containing the plugin + +parameters: + # Definition of parameters to be configured in etc/plugin.yaml (enter 'parameters: NONE', if section should be empty) + full_topic: + type: str + default: '%prefix%/%topic%/' + description: + de: 'Vollständiges Topic (Prefix und Topic) zur Kommunikation mit den Tasmota Devices' + en: 'Full topic (prefix und topic) for communication with tasmota devices' + + telemetry_period: + type: int + default: 300 + valid_min: 10 + valid_max: 3600 + description: + de: 'Zeitabstand in Sekunden in dem die Tasmota Devices Telemetrie Daten senden sollen' + en: 'Timeperiod in seconds in which Tasmota devices shall send telemetry data' + + +item_attributes: + tasmota_topic: + type: str + description: + de: Topic um mit dem Tasmota Device zu kommunizieren (%topic%) + en: Topic to be used to communicate with the tasmota device (%topic%) + + tasmota_attr: + type: str + default: relay + description: + de: "Zu lesendes/schreibendes Attribut des Tasmota Devices. Achtung: Nicht jedes Attribut ist auf allen Device-Typen vorhanden." + en: "Attribute of Tasmota device that shall be read/written. Note: Not every attribute is available on all device types" + valid_list: + - relay + - online + - voltage + - current + - power + - power_total + - power_yesterday + - power_today + - temp + - hum + - dewpoint + - hsb + - white + - ct + - dimmer + - rf_recv + - rf_send + - rf_key_send + - rf_key_recv + - rf_key + - zb_permit_join + - zb_forget + - zb_ping + - power_total + - power_today + - power_yesterday + - analog_temp + - analog_temp1 + - analog_a0 + - analog_range + - esp32_temp + valid_list_description: + de: + - "Schalten des Relais -> bool, r/w" + - "Online Status des Tasmota Devices -> bool, r/o" + - "Spannung in Volt bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Strom in Ampere bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Leistung in Watt bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Verbrauch (gesamt) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Verbrauch (gestern) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Verbrauch (heute) in kWh bei Tasmota Devices mit ENERGY Sensor -> num, r/o" + - "Temperatur in °C bei Tasmota Devices mit TEMP Sensor (DS18B20, AM2301) -> num, r/o" + - "Luftfeuchtigkeit in %rH bei Tasmota Devices mit HUM Sensor (AM2301) -> num, r/o" + - "Taupunkt in °C bei Tasmota Devices mit HUM und TEMP Sensor (AM2301) -> num, r/o" + - "Hue, Saturation, Brightness (HSB) bei RGBW Tasmota Devices (H801) -> list, r/w" + - "Color Temperature in Kelvin bei RGBW Tasmota Devices (H801) -> num, r/w" + - "Color Temperature in Kelvin bei RGBW Tasmota Devices (H801) -> num, r/w" + - "Dimmwert in % Tasmota Devices -> num, r/w" + - "Empfangene RF Daten bei Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> dict, r/o" + - "Zu sendende RF Daten bei Tasmota Device mit RF Sendemöglichkeit (SONOFF 
RF Bridge) -> dict {'RfSync': 12220, 'RfLow': 440, 'RfHigh': 1210, 'RfCode':'#F06104'}, r/w" + - "Zu sendender RF-Key Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> num [1-16], r/w" + - "Zu empfangender RF-Key Tasmota Device mit RF Sendemöglichkeit (SONOFF RF Bridge) -> num [1-16], r/w" + - 'RF Key' + - "Schaltet das Pairing an der ZigBee Bridge ein/aus -> bool, r/w" + - "Löscht das Zigbee-Gerät aus dem Item Wert aus der Liste bekannter Geräte in der Zigbee-Bridge -> str, r/w" + - "Sendet ein Ping zum Zigbee-Gerät aus dem Item Wert -> str, r/w" + - "Gemessener Gesamtenergieverbrauch" + - "Gemessener Energieverbrauch heute" + - "Gemessener Energieverbrauch gestern" + - "Temperatur am Analogeingang" + - "Temperatur am Analogeingang1" + - "ADC-Eingang eines ESPs" + - "ADC-Eingang eines ESPs" + - "Temperatur des ESP32" + + tasmota_relay: + type: int + default: 1 + valid_min: 1 + valid_max: 4 + description: + de: "Nummer des zu schaltenden Relais im Tasmota Device" + en: "Number of the relay in Tasmota device to use for switching command" + + tasmota_rf_details: + type: str + default: 1 + description: + de: "Nummer des auszulösenden RF Keys im Tasmota Device=Aktion bei Empfang" + en: "Number of rf keys to be used for sending command" + + tasmota_zb_device: + type: str + description: + de: "Friendly Name oder Kurzname des Zigbee Devices. ACHTUNG: Wird der Kurzname verwendet und beginnt dieser mit 0x, muss die Schreibweise '0x9CB9' verwendet werden" + en: "Friendly Name oder Short Name of Zigbee Devices" + + tasmota_zb_group: + type: num + description: + de: "Zigbee Control Group: Werte werden an diese Gruppe gesendet. Gruppennachrichten werden nicht empfangen. https://tasmota.github.io/docs/Device-Groups/#zigbee" + en: "Zigbee Control Group: Values will be sent to group. 
return messages will not be received" + + tasmota_zb_attr: + type: str + description: + de: "Schlüssel der Json-Dict, der vom Zigbee-Device bereitgestellt wird; Key aus dem dict des tasmota_zb_device" + en: "Dict Key of provided data; can be seen in Plugin WebIF" + valid_list_ci: + - device + - power + - dimmer + - hue + - sat + - ct + - ct_k + - temperature + - humidity + - reachable + - batterypercentage + - batterylastseenepoch + - lastseen + - lastseenepoch + - linkquality + - ieeeaddr + - modelid + - manufacturer + - colormode + - zonestatus + - contact + - movement + - colortempstepup + - colortempstepdown + - dimmerstepup + - dimmerstepdown + - dimmermove + - aqaravibrationmode + - aqaravibration505 + - batteryvoltage + - shutterclose + - shutteropen + - endpoint + - huemove + - 0300!0a + - 0300!01 + - 0300!03 + - 0300!4c + - 0006!00 + - 0006!01 + - 0008!01 + - 0008!02 + - 0008!03 + - 0008!04 + - 0008!05 + + valid_list_description: + de: + - "Geräte_ID Kurzform -> str, r/o" + - "Schalter true/false -> bool, r/w" + - "Helligkeit 0-100 -> num, r/w" + - "Farbwert 0-360 -> num, r/w" + - "Sättigung 0-100 -> num, r/w" + - "Farbtemperatur (mired scale), 150-500 -> num, r/w" + - "Farbtemperatur (Kelvin), 2000-6700 -> num, r/w" + - "Temperatur -> num, r/o" + - "Feuchtigkeit -> num, r/o" + - "Erreichbarkeit -> bool, r/o" + - "Batteriefüllung in % -> num, r/o" + - "Letzte Batteriemeldung -> datetime, r/o" + - "Letzter Kontakt vor xx Sekunden -> num, r/o" + - "Letzter Kontakt -> datetime, r/o" + - "Verbindungsqualität -> num, r/o" + - "IEEE-Adresse -> str, r/o" + - "Model-ID -> str, r/o" + - "Hersteller -> str, r/o" + - "Farbmodus -> num, r/o" + - "Zonenstatus -> num, r/o" + - "Kontakt -> bool, r/o" + - "Bewegung -> bool, r/o" + - "Farbtemperatur +" + - "Farbtemperatur -" + - "Dimmer +" + - "Dimmer -" + - "Dimmer" + - "aqaravibrationmode" + - "aqaravibration505" + - "Batteriespannung" + - "Rollo schließen" + - "Rollo öffnen" + - "Endlage erreicht" + - "Farbbewegung Hue" + - "0300!0a" + - "0300!01" + - "0300!03" + - "0300!4c" + - "0006!00" + - "0006!01" + - "0008!01" + - "0008!02" + - "0008!03" + - "0008!04" + - "0008!05" + + tasmota_zb_cluster: + type: bool + default: False + description: + de: "Ergänzung des Sendebefehls um entsprechendes Zigbee-Cluster" + en: "Use zigbee cluster in send command additionally" + + tasmota_sml_device: + type: str + description: + de: "Name des Smartmeter (SML Device)" + en: "Name of smartmeter (SML Device)" + + tasmota_sml_attr: + type: str + description: + de: "Smartmeter Attribut; muss dem Key des Dictionary dem SML Devices entsprechen" + en: "Smartmeter attribute; need to be key of SML device dictionary" + + tasmota_admin: + type: str + default: delete_retained_messages + description: + de: "" + en: "" + valid_list: + - delete_retained_messages + +item_structs: NONE + +plugin_functions: NONE + +logic_parameters: NONE + + diff --git a/tasmota/webif/__init__.py b/tasmota/webif/__init__.py old mode 100644 new mode 100755 index 2cfa270cc..29f12a867 --- a/tasmota/webif/__init__.py +++ b/tasmota/webif/__init__.py @@ -1,125 +1,125 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2020- Martin Sinn m.sinn@gmx.de -# Copyright 2021- Michael Wenzel wenzel_michael@web.de -######################################################################### -# This file is part of SmartHomeNG. 
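Purely as an illustration of how the item attributes documented above combine in an item definition (the device topic 'delock_01' and the item names are invented for this sketch):

steckdose:
    schalten:
        type: bool
        tasmota_topic: delock_01
        tasmota_attr: relay
        tasmota_relay: 1
    verbrauch_heute:
        type: num
        tasmota_topic: delock_01
        tasmota_attr: power_today
    online:
        type: bool
        tasmota_topic: delock_01
        tasmota_attr: online

The item types follow the r/w notes in the valid_list descriptions: relay is a writable bool, power_today a read-only num, online a read-only bool.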
-# https://www.smarthomeNG.de -# https://knx-user-forum.de/forum/supportforen/smarthome-py -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# -######################################################################### - -import json - -from lib.item import Items -from lib.model.smartplugin import SmartPluginWebIf - - -# ------------------------------------------ -# Webinterface of the plugin -# ------------------------------------------ - -import cherrypy -from jinja2 import Environment, FileSystemLoader - - -class WebInterface(SmartPluginWebIf): - - def __init__(self, webif_dir, plugin): - """ - Initialization of instance of class WebInterface - - :param webif_dir: directory where the webinterface of the plugin resides - :param plugin: instance of the plugin - :type webif_dir: str - :type plugin: object - """ - self.logger = plugin.logger - self.webif_dir = webif_dir - self.plugin = plugin - self.items = Items.get_instance() - - self.tplenv = self.init_template_environment() - - @cherrypy.expose - def index(self, reload=None): - """ - Build index.html for cherrypy - - Render the template and return the html file to be delivered to the browser - - :return: contents of the template after being rendered - """ - self.plugin.get_broker_info() - - pagelength = self.plugin.get_parameter_value('webif_pagelength') - tmpl = self.tplenv.get_template('index.html') - - return tmpl.render(p=self.plugin, - webif_pagelength=pagelength, - items=self.plugin.tasmota_items, - item_count=len(self.plugin.tasmota_items), - plugin_shortname=self.plugin.get_shortname(), - plugin_version=self.plugin.get_version(), - plugin_info=self.plugin.get_info(), - maintenance=True if self.plugin.log_level == 10 else False, - ) - - @cherrypy.expose - def get_data_html(self, dataSet=None): - """ - Return data to update the webpage - - For the standard update mechanism of the web interface, the dataSet to return the data for is None - - :param dataSet: Dataset for which the data should be returned (standard: None) - :return: dict with the data needed to update the web page. 
- """ - if dataSet is None: - # get the new data - self.plugin.get_broker_info() - data = dict() - data['broker_info'] = self.plugin._broker - data['broker_uptime'] = self.plugin.broker_uptime() - - data['item_values'] = {} - for item in self.plugin.tasmota_items: - data['item_values'][item.id()] = {} - data['item_values'][item.id()]['value'] = item.property.value - data['item_values'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') - data['item_values'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') - - data['device_values'] = {} - for device in self.plugin.tasmota_devices: - data['device_values'][device] = {} - data['device_values'][device]['online'] = self.plugin.tasmota_devices[device].get('online', '-') - data['device_values'][device]['uptime'] = self.plugin.tasmota_devices[device].get('uptime', '-') - data['device_values'][device]['fw_ver'] = self.plugin.tasmota_devices[device].get('fw_ver', '-') - data['device_values'][device]['wifi_signal'] = self.plugin.tasmota_devices[device].get('wifi_signal', '-') - data['device_values'][device]['sensors'] = self.plugin.tasmota_devices[device].get('sensors', '-') - data['device_values'][device]['lights'] = self.plugin.tasmota_devices[device].get('lights', '-') - data['device_values'][device]['rf'] = self.plugin.tasmota_devices[device].get('rf', '-') - - data['tasmota_zigbee_devices'] = self.plugin.tasmota_zigbee_devices - - # return it as json the web page - try: - return json.dumps(data, default=str) - except Exception as e: - self.logger.error("get_data_html exception: {}".format(e)) - return {} - return +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2020- Martin Sinn m.sinn@gmx.de +# Copyright 2021- Michael Wenzel wenzel_michael@web.de +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +import json + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +import cherrypy +from jinja2 import Environment, FileSystemLoader + + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after being rendered + """ + self.plugin.get_broker_info() + + pagelength = self.plugin.get_parameter_value('webif_pagelength') + tmpl = self.tplenv.get_template('index.html') + + return tmpl.render(p=self.plugin, + webif_pagelength=pagelength, + items=self.plugin.tasmota_items, + item_count=len(self.plugin.tasmota_items), + plugin_shortname=self.plugin.get_shortname(), + plugin_version=self.plugin.get_version(), + plugin_info=self.plugin.get_info(), + maintenance=True if self.plugin.log_level == 10 else False, + ) + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. 
+ """ + if dataSet is None: + # get the new data + self.plugin.get_broker_info() + data = dict() + data['broker_info'] = self.plugin._broker + data['broker_uptime'] = self.plugin.broker_uptime() + + data['item_values'] = {} + for item in self.plugin.tasmota_items: + data['item_values'][item.id()] = {} + data['item_values'][item.id()]['value'] = item.property.value + data['item_values'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') + data['item_values'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') + + data['device_values'] = {} + for device in self.plugin.tasmota_devices: + data['device_values'][device] = {} + data['device_values'][device]['online'] = self.plugin.tasmota_devices[device].get('online', '-') + data['device_values'][device]['uptime'] = self.plugin.tasmota_devices[device].get('uptime', '-') + data['device_values'][device]['fw_ver'] = self.plugin.tasmota_devices[device].get('fw_ver', '-') + data['device_values'][device]['wifi_signal'] = self.plugin.tasmota_devices[device].get('wifi_signal', '-') + data['device_values'][device]['sensors'] = self.plugin.tasmota_devices[device].get('sensors', '-') + data['device_values'][device]['lights'] = self.plugin.tasmota_devices[device].get('lights', '-') + data['device_values'][device]['rf'] = self.plugin.tasmota_devices[device].get('rf', '-') + + data['tasmota_zigbee_devices'] = self.plugin.tasmota_zigbee_devices + + # return it as json the web page + try: + return json.dumps(data, default=str) + except Exception as e: + self.logger.error("get_data_html exception: {}".format(e)) + return {} + return diff --git a/tasmota/webif/templates/index.html b/tasmota/webif/templates/index.html old mode 100644 new mode 100755 index a8d7a00f8..da031567f --- a/tasmota/webif/templates/index.html +++ b/tasmota/webif/templates/index.html @@ -1,773 +1,773 @@ -{% extends "base_plugin.html" %} -{% set logo_frame = false %} - - -{% set update_interval = [(((10 * item_count) / 1000) | round | int) * 1000, 5000]|max %} - - -{% block pluginstyles %} - -{% endblock pluginstyles %} - - -{% block pluginscripts %} - - -{% endblock pluginscripts %} - -{% block headtable %} - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
{{_('Broker Host')}}{{ p.broker_config.host }}{{_('Broker Port')}}{{ p.broker_config.port }}
{{_('Benutzer')}}{{ p.broker_config.user }}{{_('Passwort')}} - {% if p.broker_config.password %} - {% for letter in p.broker_config.password %}*{% endfor %} - {% endif %} -
{{_('QoS')}}{{ p.broker_config.qos }}{{_('full_topic')}}{{ p.full_topic }}
-{% endblock headtable %} - - -{% block buttons %} -{% endblock %} - - -{% set tabcount = 6 %} - - -{% if p.tasmota_items != [] %} - {% set start_tab = 1 %} -{% endif %} - - -{% if items != [] %} - {% set tab1title = _("" ~ plugin_shortname ~ " Items") %} -{% else %} - {% set tab1title = "hidden" %} -{% endif %} -{% set tab2title = _("" ~ plugin_shortname ~ " Devices") %} -{% set tab3title = _("" ~ plugin_shortname ~ " " ~ _('Details') ~ "") %} -{% set tab4title = _("" ~ plugin_shortname ~ " " ~ _('Zigbee Devices') ~ "") %} -{% set tab5title = _("" ~ " Broker Information") %} -{% if maintenance %} - {% set tab6title = _("" ~ plugin_shortname ~ " " ~ _('Maintenance') ~ "") %} -{% else %} - {% set tab6title = "hidden" %} -{% endif %} - - -{% block bodytab1 %} -
-

Item Information

- - - - - - - - - - - - - - - {% for item in items %} - - - - - - - {% if p.get_iattr_value(item.conf, 'tasmota_relay') is in ['1', '2', '3', '4', '5', '6', '7', '8'] %} - - {% elif p.get_iattr_value(item.conf, 'tasmota_attr') == 'relay' %} - - {% else %} - - {% endif %} - - - - {% endfor %} - -
{{ _('Item') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Tasmota Topic') }}{{ _('Relais') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item._path }}{{ item._type }}{{ item() }}{{ p.get_iattr_value(item.conf, 'tasmota_topic') }}{{ p.get_iattr_value(item.conf, 'tasmota_relay') }}1-{{ item.last_update().strftime('%d.%m.%Y %H:%M:%S') }}{{ item.last_change().strftime('%d.%m.%Y %H:%M:%S') }}
-
-{% endblock %} - - -{% block bodytab2 %} -
-

Device Information

- - - - - - - - - - - - - - - - - - - {% for device in p.tasmota_devices %} - {% if 'fw_ver' in p.tasmota_devices[device] %} - - - - - - - - - - - - {% if p.tasmota_devices[device]['wifi_signal'] %} - - {% else %} - - {% endif %} - - - {% endif %} - {% endfor %} - -
{{ _('Tasmota Topic') }}{{ _('Online') }}{{ _('Friendy Name') }}{{ _('Mac Adresse') }}{{ _('IP Adresse') }}{{ _('Uptime') }}{{ _('Sensor Type') }}{{ _('Firmware') }}{{ _('Module') }}{{ _('Wifi') }}{{ _('Details') }}
{{ device }}{{ p.tasmota_devices[device].online }}{{ p.tasmota_devices[device].friendly_name }}{{ p.tasmota_devices[device].mac }}{{ p.tasmota_devices[device].ip }}{{ p.tasmota_devices[device].uptime }} - {% if p.tasmota_devices[device]['sensors'] != {} %} - {% for key in p.tasmota_devices[device]['sensors'] %} - {{ key }} - {%if not loop.last%}, {%endif%} - {% endfor %} - {% else %} - - - {% endif %} - {{ p.tasmota_devices[device].fw_ver }}{{ p.tasmota_devices[device].module }}{{ p.tasmota_devices[device].wifi_signal }} dBm - - {% for entry in p.tasmota_devices[device]['discovery_config'] %} - - - - - {% endfor %} -
{{ entry }}:{{ p.tasmota_devices[device]['discovery_config'][entry] }}
-
- -
-{% endblock %} - - -{% block bodytab3 %} -
-{% if p.has_energy_sensor %} -

ENERGY SENSORS

- - - - - - - - - - - - - - - - - {% for device in p.tasmota_devices %} - {% if p.tasmota_devices[device]['sensors']['ENERGY'] %} - - - - - - - - - - - - - {% endif %} - {% endfor %} - -
{{ _('Tasmota Topic') }}{{ _('Spannung') }}{{ _('Strom') }}{{ _('Leistung') }}{{ _('Heute') }}{{ _('Gestern') }}{{ _('Gesamt') }}{{ _('Gesamt - Startzeit') }}
{{ device }}{{ p.tasmota_devices[device]['sensors']['ENERGY']['voltage'] }}V.{{ p.tasmota_devices[device]['sensors']['ENERGY']['current'] }}A.{{ p.tasmota_devices[device]['sensors']['ENERGY']['power'] }}W{{ p.tasmota_devices[device]['sensors']['ENERGY']['today'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['yesterday'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total'] }}kWh{{ p.tasmota_devices[device]['sensors']['ENERGY']['total_starttime'] }}
-
-
-{% endif %} - -{% if p.has_env_sensor %} -

ENVIRONMENTAL SENSORS

[Tasmota plugin web interface template (index.html), old markup removed: the table sections for environmental sensors (DS18B20, AM2301 and SHT3X with Temperatur, Luftfeuchtigkeit, Taupunkt and 1w-ID), other sensors, lights (HSB, Dimmer, Color, CT, Scheme, Fade, Speed, LED-Table), RF (RF-Received, RF-Send Result, RF-Key Result), the Zigbee information tab, the broker information/monitor tab and the maintenance tab with the device and Zigbee dictionaries.]

[Replacement template added: it extends "base_plugin.html", derives update_interval from item_count (never below 5000 ms), renders a headtable with Broker Host, Broker Port, Benutzer, Passwort (masked), QoS and full_topic, and defines six tabs: Items (hidden when no items are configured), Devices, Details, Zigbee Devices, Broker Information and Maintenance (hidden unless maintenance mode is active). The tabs rebuild the item list (Item, Typ, Wert, Tasmota Topic, Relais, Letztes Update, Letzter Change), the device overview (topic, online state, friendly name, MAC, IP, uptime, sensor type, firmware, module, WiFi signal, discovery details), the per-sensor detail tables (energy, DS18B20, AM2301, SHT3X, other sensors, lights, RF), the Zigbee device list, the broker information and broker monitor tables and the maintenance dictionaries.]
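As a side note on the refresh logic: the new template derives its polling interval from the number of configured items. The Python sketch below merely restates that Jinja2 expression for illustration; the function name is made up, only the arithmetic is taken from the template.

```python
# Mirrors the template expression
#   {% set update_interval = [(((10 * item_count) / 1000) | round | int) * 1000, 5000]|max %}
# i.e. roughly 10 ms per item, rounded to whole seconds, but never below 5000 ms.
def update_interval(item_count: int) -> int:
    return max(round((10 * item_count) / 1000) * 1000, 5000)

print(update_interval(100))    # 5000  -> small setups poll every 5 s
print(update_interval(1200))   # 12000 -> 1200 items stretch the interval to 12 s
```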
From da9e9f384f175d322bc2b0d736997aea9cc04c60 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Sat, 29 Apr 2023 23:55:07 +0200
Subject: [PATCH 076/775] AppleTV Plugin: use separate webif/init for web interface integration

---
 appletv/__init__.py       | 152 --------------------------------------
 appletv/webif/__init__.py | 143 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 143 insertions(+), 152 deletions(-)
 create mode 100755 appletv/webif/__init__.py

diff --git a/appletv/__init__.py b/appletv/__init__.py
index 16e2673df..21b6e3c5e 100755
--- a/appletv/__init__.py
+++ b/appletv/__init__.py
@@ -426,155 +426,3 @@ def pause(self):
[removed here: the init_webinterface() helper and the WebInterface(SmartPluginWebIf) class that was embedded in appletv/__init__.py, including its index(), get_data_html() and button_pressed() handlers for the discover, start_authorization and finish_authorization buttons.]

diff --git a/appletv/webif/__init__.py b/appletv/webif/__init__.py
new file mode 100755
index 000000000..dd44b0433
--- /dev/null
+++ b/appletv/webif/__init__.py
@@ -0,0 +1,143 @@
[added: the same WebInterface class in its own module, with a GPLv3 license header and imports for json, cherrypy, jinja2, lib.item.Items and lib.model.smartplugin.SmartPluginWebIf. index() renders index.html with all items that carry the appletv attribute of the current instance, get_data_html() returns the plugin state as JSON for the periodic page update, and button_pressed() drives discovery and the pyatv pairing flow (device-provided PIN or a random PIN generated by SmartHomeNG).]

From c640d5a229c77545ba5a49f4795592d4f83df09e Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Sat, 29 Apr 2023 23:55:44 +0200
Subject: [PATCH 077/775] AppleTV plugin: integrate external webif file

---
 appletv/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/appletv/__init__.py b/appletv/__init__.py
index 21b6e3c5e..de026aabb 100755
--- a/appletv/__init__.py
+++ b/appletv/__init__.py
@@ -28,6 +28,7 @@
 from lib.module import Modules
 from lib.model.smartplugin import *
 from lib.item import Items
+from .webif import WebInterface

 import asyncio
 import datetime
@@ -89,7 +90,7 @@ def __init__(self, sh):
         self._atv_pwc = None

-        self.init_webinterface()
+        self.init_webinterface(WebInterface)

         return

     def run(self):
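Taken together, patches 076 and 077 establish the pattern of keeping the CherryPy web interface in its own webif package and handing the class to init_webinterface(). The following is only a condensed sketch of that shape, boiled down from the code added above; the real class also implements the pairing and discover buttons and passes the plugin's item list to the template.

```python
# Condensed sketch of the appletv/webif/__init__.py pattern introduced by PATCH 076
import json
import cherrypy
from lib.model.smartplugin import SmartPluginWebIf


class WebInterface(SmartPluginWebIf):

    def __init__(self, webif_dir, plugin):
        self.logger = plugin.logger           # reuse the plugin's logger
        self.webif_dir = webif_dir
        self.plugin = plugin
        self.tplenv = self.init_template_environment()

    @cherrypy.expose
    def index(self, reload=None):
        # render webif/templates/index.html with the plugin instance
        tmpl = self.tplenv.get_template('index.html')
        return tmpl.render(p=self.plugin)

    @cherrypy.expose
    def get_data_html(self, dataSet=None):
        # periodic AJAX update: return the current plugin state as JSON
        if dataSet is None:
            return json.dumps({'state': self.plugin._state})
        return {}
```

The plugin module itself then only needs `from .webif import WebInterface` and the `self.init_webinterface(WebInterface)` call that PATCH 077 adds.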
From b9303c7aa87a6bb7e73695b59794a61dbbd27024 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Sat, 29 Apr 2023 23:56:24 +0200
Subject: [PATCH 078/775] AppleTV plugin: introduce some error handling if no "real" AppleTV is found

---
 appletv/__init__.py | 65 ++++++++++++++++++++++++++++-----------------
 1 file changed, 40 insertions(+), 25 deletions(-)

diff --git a/appletv/__init__.py b/appletv/__init__.py
index de026aabb..19e7801c4 100755
--- a/appletv/__init__.py
+++ b/appletv/__init__.py
[changes in this diff: the file docstring now reads "AppleTV plugin" instead of the generic sample-plugin text; PLUGIN_VERSION is bumped from '1.6.1' to '1.6.2'; stop() wraps the event loop shutdown in try/except and logs "Issues stopping AppleTV plugin: {e}"; connect() only appends the version to 'TvOS ' when device_info.version is not None and guards the power state query ("Could not query power state"); disconnect() catches exceptions while stopping the push updater ("Could not disconnect from AppleTV"); the async exception handler message is corrected from "ASYNC EXCEPTIONM" to "ASYNC EXCEPTION"; _push_listener_thread_worker() tolerates a failing set_exception_handler(); and playstatus_update() wraps the repeat/shuffle queries in try/except ("Could not query repeat and/or shuffle state").]
From 8d3012ba8815dc3b98e85b2af95d35810478cce0 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Sat, 29 Apr 2023 23:58:09 +0200
Subject: [PATCH 079/775] AppleTV plugin: introduce item attributes in metadata, fix struct (item should not be named type - renamed to media_type) bump to version 1.6.2

---
 appletv/plugin.yaml | 299 ++++++++++++++++++++++++++++++++++----------
 1 file changed, 232 insertions(+), 67 deletions(-)

diff --git a/appletv/plugin.yaml b/appletv/plugin.yaml
index c7d185ee5..43053f3d0 100755
--- a/appletv/plugin.yaml
+++ b/appletv/plugin.yaml
[changes in this diff: the plugin metadata now lists onkelandy as tester and version 1.6.2; the placeholder "item_attributes: NONE" is replaced by a full definition of the appletv attribute (type str) with German and English descriptions and a valid_list covering all readable states (name, ip, mac, model, os, power, the playing_* values, the artwork_* values) and all remote-control commands (rc_top_menu through rc_wakeup); in the item_structs the "name:" keys of the struct entries become "remark:", "cache: Yes" becomes "cache: True", the misspelled "enfore_updates" becomes "enforce_updates", and the playing sub-items "type" / "type_text" are renamed to "media_type" / "media_type_text".]

From 7048399635103b4af5ed44d8b0ddd3ea55b12aeb Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Sat, 29 Apr 2023 23:58:29 +0200
Subject: [PATCH 080/775] AppleTV: replace README with user_doc

---
 appletv/README.md                 | 340 ------------------------------
 appletv/assets/webif_appletv1.png | Bin 0 -> 420698 bytes
 appletv/user_doc.rst              | 290 +++++++++++++++++++++++++
 3 files changed, 290 insertions(+), 340 deletions(-)
 delete mode 100755 appletv/README.md
 create mode 100644 appletv/assets/webif_appletv1.png
 create mode 100755 appletv/user_doc.rst

diff --git a/appletv/README.md b/appletv/README.md
deleted file mode 100755
index 88ba3c835..000000000
--- a/appletv/README.md
+++ /dev/null
[removed: the English README ("Apple TV plugin", version 1.7.1). It explained that the plugin controls one or more Apple TVs with one instance per device on top of the pyatv library by Pierre Ståhl, listed the requirements (Python >= 3.6, the pyatv package, any Apple TV generation), the plugin.yaml parameters instance, ip and login_id (with autodetection when they are omitted), and documented every item: name, artwork_url, play_state (0-6) and play_state_string, playing, media_type (1-4) and media_type_string, album, artist, genre, title, position, total_time, position_percent, repeat (0-2) and repeat_string, shuffle, plus the rc_* command items that the plugin resets to False after execution. It closed with a full items.yaml example for an "atv.wohnzimmer" device and the plugin methods is_playing(), play(), pause() and play_url(url), the latter requiring a one-time authentication via the web interface.]
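For illustration, a short SmartHomeNG logic built only on the methods the README documented; the `appletv` name stands for whatever plugin or instance name is configured, `sh` is the object available in logics, and the stream URL is the one quoted in the README.

```python
# Toggle playback using the documented plugin methods is_playing(), pause() and play_url().
if sh.appletv.is_playing():
    sh.appletv.pause()
else:
    # play_url() needs the one-time pairing via the web interface, as the README notes
    sh.appletv.play_url('http://distribution.bbb3d.renderfarming.net/video/mp4/'
                        'bbb_sunflower_1080p_60fps_normal.mp4')
```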
[also removed with the README: the "Visualisation with SmartVISU" section, an HTML snippet for a SmartVISU page that builds a remote control from basic.stateswitch widgets (rc_top_menu, rc_menu, the cursor keys, rc_select, shuffle, repeat, rc_previous, rc_play, rc_pause, rc_next), shows name, media_type_string, play_state_string, artist, album, title and genre via basic.print, binds a basic.slider to position_percent and displays the artwork_url image.]
zDQVaUgs{hgp%FpST7+AXsl2p^L?*`$+81RZzcQV%l|E41EU+0wQ zaL?k;kRsXFWfTnt8^f8hv6&@=2aM5Mf?4nTuy>Gy(8MCKcS`vfn~*4(7jVR>2>1L0 z0#p-AJKHQ$zC()xF%U<*L_{z=iJ8vH-U~NbBPs*BSU_Z{UfwfJMBk#?1g;IHHDBan zC)SF?JG!Q4xoz--_{UV}=PyLdi)5x(kHQGEutYARK)Xb0Z8%3$qZRd!h1CCJn&9^a zu4MftP}m}g@7}$r1P(npk?V^Gdi74S7$_q~SDAnU<)Z=49Pi|h6;!Ul|vQAq7cw=wR-EX3BnerANdvvcyI*^MpU4u6}E zWm;t!U)uDD3mNi72()%as%@UaXK=add8!U1!sIvpQA!UkKL>!^Maq|^tI{D7ZhryO zUqDAH1(=D;sx6HQIhosM^uUb|Hxox|rHztVlOFQvS4hyDcJquI@b|Jk2BiwQSx9V159mQTG4XM=62A%OPiQXQ#R}AQtG-3;-mV z#re^uD&MCCEWXdk)3PmXgikY;R_6s} zIJ;EW{fE+0k5na*eQ1N(g#h?VcK%M#H)h0;{5I8hWv}lSi;O)1DeX8qG?6_l7Ct}q zIzP&S&wb7g)@*L0?tvGMi|B`ng|u42V--NWt6@2u59bb|!KoxwR6s)lUOU}+<2p-~ zUj8vqiOw?Tq3H{A{L|DOded)M^Kz}b!^7$F$`~U&c<;4R8WE);wR7*1`aNl>d{s80 z_}xCroF>ctS>-S1_FvBV#M;F|VtidL@rlNj(`xlZLdqA-xqqH@i#*K^AlGS%+h54+ zqrYd;i@)+Dc=CVkvqcOZ($O$FbR~Qr;ez(_5NUt4&D1Jr_o@2NzpJ1x))7)^X)_|D zU!N~qDLq58xk>Lq|5i4UF0l7`(F%e4Rch&X+A0ACOitYt_HG%&fv~gJMr0liDdNPlWYt#pWcdfPQRVx)=!-W(s+GwTg|rxqHRL$Pm=o9UNCz(>R?4croZ&L+E| z7!iTm>*p(omndK(!2YaU9k0axsW#Nu$!&ir3-A;Sk(Kaf0r_Wic^>BQN6#BgHe8&x zF5F0zK>3#pahWjUca@v2v*JD65E&?x-54uOAk_SVixa}@ckb;yQg7p-9f_;(^=#S9 zax$|_!kO{l;_NCY@8ENxepU5ulCZ-;R9%NKJJDanxnKr_T_tWze_UR_iC}E%S=njh z@G-$u(X4cFWK9tlTE3MUB%YT)>rW^tB0bCx*7iV(aB+=fG5FcPY+KamE&`&m5jhW* zZAGeXaFwBnx}JOM$r2vR6Xw-xRs9TVO!b+4q?4V%|VmA(rw=q-lGc$2(VZ{z}l`K6GDT=LEER1B5_}te~@fRH0A8;nq)Qp zGM?K1gqRSTLxh`-%W^H|i*J^!7(U^YHM9Lo$r0FN6HsSA79~ zd4o?YGBU?2y`$WVZ?sh__h$#hT8AE2_(Qy6_}_SYuL^pdv{3fO&}T0*dXe1Oh>VO3 zU|ej}|M-F*E>H>;un5^br}9aE6!u0nzBhg~HZrJ`^B7AsBW7sTUS%<&oWRxXD~#H&=rv-#;U*bGcUrHN5Xb*$JL@`j&N{p_Is(J zcT9#?iH3ZDY-G%?MG>^26Yaa5hrIAY50sC26=2z+V@%BD$AlO>0D(|22u4uLMoL(# z?p>5g$dJb41f^goXg$*J%{Q@cKHb z-gU$1zH;hn;i*Z|@fb4X%#rTapXYcHxQV3(SS~Bh_=LUm7)TcF95u!sBW2&l&@nq2 zvAT*!9@5Y}kj0gWy-+6Q<8(UxTN zcBw+&jmczbTpLFQzX@F;&u3`RBv~rwI)6A|!D2LDqY`L<3xJl)>FH@lY05$jUNc}1 zfcXI@iVoGvRR>(tmIEIU;jI-g_dJa$5lx~*)>Up{dahM&F04_YL*E(ljdo(rYHUzC z)zdQJc-encQ>+zr2zOUgW^EAKSr$nL@ zGV%Y@ZpVVW!N|7Y!c!CgbE~1dce;7=;gr9d9Uc}}>^(nC$+_lo$IH8hkX3=7QALyu zU33*k-XJ8CuXEJv=K6t#k51*+aW7K;1mNE$?hbrZAz|4_o-rlS9e=n>J$wV5?}c8j ztwc+POpB`r5^n&(OCVP`1V(*p06>e6kH2(e6QO+L=Q-q|K>_`o7^qOvKz5|EF;*0^ zA`BuT3X&AfD;~s1BSU;c^@sn`cII;-^u$O+JH%YL73*+9gU9@A%I(!&}A_uT{bBc2#Yb zZ_#NMXaAzZc^?HkSmwB;UM`6)04RkN-q#q=Y|i;S{(NnIa*yRnW$^19q9+po-NDfY zCIgjb+s}%UY7#Xch2bac&UhTD0#sSTc&V6|TneO)koP8FAr-@-gbc)3sh60Vew6$C zzd6l+dRpp1Jl`mqK`a^p1HkwNA^bp%j!X`wbCEZq9wYyBr7ecw?(@B7Q6Xrrt?#%0 zI^`7FH*?EA^=2A9xCRf=jY{UvP zRFV96Q<0kwK7fqvQbJ=_M=$-ZB_lW%p`WS;_)hqjaj~ zYpJk;7NAjL{-c)D{IZ!$Bj)W~c?TJ{%NO%^m(6qLebc8pDJpELS5^67)Wo*0ty^w- zn5v^q_^fd>uYizB_=lI}XgP}Wk0C|8O!K$2Eef6C6pEWuAK#icD?}ZM zMBIiXS=ib&k{gch$=BdBM~R5o%vruuCgRY|{9ZI^*>UsEsxqB0CC4I!5o*g+1O23% z&mqa5Fb(Fzd0?~Ibl+rdYcmcYY2-ViZ~mD>i|U5gM%}2`Uu$F#kSN2nqR^0Bz>kcH za4>#LI@(_%_*0ho#s`NQ>CTvyGO$fZq~WC0-5K#>%Alu)L69s|8FI>m)+Y+F$E!ey zJrD)afGB?odxr^k2amw7yIW?m)bIwoepAv>=*+j0(3xwYuVO7X5F>LipeH))>bcmX zMNi+&a#J3#dSW9A94K zdh`n~Y;W_7;)i*kzF5u$sSx0Md%-RujFd7f!} z2oMTp)S2pheHM3ixLMS(h)evyrIF;CLYz95Q(rh=tr-V2O!S}hw7#`@`fvhY=-WGe z-JV^b(<;%$VXBzK?ErCJn-1}eAa~C94O+oFmghHZFJ!9&I(2FdcLt5J1gfkC8~K~H zSwgx0w7r1^zb&Y@XzQ^7J0#>jpru77rv*QfNPLX;+XVt8ggYM^OEf_nxIn)1-m_kv zNbHD+(&#X^fQ-z?9LzYSu6j&_za1~VADHcd#t}i0UPHps<`kog<}vw$Q0yHLSwoOg z;3LU+7L8oczaGigU&w%jmE{XG>y- zxw-B%Y%B!OWDwBBW2uhxcK(#tm zdj4lgfFp?V%=$n^yDtGHB?yeE0N1Ap|5X=ZPO5i`*l@da-F9=bxLvhPQTooGhAzl! 
z1i}mGgm1cFjPFOH>%`+kCTfGX;GHDYQ4{=@71|dt_#!opOYSwU{)d{ zlhgaI5Jbd?*JVn!%sQuOfmxMIi*hCM1nsAeFd_>utQukg2^SWC{4h*jSK;WYUJQTr*GA%Ggx9=pU8UumdWg*wm;YG*tV19`83})7&a>{ ze}4NkVc}&pG9*dFy!*V;VxRDiT+LBQyy`&KLa*JPJLUs<6~+k4bdbB{S)crALyA%g z0n-~|Ejmac+cF#G^Tk5}AG=fmxTDXyQ5e6al)UhiGW>l5Re*zYDo(3c`1xxT9VDa( z+?IU|Y1uSgRa%4X`p0b4KjVly5)KYdCFnw*oo_c=d`@6r+Srh1+mbE3gG7l2g*7oJv-bbeD=xl z5h3MYFlM(^H24H4JYXcp9g^_HeV1p|ljYEPNLYGfbcRR$}wl z#!kkse*hT3JDFqwmw|;4no^UEhj4Aou@PB)Z!tZAM`6~Lw@`0L$;%2>+oWHd z?)79`pKpr*hZOxVM~isb(Y~a^Vd)0jSI(433IXTAb=V8zuGi_pA3a_Sq(A68uf>3B zHG(0*a^OdixLGn%cKb!A>b{X$ZAT6N$x7QtBHq{Ge^E8H?4uccid+e2c9(p<-68T_ z$P^t-ivjo~6Al@apS5f-=o1Mobbg(QYz00EZN91V|KgK`ULtTxc^$0c*v!<&gIZA< zz=dMN67Y;yzzpA)u?!OmT!kX&Gi8FFrz%MK^F;lS+7qDgd4vKyl?SW#sv60@j1b66 z+qou1Vd=;r&DiEN7|h^-)BA^o8MFbIRu&1!ge=2vky0iPN#CJAEJU|FnFe`|dbK$H z=&iLb=@#Qkfy@%<=dn%k3oc1{(u*yGGxORy4NCUm@EeGTewEiPi21sCxXXRXtbOcW zCp$D6dC#5-HhI>)sB~ZEV(;_Y_DPg${&J=vf9^bX%W^t5jc^?G^}bctSolbu71o1| zhwhjnXU2zyi;a;5zKm>auk%L>3-pK*zIReNejm*2QL7xj-kq|}*Doplujh`8i1i%- ztvw!i9|wB4>?hp+u;KU)wBg3Lrv?Ixzu$h@_-~6ZWmIo*-ei)E+rNu+RE#;Qi3Y_- zKcRdC`&MD5T3g|;D8;-4ejAB@bT?aR5_e~*KQ(nPgyH}kwY-Xo$^kc1FqFpX@#9im zHVRx)IUJ*TSNq`Lpa8YZQl<9OXs|#_Ny!lrk{KK<6NbT@%7O+xf`Ece zUf~VZBnA??IH*X1zsj0j?mghK8JhTkozb$eRMw``RDle+5PH_A&R_KGPLhzRG3Ikt=+->R}WPDmJoCv-OJSUS2?1O1#zW~?%vjW4WkhZc!{bL@1Z z(nJJ&4)FAEH^y9L=!l&xK#RM1%wI{*eE^5X*SL!?FoJRxizZL%PbU~V3P@VSu(ylv zMm$X5*T+jWm1eb>2Q&h;=w3PfOag$YfbReu$H??G@#})~*Em~N(`IY~xY+h_2q~I; zau~}oxQHI(0Y-?3k+g_&bm*n#%YeGO(y;9&^AZ@u2V)mY-VJR6ud(D{4*cGwf}!+uza ztE>$_Y=Zy$VP~L2z~uD~m9PR_D*%5Fv<2=5`nJs=v}?BDk&-eLZuz$&MG|Xe@C9{E zf}YLR>P2w4R>wQ2^e7G)B33`q)o;_}k3(`pn4hJGUJhYm%2=y}oPW0HjjZ;>w;>4G z=bOQq1G_tb&YeC$(Aq5rNRq;l57K36pg@&{7pOvLx5-9w5Vso@NAsVQX{mFT+Q<7c z^6@41Hb2o2^Z_G_)qyJ^ooQ9rD=d+0=j+1pa%$M%<65?#LX#s0;e4b0;;A(Pm0=Er z-rgPEp3$W?`!@lW%eTtirF3!%*TyFhs(&3LD$0jox1?t&ZVMN_Ky&44;?cMiv zo)#uc17+4f>;3vlW6R{Ch%oCbDh0fFp848BKdX{i?XlL<0}PDt4`f%$s}AGT~Mpvt5Ey+!c*PWqHDJ%OrPle@dl}wTJPBx-+Is z-4x)R2HgbVKjf9D5%gI!pG` zae!<1fkE-z)jd?tJ6l=cs=v=#qXXnuBglkFcMu~} zBCJ{;c)xulqL56O(+etcsW8GX;s1WKk3q?Q=6H%Lmppmk+O5jJ*jU3*SO;{hdF)1? 
z6uc~n#G%3(Utrl8?tftZ?u5w|Gy+duS7$y+SnMa*aMi(exxF@|G?4#vdgP0WXMgxI zm9~ycD?sygIvb4NbL)S7M<*g^E;8c!0&(hlDgFrg!S_vr4h zio9ze#X3)L)nbG1w^@!CQvj~0$h|O>Au!&o2|#rNDX;*z^ldSZ_KS4J2PsT>a*4Ev zQ6LvP^@g$IRmM-Np|7$PFU3Hw0-(FH#_U`MVqPPq{JXHaNcFym(5bRc>p1iSouxrC z_NmoaYS{gZ`3p3k5?;%_n=`d;!-X%Z6P|xXK}LROvN>56u=l;kI3YV#KdH=MmMV@_ zQ_1qoUB2Q4#hj(GzRzKn_B&xC;3Wulbn86>p4rXH33j;7c`KIi+K4Z+NIlIzJIOHQ zHisb;{Y6tWMZQG?(_1x1L5X4`AyXBlg8x?kw&6YYZw?7tC^z{%Z(+RjrAIn?cF|xt z`_wW_xTvs}a&nAAX-%~`U%8st)C%&s$~3#KYvgTD~m^Wx+QSvi-}yE#3Bzi zXeyI(nH$9TaTzTU!zq$}Y>!YiW$5r&nR@}f5zgCgc8R)?#v3!4E~0Em7s5X_U1zhB z8(i?3iIVrvt@GI)k_E^^k*L0eeb_WhtvhHBmvb8&nLYBctF6RIe}N{Xt9Y_Jvi;g& z`B1aTD7gcWLF5WF7zP;#-t^anQ^|fwOl0R7&b&# zuV>NEQIRQJF@GjwGCz|s*$C0ckFk@5JQ5z-PRp=9Df|F<#AVV#8+`4nKw>+6cQz#S zCE)%_qS{$azF*v5L7?;liIRRD3|JqOE;(NBhf=(NhwQKKd2?`E7sSD!$J$Vg63y55 zXFrn_7z=NSc$TFOD}cEvT0ObW1I7Rfk91Ig48x5@|N2-^@$|@pJ@p3JwVpI+`#qHR z09kI8Pl5a`8g#auM8g;EODBLFGR>H%X!c(%-^$u7gr^&Z1wy@EXFbx~|A(%(ii$dH z_l5;UK)ME`LqbxdTR=h@>6QlR9vVct1f;vWyFt3UJBEfKhOY0=v-i8!ckt{J7HbX| z*2H~Z*Du#o>s6HR#NoyuD5getcivo+?qnWjs&T*xxm~#(c9G`)b6yifpUL!gnf(v{d7u;+aq(NkbB**%vokbGCsDwG7pPj0JXD3YJ(`cLdTM%f z30V{*s1Ki@y1@2Wft9#fHMSZ-M6(BJo?5}!l0fWhQh9x)y8n6+s4CV%k9LH+F z1D}hN1FIWsr~BpiYD4g}2Rml3E2ZU+x(D@oJ;Q$rYRyAzKDX`G?hb;6?V4|((O;hPV_JHXK5_;6$QvLEl4w?Ab8 zM({=i6c<1fsk({E8E1SD7=T0_f_emJ1+|3%L0bRNkn|n#4u)34 z21b5$I$$Pjba<`R1>BilMap@I`1WD2tt^y_Wia`4swdM5 z-TK+Zb-t?AVsLxLr37#YWU$UgLM0 z{+n^cp&&0k?_XuB-j*NvIw$8t6h5En{!_#UVTq0DGV1=+$)=m3kC7*BI-Yl0^nev* z&v2J6C)b83)kZfOdk(pzWN)curR(z^f3Vh!0w%K2wCk%+xTA@|clAYPPNT(JGn@5B z2COOe!kJj?PBZ%N&_k8KF<(yQNl_d|2D!;aDb|&~&e=G;5mO@h2%QXQ)_@o`%}|=W z|HvJY+`crTY1+!epQ6{g(%}Y2yUXte3!+ymHdZ}60lEw2NvB-mH3C8@f#?vRqLjrx zTigrd@oBtsvDpwmb<(iNSep6qPC{$+BRS{Wr8;9jQRu%^>h(l`3SMb7)qL}y8tAN? zQ*pcFdV;hx`|C{6uy^ttkD=CtY`H!eP{$YbY~eJxUeIu0LIqV$ucGQH^%LC&l z>n4CI0!SO?`eRY;zRR|BGPL)1a3p+Q|1#TT9}I|{4i^_PtL#1+9o};@U$%xBVjr{x z^ziyS7-vNzRDIuAPW&jks0I;ey@3-G-V}rc&(~}=HFyiT_M}_i2moax0+I5(s`-rX zW!AniUcZ05NB89XNB$=VsbT3N%I zL>%a$cP*3wP_0=#nYbU*KPrN{_rlDtHi0bl&%GX7Jf%GXGw>MKLqIPhugi(diy{Q@ z+DUf);Xs!Jw$%!G@=OGNGB5UDvn4wD9ARQymLL6H+&0?%0P8^>a!lK#Ge`*`9G+B8 z$mx!~b)hYW-e79?Vh=$W*k1cCm&hE<&*#?65Df0CeEsVm>_2gHs}~#2MKJy4k|$*X zs;Rs$2svE4WY&8?N%P-h>;V)IK;B-`k*fn^9t;l4RT$_Ddr^zt?OzP#v(U&*PF4Z3 z6tWhszd(7s>^_@OhRl1m!IpU>FeyJ$#zF1DPp5;|da_AhQu*3)=~?1}m~tvyPDFQ4#t&NYjVjLs4iyH~Vun6lyG(BGt-y|F zwn`qD(WDgpDS3>YyhEKyFps6_M*(!wbkyBl#pBW{N(rwvrN2_Hw7@-x9^T^NJ zoVYVFXcO$GJxe3iZ2}J;j_fs#W=A!^We~nl6J*jL=*T;6o^!x$R`k8KPPKTfv-L$& z%@pC|1W)F1!&-XZ=iyK-6whMUZ}1BMQp)SiWC0&iTsXUCb>-@Le6pcM=M5nF*}o_A zdWnWrg6azFjntK&Q%nENhz#)l!qc;pZHp}3p#ccKpbwm%a%XRzs=Ew> zz`(9~+FRst}NnE=M!XO>R;4$Q3E@)Zc@WnF^(BQ;H}fr6C%V=(p$=fU`&!Cv?a<~oTv5EL&ATw1w6#+lR*e+%8E zF}FPS=~sc!$&zM*(iXZQmNEaCw7pMSet_>ohw(H=K0Pgo%Z9o11eZluo3S_Iw(8ti zn~Iwj>4P&oGSm94>0>+WsUp%Kd?eZ@kYS<0oDB`cH#lz(ZRh$uZwWin^PUP323#pY zSY|JnHXNSNHs6lJ7fqc|#d#5UE$F_`k3Vr;7)ZB}XsQTVclmTyC5Ah>XD@d9?mo@s zIG;e2l}3m77sY`L|8&hasypL5c5`7l6#$78KA0#j4@tUZyjy5<%9aTY_X)&U_i}g+ zKk46iOrq;g2;}~+oB1x5?aseMDG|m=#UOE5MCoM7ic3_4ZdIC~F zN70Cc{-1Nxqe zfCR=cKX*r{lwKGS5!Fh4LF#^0=oJJ)CvRdCMJekLL6DqK1~ghi6>_Lc>*Ukc#yO znY60@6e*YGWXWAntXGCdmmWIXCqHs4>^8-v?VQ`Y5nZ{ z9D5djP$6B=vjD(|Uf9}dv3X=yfpu;2_24(yfEpe(|B>I%3!6;K`mUys9rmcNiDdy# z%4|Eoi}VPs7kUY%)^tTzF8!V;NPH!zQmz`?c6)*dyjXkck*I{{f>w(yVhjpx+B}-A z_KBc+{7)z7T+YLE2UE@YtzAY>fM%)iGMS{N%JRMGb6RQ=xIYc}2rj=AMeJWks3Ib& zzv(t0ZzVcC`9t6kaIpo9JkL;ZH>e-RWqziKZ1VW<`>EkcOt}9x?Er|~s3t)N--Xv? 
zp*Si7l>=8=&P6@^vB>Zr8fr8Pz1Z*h>>8yTuhT;DA{fa59rMp?uo~_-3V3uCslxqT z-6zMVxI$trsv=#=`lGJQ3zuWk!;lwnHUFT*)#ed(g6FlS@(1X0VipeLZ_Zoo++*6Z zWC(kSg2IVoDK^f(f8kW{Y1p?_-=T9uSb7&nVEt8_=d@2^r{#c}oD|J}rUQ_86z(4~ z84KEs;t>;<#Ohq`elAjIO98nU1A6m@37s|r>Tp8f^2cS;{92vRVd!f1Y%LW#SU1Cp z1RRaQOT*F+5=4qyPY32|7o8w+nUG|=M5A{~F<aNrXXiW@U1e;!J^92x_z9K&k%um4rn z!=LO^4SHLFm)dE?kle}3_?w~lirZRvmqWiZKc@57SbHGzRGAXj$boD@5ccWmXqDm4 zkxao9?=e5{HI&%@->>18+iYcHLAW%pf$#3{C1TBwPzxrW1)S6Ep*Y{S^{(OS8NDag zUod6wha>{dB6$Gi`LL^c$k~qhgGY+anqVuPUj=^#r}E)SF{0j#91q0X;?;xp-z0fRv9J3>hH>A|LW*Oi@qmfC{re11)XBttjixr1+PqpyWUZ z=n@diinMuuCLQ~i4?t_dE;#jc-fb;_;%h;@b?(zF{v0@;SzxbUF$VH!mEfp;ZTIJy zUf%Q9axROX)juT=_*;VR>j)Si;D;CM?a7M!=lxU{L;!QAVeo#4kZz@tqH!)^*2wzJ z*af{CMc#Q1c5%3KRLP$Za(Utxzz^Nl{nqP*JJ)s*1_6BT!E3cTEsh$)ep*14uP&=~ zPvH+=coXPjSl@!?CsJ%|5g^hcKG3K?!S_6vx0I)D+V1I}7_a*Yu`BgO7LZP&I6>+E zaVqrIVxPM#d91T6h%Omm9i)4V7PI>z_qT(U903*CJ8XEqwD?v3cfn|~(pmxmY&V`N z-~@AtC+uHjHopk*@Wk?a_%zbC#?pkcRhg28Mij{Sk;sLFgfz(Xgn*i-N=>fNe8Krj6~5V`jbrkCL3@5P?fLiS z@$Bl=V(n6mcz2u_`(y2B-^H4lZu$=TDBd-PrH-*=^5FN=EGQKVxzGpB(uR*EfvZ^8 za2VaKR+aiR3M0~9?RcUvO=sP@ez-#~D)$RV4H)}UjVBPID$Z!y(c zarOHZZ_YcHUqx$sw6kkFy{IX(BA-({l(d(#vs%H{&83tX$KdJd!iQhWYf+;tD-ywj#Q zQ|it$9AY{CiJu>``JO}%D(#YzD>mcR!$Cu27e;jH z%>KYWW57$9tH-BN7C8QuGiV85lRc~2O7vjeXU0GjNaedj{4cV%G^q{^l-Go6j9+Ex z@jrKUqws)*B`lW%Ns1IBWF|uC%a>ihKHp!1EsJb57l4Tw+$l;}iNoAXUa208&-pRm z8P!DpPn!IOb|RhL73@2%oyt+f5K7Jwk8X!DovHX>38@D@soIw4?V^ z3cg40gYfxI%hKi^NR@2Wb9$z#!G1%PC}@$B&T^1cxgoqk=EX-coyO}R`1D>+yP_VP zKFW!gjdR4^U3Rm_hU&L5y~MI}rTf{FLtc+5hxATvKsg>eUl{(djD~Xub>2?$v1v-5)9{2<{xmSiHMI=73WrQh!o4XZy{AM|jS+#LXU# zbIFq%t3pzXIlU9ceIt^VJxJcOQWMGP^yCI;YBJX=HbH(CwGbk=#`TrVQUkHNZZD9* zlt%X#6_j^aX5KD&2)AE+zq{7yc-63xJaA3yL(Nw1pBIb>l5dP9J;Hn0vy(xCV%5T< z>PF$?+A3z@YV8%c>0rxpcJD21Jy#|$UH|+lQKeSOShET2n``h{w%ts<{gryV+1z4h z+`&%WSZceE$@5+Etk>3Asb*b+4*s=#Yg}E=-fCU9e^Yf za{ZKSAN?F$U6Q&8WT1rdH90~*c5BWVakP-TLqc+4lxaWJjX?C65HancQ z;|dyTtxZm5qtN!GnV~2kUSjezQes>`or?9GI}_+}gpJI__bIQG7P0M?YyX(q zMjWcIF2iMd;^A2Z+Bw|Y!U$Asb#tm=_sq~7;a#llB$_>5ZcT1WdI)brs#GX*E=_{1 zTq~LHSSpbP{znwVJ71C_cn-K#03;x9FbANUnQn_7X1pI|$s!4;vppVV6CbG2h@M!j zR;y(3COq0s62<_>%+zEU9EbTFa2b2)_1JUml z=1y8`Sh+oZM6T3=7&{Do8Jm@XsCsmYWj0=2+6vnV?4+k5nPYbey?|$HR1*Fmx2ru9 zJFLc1sG$l*sMFfPw2BzS<>=Yu+2E2ZMD>#|{AgSGx5R&;Z-WuG-73*s{JDtL+b&Xc zhA3dZq`>xA`O2R-T1&-M^hxLt>C9JxYw&-rf zULO-Z?uycJ*$9SOh8vhVMUE$L3sgq3s$4A}w;5=Ito~<_Hi~n)`_CfXfs3U&77NoGOpX6y{Ii6@hjqOiYVd%VmYO#}0|GmGeGE{*n8I{s5 zs!P>&UAH^`YwokY;?6Y3luK*tdBpZo6DcuI+K~Evn{Cf!j!n$Oy9?QNTE`O;ACI?+ z;vV_*8^jLx9w*GrV+JrM+JFjR>%4@6Cl!MiM4G4QXzs4~V+uG)*qsiPD;uufo<*9f z@x1IT-^}1Jphut0#>tEH+S}$kDT;d#ZOf9N1^92ike^}B*LfngvVFSqWovI(U+HHI zmQ^j&l%*1nnBeVW(W$dpDd@rauTGKwJZOp?bolSRA<=4)Ry2L+3PL`CTs4al&V}FJPbGo$ z&e0oQYpUYH2coiqVpE$A~jp;jcaC_LJ#9 z3VT?qi6DW|-$`GrxLrkg8Wa<`J6p%B`2vIpPTcp=A;;_rVVbs^U1MZ0P^W5sBayC- zi?FxUrw>9N#@j?$ebj@70Z1TY#izccV!d)^Pd?+%fX1MAbJNHv1-pKGEMA_varoKw zj)mQ6(NV)YKZ3;UJSX@ig-FHXD+(|A$&&0sl^Dwd8)q+oC-mq5|Hf=L4ZnOGlLnob z;+mGv_2@vBW)F5iqMOkF*tLBoGV@Y6jR1yeQ2n2ztNfr;z}QHX2CzMZMywUZwj1F^oApI zp#jV*UUVl>KjZ(r^>82YbGAhM z*>aH0W$i7o-=U=CC?Jo%1H{jiaAa?YKXhPVSg*Ex1DNofHSg!tBLNxPibaB+|ENMU z-|+L_0#TZxK)MQ6-nc{K%!o!9%4=kR9%M;F>5URw42U*4tQHdngLR73 zUxDTMG4*Qipad8RG+wL`O-{!p=zjlqeu5k|=A1uUuQ<#~H7#U!EQ>n58SBdhXZqx3 z#M^_o5qYK}2^Lr|E(%mAy?$t-D^-&jFcWZD;N>6Wp7&o_o;OM@$!pIoa!MhArwu&o zJ^=$c4bvFGItiDzhWm7a%uM&ML}w?dZb>!d7NpHMZq9bhx0Tf&)(7I5UZYuGXrgZX z;a~3&VpRB1%;JV3Vo9CThXNOwK7YpT-aE~Cm_YO9os0iW>~BXvDZyv4x^#%c;*&Vu zTqY}cb)aw?@9&hm_T+SIz$y()d9OoV{`NCyBUP)n7~(n?I#-SHJ$TVGVifjJ-|>5b zq5E1jC+pOn(KRC3JNTjf_U|^r)IP_>syjcEwh)VMPu2>X)9ZZ*${TQ~qG! 
zntJA?WLhN{>-d!(m1>c6tZvEJC6YzfQ?@2V}MVfKPHe{ z3%G4ar{eWIOH+5MC#Znn`4;{ZEX&(Mf z!k9Hr!D&7eO??N=*6qkXFmB({IxZByJh1nhZl5ce`Yn^vx2rFVGB?+xB5g4A1z^rH zOPA`VfOfftAV)^=ABw*iNNnac>xB(FPulpyu?=i0Qo?Ng_7fZOjuY*PE z{+^y?zWH|4iLNGL?DnEFE6>hG+-Q>tHR~mqW6% z{&0NZ$Y%Q81$y`M21a*SwyoMv@+dj@4HQYQM#Z1Ew`Q0H@#w1mY`}o^Mg! zA)@b<$L{)+LbqOgG?BWVwNU;io%5;4wZ?D=$qbh1>YUNQA5G2W>E|m!uYEzLC0Etd z(W0?g`db&L2FxHj>8etHa7G-$*(){dc;{-op|Pg9h=~Bt%`}73jkJI2{j)X>h8l+a zfcQM@QTPFZBMD4sca0WDwTj(_(k)$mqgct!+6^RTCB2NJAaB7FbscG~W+>$`pSxCQ z@fRA3ZT(D)(C_o_d+3X_wLR2Oz?+ja5)u0P%Q;?`5OFfg!s`pyW<%ns&A$$@r-2dZ${*f$z3!9)#Ut}u_U#4j|}==A?ceKk1686d0S-`_kEgG z9Pnuc31XozfTP~>?Hsv}q=Q|QL41EP4H(Of^7ylZBw5xKFV}G3N$Ln3g+3*O1YcBN zl3hN10HXRmuT5bFCQe}tp6)D9sT@#}{nrjVJ+@~~mR7ydJxMpS#X_re>$i3geZtu` zdPu*~A|wev+T(eb!T?2UmYC&+h@zWMDyJ9arVNQeh96LsVtC6J+%-fvAu>EYbW5G7+GpJpA;*nG9j%XX)N8qPFF03GQyt;ecMDg3o z_$yJ!&WOy%X!n65B*o`#>iZGrN678HV&Kok+I}l;7q&m6Yp5B&c-X!#h%9tp{={Cb zz%;GGY^p3zDHZ!A)sa0|C6=wE_5g88V{p*?sgUVo!W4ucq8CIGeEWdB0leOZ%l_Vv zP3;B0!Ow#yS3o*MM#h~RpwwhL3zc#S@R6A5`4>n(la23pN4zWz&@-wFNn=?7^}J$Q zM1_j2gn$QLE6jzv_ga{7cthto7PwN+QKjq-@v*}(h;$GF z?;w*#Q;OidM@z_LKNi^(-{ZZx9r`K9Vwotd?1%2H>Wy@c#WfFRLje2_+6jc_DFCuJ zo~7YNAQ#KQp{Gi0(Iu33e^jU0-#^orUTV!lAb37H!k&oBDz8&iNI`KE;hxPajovw! z3xD(hddu9pT0R3)Eh!ub*KUg4PJ2 zj<0;USIzk1RSk_H@YnlL5fo8+^|8-jF_A6L+&hvxl?irweGI36Dc{)ITo)>vpc?|8$DJFHg^|S4bK#@Yki3uI(Ifx3w#%3jgYXI&ftgN3pnTnwS0MQ> z)|d;UjalYb{^1r0!+Qwh*nyc8hty6~oDGkEpUuC20uzY=3YXZfdn3Gg?qA%;9&dO= zyjX?Ua;3k`_0-zRYU8wkR-&t&1x`6s$`qVkgUZ3o8D%=sPV;wdwlKGlqjr;ZY0%u3Z`7?n`9C~>@S552Z!x5=Z&A-aO?SL+igj{vr2%^Or zYoA2|>vby5|E$;DzORJ7Zjv<+aard8-;Yoddgh)?&`{u)7q=gI&n9tukHTI;^$mlxGSZ>f3?TGJh*NIZ>Pa+ zScnV&xcFIUF+J}`G4QgYkx{HzvMQD3-5c&B@B{`51}xMBtT=9_7=TYl zXNr630aN7|qyZIF7!FY|4vw?{+K>=&@?5(++ zW+Lyr=)qqW=1hJO%S*Y?cZ;J{SA$D~dNiOAlJ-s0V#$^ZV5iiTRN{82@h?dK%|X4T z&5>5)@c3Q1vcO#b1B=YPBIBL=DR*~CLL2H>B(g|G7HFJc7oVl&&TapR0Hsy^!(Sdx zdJUG)d76VZ8{m%#3XR#dZuxUjb;G}>R&FNm=5gRmoetS6*SGF+VL(qpj#qzRE0RW& z?e2oF>uDmfwPL!PKmn_(!@@D2_wW3Y4giBR->-sHiDyGnGBU`E6bh4^Ia|4$*F%Clp3#QPjO;qX*bg0hX|+Ks;V>dmDl~QA+aX_um^f2UNU$- zcPVSwj3mwER(#_NX0`=HtW6HrWit8}vOr2ai|X9P=XYQ*YfBYl*K8QDQOT)UIQ}X9 zUJ)eGSnFMy0~omq3le<-U#hXSLsM>EsUd@RLR`B*-i$mzwCSI~m30@yafz-#^B$OL z&dt4fEapfhL4P#S#R9h`pe&EWBZG^?h87IMC`k=T<9X~MJeQ9g+IaN`0DF!Xu4Mvl z@(Ij56bp%a>swwwm&}n1Ad8tmW2)xk$+*_wW;;OZj?6uc9vnj28IBiSCK=Ddh!{$` zE|1FaDq)^ShwcGQS9_(w92!A<=9~DcOIa#c(eR^9yPtg3G|C#~z+xHU+Drjtas9i}S6>A^bmkq<2JtiIia~Kbj4OUb zk)~J}(Bx;yPj!y%Lvx~tX%Z~IsSm)`D0oauRoPx8uDu<*GqO4h*J#IT72rcl)Q z;9I+^-q+PzJO;=2|5h)%`+A*D4omS@usxG1cj(kicVHg!x>woMd@=?i)gqMG@rPM| zD{58_{d12|>f!f07Wk$$v>6z=r}@0Bt8ItOn;1FG?dqysbP6s2MV4d-;>)}n5)ut^ z&67`;Tl2h5Wj`a1JlM|sj}4w4P9Bg&nsxZVZ66<7*_kHXjOq{r7{MP$q|>fP4vV=7 z$dFo2qSG2%9@%v6Z&!D(2IV3nWb<%@Mp!QA40CFvW-tP?+x1p@ zVEV?Q1R%)?Q06UG{yIrV|L-OoVyBT8=xgGVo-L_FngP#kns$Bbxcg&c| zqSt2AAa>}J#ZNvgtwTNP%8Z~+_Tcz`D#r<>XzPRF^fx{N#{tti1I0*n4?YR z0j;BYR#(O4;h&yTrjEH!N4YYRZ|W#sZc;fZWQQOWnj71^7qH(TeQdl>K|MqOc|4dp zI2VZyKd?3-{8(cY`cau`g2>Hk0I2D!cUntns=@i~^CYpZeRF4&rSC=djIC&QZ)VFQ z=1{=ONvpg5#17q*G+Ndzm}wP3VJq({$#?c3_n^>2e>1H}-E5}5vH|&uZBN+)X9re)w}RYgIX#_b!hlOWAdG{e7B5 z>R-A=D4jh>&iU0w7qiGl)ljCm`IQ)l8}-#4yZ7CSP22=icBN=%_ukr=l zuLqPWF0tzjvRoL^^`nsZylh^QIX;&>(*l~;G5_3}kl132P}la9Pialg!JtojOT8Q8 zubo2MhURXaCHdE#L24Kab=A z!3*mAy3_~%IiCw0csKDs>S{8W5aURr^s4ATZeF=L9ge50d8=r^paHV~QFJYvMSUBp zz#6}pj9j}s<(}CxS+|G-@~z4b zHTqa%`V`Z*RzF}+1E=$IDp8g3Jexw@WW)yYiz#`b*jgYgw+ErtT{ZTba7PzaFQpEM zioUX}R?(>PsQL9O1t|1@WbJjOWOTW(v$(Y8T@~@uS$?|tU0CAh1v)QwFS6Z%)MS7^ z%YAHxPP8(dT_;Sui$5b~VLK_gC#-Axr>YyfCAP>|`4lc5|5k+YS#qQa4LJ$)63*oUB;l;%0TPoX*vk*NtTq`l7 
z-z~F-=Q>DV7^ja!02PC^gpKjuD5OdIEwz{c^h2mY-a~rpy-#_T)IIl;WGr*53tSUxx)3B#MO#zTfu`DAnT1X9A@Ez}^=bt^%rcU4S})clz{MK9lBhk0?iX&a}T~v|Px!9+uXBWq=Xyeenr=C7<=09|(s36Uy8q`4*L6{c;xb z7=OxFx1JvZ?hWRbSO4vY?>}rwWk0fGF;({%r@)@#`au*!lb)G9iF6l_deGz=EbRcD zEbLh{k4gP-l)mC@`3)FGwjAmGX-25dEL?N>)tK3o`uf<+Lb%P*+n>n?+;fTaJiPO7 zrFc(9*Gvdo&5WZZ{Q?{p(WF@=5wHB@}tk9)7ifEb2c zi^$a{yY~C5!^v7{XDT&sUpD*UGg_d4NV=7WYvsnJaeFhg9`%Krs0_H+AWR6d=};h!KWr;}}jh45&jZK|Ks45g{^Vnx=+($2ekz2~#fNlB7YdFLA3bI`whlP6j21@2R`Hhy;Mud)5s=iEzO7-_Sm$kSy z3s(o*$vxS2v^2}L8p*{it$w+5^FP`8W@-r$f~z#|ErN*SuY^ zz?L^)H+B|0&H^0kbbO&?W~F$e0Iixeet7=-DBS(`1K8`8wECI*r;eN5=`9M-X}!GL z_P!(g`vSN$E$gO4C>9=5*%bfgH`}Lfi zH|g{7;1DEtqWq@s%o|^aoa=e(t#|uVOvt=s*6N(2m5Ti6o_oUcL0m$6aXOo~ULTus zp~9$p*^?~sSO%2BY8UgYr2Md9-J%3z|71r9TFZ)v}ALP_xWuX#f#}@CpMTlOZLU&HMJ~4`H&4CvH ztKF+`zL1+xh*ljbRBDY@GwxL-d*{r*{ucC0MLr7wotebrH<|9lyTNohO*-j0o0wU& zVriF)MB(g3l@`N^>ulgZ=q#TPwzJ`;ocXn9E$tst2CZlY9MF;^})!isS2lwqah zlW$gj>X)dK;WqhmXUn#ISDiNN=Do{;C1O)v@rhB12lqY z`X0KOUK8|ZZ}ilxyZEkduO2Jq4)1l2Ys8D$_iHhs8R;y#E&+xAOn7f6g#E0&69&gn z)U3|Rvg6Q-?$e$2&k=`r4eGu}y9@1oCaKHI8L(8UG)ZjSAxu7OvdQm#s;kY$IG*k$ zp?qu!au$MIE=GdZ3r+ty=M%dY)R@@@xg==fX%tf-2DU8YckUk4ISCQ|(lYfTt^Iuc zHL~mAz8#0liY=YGjqItqd)=;wkL0keVu_-|kQ)4z+!vm3JCWNi%M6O5lyCCvN5$1l z-XZEks|ZJmc7RIAE;-5v7T^Iv`_XurXpS(ng*8q<PVSFkvjsiC zA$pl)+uHaO0jJa`%a16Ur^u7){qlu^O&g~!_^em~dc7LfQQw}rs6sgXj-$!$H9xJh z%|3^EMX^P8h@{`+a}ZIE?QB$Bi{6ki|Evn^SA|2mR#${=D>wDJjamJU-{u*ab^O93q+9*Y_htG{CNyYYzEPJmaIX??Nb;mp!CI~u36WdD`z{Lu&eX>8kB`;ZIS}&Lznz|cGn4a= zBsn#{0Jx<9t-eq+kxkQ#HE*9GkOZ^+dn+PlN=|^z$_MORN2AOm8A=Tf%2|yOVYRCsOF(gr@ICP0yQ3?ctcvCXHzQ*6Bo> z=NN|d)O6vUOl3Uy_bk6Fe9zg@op+nMBYP~8wmVzJHSLaBR4vj1D0RhGiqiPF0Q8}? zs5WU;ITp$!oW`j2jHupJ9cnww#Ad+}k${+V8T(TIJn}7+;Xt?k-CjJr?YH)!z454b z)3w!5Op=?vPx>(v@GUQ7*c7X%AGi0gKJ%5QxBXl(&cnMZwZv=E%Mo`>5=xbvapR3A zPNANb@)N67y~7OXK>EFh$^2kK>mui0idIqIQAsFZVq3Rc%MLvB`O<~&5_YiVA2+@3 zyzB6WtKv`ctH`w|--R@bB2wOlRO*)775>|qH+GIeR~ms@6kpf2%sI%fTmJ@R)l9}b zrfe`(ukdIr%~&m6f8$MyZdxj~Z(*~Fl)Dq%|Bje=u`qY|Ti8@Cxm{c^fqPA_jLMUb zJq{<*a(@KHtr3dngm7Jr4MpBr8W${;RnPS0dSASSd!Mq*51QS@I|~4#v$tfw4dmj2 zULe+8yjGqy4&+JK)VUkOOKy>K%kmt(j%0Hmq=?11N-Nr^x?f9bS=;?}a(}ascK}_= zyDLXOdCZtvd>>^7BZ_JTK}E7y{C!g zXa{H@MkdATNA&&s7uWT;c5ZtkkSq?+=WZ8KV!)9C&w_TkN4-*_!NzmF?2sF}&+)DN zkZ{&QzOYmH8LJd+Ch~B08Jas&b}@S0 zK2So?`M%lOi+}M|K@|0Mi?H;svU+G0W2Im{leKN$Cs2?#UM8jNQ;CXs5|hr3?oqCK7q%9HIPwC_Yt zZE^Y)-uJ6KGXi-)I~gx}qL=?V`FMX_S1(l6hE=77GH`l1z<$y2+`@3zZON9%Sge>J zhe6N$mo-}TFYBwzrt+?(A1Q0PPj_mG?e;PC&N<>jP2-$0Q@xTd?N3VcrSa3RBDG>^ zj!-bz*tGHDf1#faCkaRdpa~5L@z$S}11Hbz<;o}Gr1 z1jUM!kr6T<6Mdr;clGFEXan8I#w>g!xJa0~W0rtJKJry0J#IQh;%olSY8WJ;9AcxU zcQ^*fMiRpw@1$*bZ}=AUK8|8>VG=QQWGCOd4e#2U4T0>e8*Npkr|(_4zGRhuSdHL* zx2a-5zRrk)G~QW?T0AANI31rUQ9GfiQEL}isLi`VgyDtqWF^=jA)cL`ySQo$8Mf3B36@bMK=7s;J0`Ia}oTF(O~ycE~sRz#44H| z!uGjR(@m;E1D@~7bqJ9Rwec}K4Y%2NA1~SB|H}dp6E)=RE;_;Wu;!nt{Pivvc=Jg@ zy=Gw;IJ%WHNq&bVT)y$%i0~|(P6o1v0(@ECT1q1m;zss@qUAhj7iQF@LNXbBB+G{y zLV=TVoUkTz^gGvo#(6m8k1~q!LYMofHh9;!yj$tzvM~CaH|bE-FlQ(&lqauBjpyRM zOjqwi4d%kRAwf!%tIw?SIcBQ_)nhQZIP0D-tU^!$Cso zb!}SSMPUP^R~f9}JLA6rm%NEY+K%yg`ZIevY8WZ7_q~~dr7LuMmSQde&?mVv!}uEpDM9Kwe{O2Jc}2HQ|bPM1M{)V3K6ni(5hgr){<7s!^Y>wVbr`@z;kZ68C|Ik-Z_Vw`+TP^h6~|* zn)H0jaO%OwRHJk4>b$&4#rt?cR%{%P4B5l9{IEANd}l(GN3G>UtG@D&Bbei9k%8qK z;Oc#xvBz1zHC}Y&IZSbA-34B8Il;$0Pj5cnmu9(rXCr6-1al>?Xdv=%SKV}>3iA1M z9_PVTDEwI$c<4|*kf`zKoB^^_xz!>T{{%5;JcG`;ZUHC7#pIsab{paq#^!JL<4)I& zUznY*b}2H&Tv1eWV?WwTIh*WZpnsX#RkF49x(LjCo86tpuXe&rh_#hfhrjW|M^wX=0 zcbi=^RdvV8p-sgR8?{Dw@{Q2$w z_ziH)d7jr=>s;$t>sV`K(=yutm;g6iXDtb`e~czjS7t|(x*5X0`eS|g+PjTk)R$hj 
zAhV1TH8pvCarR6oGRd}KHq9I)40Q33y-neoDU4ji+#@V8BQbvg3{w;o)WH*~fdF$a zHNziq41!I7o8d0Mcm7+ooJhkjyYp*=a?TlzDWZ!MxTa>#Lnm6U zx6_ssNIs%Os6%TeRBNo&Syc}vTm;fFH*Y_fIa8}+QeF^=n-waIP-W6nmlWrZcxd}P zQU1!>cw#WDT6vuNj6Kb^7oNRm%6b$_KAG#6?4oLEJ5%y)OXzKf<=diX(3GLpB#OI6 zlV{Q`XPLWPEPRd86&6Gm(2J1G?WIuXgIP@}WNVUJv~YqW!d+dQ;jH2P3lXEV zq6nWc`XjN|{JPRB_>B3H`EWkBUJNC_j$M^KxdvB{Dog6hUfu`}bpBlXz3LtQkyXEH zdPxt9iHqIqZ_`&ygK1ccfsV34+!W<}kc%l*K}1V|$2J0D$A9$MN%-l~)IE{~rB;=` zb(jjvsyX!A(i1*V|$h;f^>|0LojQGaA4XUMmJKpSn z5)s1Lo@>woQU$we%NM)#dqx0{N{R6he}SesdKyImpb<4J$OC0+#^?Ks!@&5Wc;m>U zi|?jKe;r2MgjPiHBAH~j%~~H_ci8vb<|PxR>cknwo3D7)-jVwyJe=fMkqCgo3RNS? zB*-_Fep>X}t=CF+wdA_00eGLnKr3lyf%kjv;)B$OYf113Z!`g#^jo-lOz-N|Tyl5W zh;81uw^WVMEmU=TS-(WB@^4xa2Y{~Ij(4Y~OZe|@&B%stPW7gpG;Fe+C2h)FnOEn% z7er|3UbW~wAm^Pu{yIB*9ADD<-FX_)oElyaeOo*iUbz^@g)*|pOL(sJzA7tV`k7_r z=4W`<9YK3o-Qw;MId~a`%u3wiI165ZI}*j*?6`M*uX`g-PYuns<}kQvYUo=8(tGO> z2(c-q5G_erH=pKmOCYGyoL0~4^h*;H&*7g4kqMQVuW3g(#c-|VhAG%)V=PYZE6}tQ z!!^dRPGKH36BUQTrhb*KcHjR9asYS>_5oCvqM0eexUN@wlj{x8qWKE@O0Hm+Abnp` zgstQ2qN`+{bJeE0u)u5~_;~<1Dv>K0-!A1_2x{u1rYozdJF2)g-ATIDHYvwK9==Vz zI%k_mL3*SI6Oq-B#H?-8N#baUN{1dad;?}XQW~8i>xMxIYVH4oNa3)Zv(1cg5<)&! z%@HLLc0)G2yb2w;=F|azrlae7!rg74u10#~)%9C&Mm_SUb||@-?|M`SYgCfWmFG{v zS$pX0VSAFTXU`D;GWjLb)}0)Kg>h%i&sm1k=fnb27g^<10Z-@Q-2Fk4V3~t}Kv8xds9cnW?oKvY&a?Tw#>D~pptjBwO*TafyX{$T zdbynr%*2zqsN3BVAe|Ul(A{5O1S$$w67mp^&BBB!v#UL3nR5fK?8KTu32#bV2G1A< zya{T{@6_x12wOZ=ysn>ed7Ms6Rl)axU)xQazThn_(Gn@YcYznTWkLM}Sv4pLlLUQ! z06h*?LH10e#v%lAKDK?W?#LKWXYgeqm^-nXeyoUmyKlT7uVUnutJz3?Zb6NfLgILd z$y`rd>~PMTu2*a958#(Ob)SJ@_Gvg1J6l;UQs2C0U6hhm$*SrBpz#K5&cfGWn=<9m z0SdMaWzsC3yyXtD9FlQ8|z$;=|lk$bRu(dU!K zac_CZ^6#$E?DA)Bh+$e*OQM2o+j_EVl1^g1|D$^oknt?z99Xxl8U97P+)&Ncj@4w z!}*wz%=~F8cYEKFdJoKd(Fc#PPf(cAx}%P@7#Ls8*g}hj@g$wtO$fft?ta)s-xq{; z1ff>Q-8^C7`GB*}VKt#hnsiU$ru_sk*&^;XE~(p?mrHiG6cEs7XK#NU9p#veNwla+ zvQR!Vk4lVX@sxFwirbqk?~qwCHQHFVvDNHw_6hV!xTiP6ngX`nzGf|-35T)mIp^+p z?XisrbX+Mdl^CX`m2tOHRUaMS!=o5LV3(u`%K6fBqf#=v7DL;BsYNUSw{c$z(tSWi zvbd$Nh5ZF2^gB!>B2Ca45r{g~Y4XARYJ{eUyJtqj+>m@nXPm->MT2oqE}p8)Ah6!% zaYrlXc7!W@i1gNsmpW^KW&xy{VW*2zJAlyoSthQ8)16Zv^9UI-Wr2eh*6%E`wk^zC z6p!QX?rsa}HPzy(RFwU+riHupRJ{6eZ7Ub7v6(NuQ>bg0K{WD7(sO96<+?)%m<@ho z2sInTB|w6vO}EV`9ca8mh!=)h>#n}C_>3_%+}68th-G?79I$QIkuRi7Q$)W$0Mgxf z3(OEYQ6f{Bb#P%VNLIUO-#Hj>Xmpx|Hl;8g#oCSzkq2;j)f=3DnX5YHv2osgTCEg| zkagck>H2O6-6bD@ex{WrPA7P$=3BM*vH{*0Gh4q$krn&WiMs&-;1@%nL zY-@;Ra|-X4r>P~0;;K0N+(czese$H_m9|d;JShu{WGYfK8i?1^wvaNry)H;fR2{3 z2^=k|LBfTZs4>;=P8F>N1LT@{K9}}OT(a!9X+AE>b8gNR$J>uAthC92 zII5dnI20}cP}^3T8trKB37mg|><^pSDxKNrtnP5@^^_%&E!1xOI9eS~LM}2RFV-1e z9zt-gojoZh#dIHCi~f}7nHA|CL3)dZ3-4+mYh7MX8`i`qw|gNzZ%&lQ=p--OcYiH# z=&vO?KLW0$PvsI_Kmjzwc&OB%dMmm#S&!K5D!@tN*fv;2fok`oyVL35NML8yJ>d=T z;*EO;bcC0gM|7OSye}>~hTENdJgCKN&;&@gM(on?0u=aOA&&Ms9`7nMnirJdT@Wk` z?acYC3$ON7&kZTro^J%W=BRYwxeS9w^;-r6B1sggjNM zlP~r|QDDl*tb{zlbPL4eS6REdEW)H@EH)kJ?tNFj8cJX~Eq!qcW6l0(zMK;P=LZ~v z-N-NN$*bVFBm(Z{-4S|xStT8XAi7$0plw4u&W~?`^ z4AumkEGY^jzXRD3_oXbwTNj!85#EB0W@5#lEBdWETYV2i!+CbU&mip-+vfL2bfLL^ zOx7YCmIs&Dy@rCqO>rHB3q!thCF{qhWYZhYSuG?x1@D3~a_dy?sjNz4Ue7Orgi~3H zxbxgbPLocfX6x*6ZIz+5Jsfy5)}%Wd0DQz(f|@KjpR|;)S%EjzyhBHbQW!;y@`eri zZ4T1~z>ziutUL2?t7eeKTPL7~f`u%#R;P38NeOrBZEaiaCR}!6|2ku}JHv+O1Ck>S z9NHt!d>kdrv=QdutTi6?)vBT82K(y=1w#=mA$bOx(l`Lb!&9ECK(J%QHWitg$};Oo zNz8SeMd9Sa0|`MMONDJ$RwIFQ_L@)7o*c+tq;y9iqrGZ z#S`N3x;5E>xTYG?kHgcXkvTIA0y5NeNzLQ=#m%vEjHKyBiUvsU{@;MP0h{zRJ=D~X zArE>E=uOf9f{0}&VFXg9j(q4{cO2`U^BlZ`>S#3;PPyOu`qY`8uQ_Xa41d5c(DnEN z-Ay&){O(iUW=CYey`B)(hV8V_%_iAcu|l;Y0DDKj<$Sc(yAPr>=j12}&J93ppD&Hv 
zbG!<-uRgm$)mkHjPE9uFQK7om+#q}4Bwez-FJj(yRU39(z$}PUm&5z;_%3mJJAoc_ z#wUrK!>8Y_kv?|No`{3j4d9_Bur0-;)^ih#gA>a8^I;9V;ABztBY&q zF_hi|t9Yljjwx|Ab7D!td)H@n^>RV5OCLH7+zuh(FM#v+r9sPY9b(AmHYU9-RCyHY ztmV?Je#ECN`$>YIc4NNwvl%Uj;y&?08NA4L4ti<>RZL6@7qL(k(@}5aIWj5hMQ${gC zeHsuvcg+}+L27qB+l=n)|HR%ilakmg`yBGc@@8n^ zY`VG;+U^clPOnU|ngsF}e>%dZZI1qADLjV~(YWu)y1nk|vz49}u9O_c{u*deH#g2y*4aU&ugHs6bJe{BA7Aux;URz8K`$k^;sbKi)PfXQN%2&TSmIa_og7TPhhR>r zL*G}@vv_RZANRs6B2Pw^IObz6s%&NOYK~#Z6t71mg`i3JRjo(4>zm*o!lwaH3A?&< z;AWuQQU?>wc>mlak=<(x8JDopCp!3yQ9+%G5VKcgc55 zSr)d!@w42b)#bvl&4g8Cv>iQ?G%3y8<<2Rd6&Y3_4odsTi;`>%x6<=z&ciY2yqk7e zp_PV;w-edk>6UXg1kr5 z6Al!IkudIUlE;Jn67YN7cnqUOZ{N;&?GT$!af~+0rqwfYs!?*c+{R6xctwV-=ia8b zlf9Ym^zkkaT}6u7Sb)p!3`0uz#;)*QgnPL-a-PCw@WjcR!ry=Z_enDQmcV3%dAZrc zR}mM3Vp@UIvJ>u8LRm6rx`@-gi}e(T{z=n79cW5H>8%Lf7O>*|Vof3d93N=d-9tty z2*YRQjk1He<8*v-=9}wOI>HaH$`yi2(zE@jJ#|N4fuB}m4**2h)dJI0;*z=rJR9Hj zMMQr221M{^J^?oEmTcv)RW6^L`%H*G}U&t+#jB)Du&qyrlI0 z8tTI~Dxwpo<9Qv{_*s|Vu<`sgDc2ngI_FwE(n4O#6?eo%9grAZ3GQLVVdj{hg}0Y9 zXEAyq+Tt8eDnVz^F~$CXvw7It#$gQ7RL{!<NHjf1?Ye>&ckGfjLlhOX)g}wYZo$QMEM;WVFM94GI1AGb@|fE?Fddv zTVKgE=*}qzq4wCk{slL=_9=CO#{j}0|7zOs9H8V;Wo&8Q6Z}fijnhQuDC6KO84yeA zUlD*HA5UYwg?U_EekF-JKW7gWL=5ojq&K-9pG8mr(@R)xMF_UYdz>x7z+wFd>`I_G zFWxoA+O0&`^~rAC5f`uP3NON`BCW%NTx@`cy<;F13}6&Z^BKDX^4GivE%mnISKd2& zJbtvFy@aWXA7>ChM_wxCj9K$?5Pj*EwhGR68VcUwP)=!r9?i zY&Za|X!IehOW8S}rPKJ}j!dlA?3p?U49Wf^T;N^%WTLzN^m67M4z!D37(qD`%Iytx zmxNQ$0N0+@j`-V4hwjG-FIv*Cc&nLM-yD_n9LW^11H!pAV=c4LJLI57A$guoF2mYT&LYqi=9&J2p zjoE3ZeL)QYxZ^xy>@kq4pI=YJqksR=@l8^{o&;`igYJ1ftJd|XZpmxL)h*8jAg&I1 zXFXBk0bhzMH(pF+9-ei2?C7DMC)5(o_x+5>ooVWNL5L9u&tVg3BrRu$Uv~3eYr|`? z_Giosou>z6D>JKZU;Sxz?ZHIt1R~VMB5&RVhf?sX>v|oCC-6BJZjgDF)MZjda&Xy3 zk;UJ8e$1ANEx^FQFIVSLy*(9U@k8YI(7V?Oh{~nmy9a2Zm-$pv8-^0?se@VEsf*tD zSyb&CaV$_>=POqBfpAMv5SZP1-7_L#jdxAVz?S0ZDzt%arv#Q{56=gE*m664YxeGx z12q-!5w1~+fJ?vuC}ZjZ<~NHozGVB$l8oQt@5pVm&sO_{J<4SMW^JKwa^KFEG3k^i8>7Wl$=0MAuSqL1{7%)UexOr0-MQ`Nl**@P zUx;N8pDtRlSpP`Ic)K?T@mze73XW)b{g5FaMWOf_{oWoZt?#QbtQy0VBw^F-mVigI z4J~I1!Wcn%VfQ{CJMeaE#VhUAVrMH2 z-nRnU1JqGWcI0`Eqg&s==4s(vFO9oZdDmxfuGHF*FUD z^VwilV+W+<8GwQM-fppbGwYbId=fd{_17;Tx1Y~t1$HHJ*gS^G<&`yEz3jX9zJ?LV zB2{aK>g26|5zGcU5r5Zj5gqcLppMZgjc8F+tGK-k zD=h;!wPHf&`?R3isxO1yum*!5Gr$$4*YJ~CH_u7qverz%CY)X|byTo;*o)UHPxbts zaQsnfr<|{3^|HF;uz7>6$JzF#^FqD;qNpRQ!r&xt;1!VD4WILZQLu)_80PtX1__^j z!h*{l_Bj$R)LzES9$xQFW(+mw629d9W^I0PcCmW@dkMtq1B$|x2jGcjVYj?-;ZxXi zE%AVU7k89qPjN-s_=L-G3&Up8axTladj^DKC2+~MRIU~=REE}3RVF(q61neP%a0GX z^uEhwJO3e$c&~F>P>}kzOcVp83)s!N;mo#>)f&R@MGl*L-y{0=GJ#o(Vly4&Am#8# zg^D+Vh;TxlsLrz^XIT`a1>sMqg@DVa2NGql73lIOAM4dmVla;+C^aHPuP?lY@yt8a zMcy7-#0zr$0H21$V4di>EiL-wp*ub(iL3TGw{G&TK}{V?QeNqb$lIw7*Gh2wh}A7X zLH8`H#Xo6acV}+^C|(;O#C^GeU%a)syD^6YzuXJCDU&NKtV%4nUFkpO=G>H*6r4R< z{yeH~&-F0|RRw$Ov_v3c4SJ?Q$~0kKi20GMLhMdMx3VnpTU_|WO)$TEG1y(DY0f#5 zP9Zefv!(ieIb0Q~F>@CDRIqtj6C=z&#`(S4)}!l~%~4~u(|O;Poz3IQRu8EZF;NQl z(4<<~#+0%rGZu3K-VxsL{K1nA+jj^bo@_}`e6*O zF6KP1XhKPOL4R4f@e5I@pm~K{XK%wvHJFMKPfRRZ{3968j2%59WI-6`(9oC1y zxyU+ijegN>@aXFNLfgM$~(EwXYlat*>y zqe#iCwxy2zg^zB9aZGU(GLVqZ4{3QLg5?emxtngMZOPRO`Zn(86bKj=<`IqJGipTV z!-U3tW}rpH;kqxv`YYH}lqf1_#`NTjDJUxV#+RvN!#whBSO@EtkU}XmJ+d1TnASgE zS}0sGaeG?aVy~s%WC=kWA~Tpdp{6p>84D zri(;jJtP&PX+1Sg5_w_^(#<9)xqZ}Rn);qjxD04PFLR&E2wRk_DkJ(pv`x`zWgnKaL_^^nV z>6mnTq`q4Fx|%WjU3spZ{--g~HL57KymQyVd^eSwLbu*J7$jx!$E; z(e6xldT})O zO-tRZwSNSUqPl@^%XKzpjet2jGc#Q@Jetj>=PR_3WvavX+;Cp7k zZPwKD4H5vu0&>&^Yw^22f>!{Q;>AX7)tH};UX_wA1wV8?Fx>3U4qskkq32c0S+L3) zxc@+#-=OI6<^^Q6jcR-QgFyQwIgQfUNtYz$b8kM5slE7sMG9ct5&l44^n=co`L~m5 
ztwJuAYI=bu*2~NzM~EqI$JR#Zcx1nEK*Wo^14eh4h1w>6x&Jf1))J@UJ$R*NsZXFj zreITb^2js@B;4mUe&To0D--nN4j?H<9y=WX;C?t-I{X5*-U-CHD+vRpTe}TgQ|`5_ z{%Ng{;}de*`WD-`XU|d<-|k-wbCoYrC{P<=hP=K4OcK!A#Vs3v0D1++i! zPO@zTdZ@@A6ppKcZQi8Oe?wC!Mp1Y+K?sN%)q_ht`8Eo0R5<$%I18rYM_+3BMA=Pp z9R}3%UTRcYs2$Ojrrf3A8-*9@)VpZI^`HK_6fa7gS_${oVJoBNr}Zyqxt%6?dE-|T zAp1!Z=8+;Roue27f0`-wfjB>VfC$ z*oWz!C}%O@?vraa)wx=;&esUOC;nzfHV#z+dl>IIXD51E)nYxb4g2&%FpsBy1X)AJ z*SyRXPQm6CZk2)j5ZSy*A;YW;|fXOa$tfxAILshmB>U$%5*4|zbbUMN0G3M# zBe;fX`-IzQ;}OChblg(Eme}c2;vzg1zB)1>6gLEhtbr}#HnST0n{+OqpKwRv#}w26 zh2<#qa0jXffG7^t#vLbo-seLdvcy@^y!{z0O#n4z1ssPEpJO{)6N|X&@eGznUSDog zPtm9O*r0iHd-MdRa=oVXK7VB^FvDHl{RNSHK0lK2TxoH<^2lX-VzE-ZKm+LU14%(b z8B3#V8!2FNLYJKZFONrQT$FkxaSsIGI)Sp9@d@9kBW}BTP-(tlp-|10`rt|gn1d}eQv-&sIwj|MG01JZb&Be4_bdsI z=u6M7DVXiPkBc}q*VOuIhao`ym@q8TFMO|U;7>OdKSNRRa;;t%K~QCpQ=f{M1!<;@ z5BGjJKe0uu_j=OoJbYXo;KE`5r64>@?3+C(+q!lS^#Pb`ueIH%>E4YswbrpZ9U0zM zz_cbpjCPFl!NVx#Dtt!zS!~FMM$*Rhg99bhZ6>~CMc?HX$zJ@J2GV*8tJ*$%9_3`N zGjNY=0vAZxSt{(YI2Q)}xJ#;W?zsBk!tjD_M6v06g|((kVS+ry3=J5#?e9ngsFyz| z<%Z9gojQLzwt|WzCZ6fK)C^!f5y#m#lY9zIS_rk#!z>BRrg+E1u~=k5+#}QRpc0_R zIbCe9UpG*+%j-R#@6)uPZ}1e~?Oh1cOk00-Tae1|Na_@lA z1t#b@03&?LDR|7QVSwRw=&=)ov%g`QDF;)HadO<@Vv7h$aLl9#-AD(+D{a@i_tsJ? z#dLQ=q|#a^tfwW>1l0W{(eWn~M1<3m09+!Jj9Xs4MDyw8TCH?6opO_pH?r(i!0jjl zQ4W4(~js9(tQ=we>DJZ#+B_vvYHLDA`MTPQ(hFRC3vY#Nl;1tx7B3V&k3t zZC9bzzEAYcS%O@f?o==Ieb3ga9M}7O7#AcCMiam=t_Ao_@2b3N zfw^+(6)iU3GQ1%j89QhkL>b7qQstcB`BtBr^}VS|nek#BMxdkDi?G3Nso6T`9QBe! z@k7wamP(1{m>p}@&)yN2htt)zaRF~)qJWWPG9R(OFbbNJMLv~@OLU?3%(l-8%%u3a z<{~90$F61=>y}cRw=*4X_V^rG0sa$3Cj`bN~sh@|9UR*r_0O%n2@l|;RKAkkW z=Zr&eyR*Dy(zSQ>HYwFjfw*LDVCEWmA2~~{OhHdkT#=smz?>~oYrEDrS$b~rm#2cy zr?#YPnOI*%WTP|EvGNhDtATv_5(Tw@k#5iU`}R~QEXMQAeS)~7TJP-ow#8AV*edJ93U-_o@rPn_0zC$)1(H-#oTorlsHxa z=`l0Pid~J-DK-r`k`~dESrRJwa}|IWx-8uE4aiE|P+9>t!wdy+CiGDpN6YJ4RT;jak>9YRf~^3I>>Nj|1KA?7tRB+x0g? 
zO;uhCr~wjmZz3oKjexqqmu~wDqVA3>l&s5tQx<6KcV)I8y#a*4xoP;f`TmFARLL+j{TO;@h@5AG?K=Jy6`c5HZ{tYiDxYp&72rj%%(QVw>ZJ zl-WFWsQcE)Cc;Bg$50BaK3u1SE360XpMeR@u)tGXt5CqkhS$XOG@TUFVUOb z=zmM!X`_vJ31-9hr4}jHLb|U0p98J@tm=J>DlFBt6da2$Mrpe zyEG+50MVifS2b5HDkRArdZgKMla9OWamqH|DSUYtkEdMwzLxwIr68E0-lb=H9!VDh zwdp%ZrV-SV2A^ikyBg-QL>)Ahc9v<=bM$f?=X2?vWG;x7tQ~eKfV!{LCSzQ;`M8=1 z`^QU{g($5>1#ZdRVR*8orO0~ z?;TIMGJ2P2H0h)wtOC)25JvmDd*Ij3?egA@QYk|w#e3+LHZ>WWWo!TfK5$x2yA_OE873R^<<A+P#}lW|so(`Ocqquz`l|mI+@uQ$`cGVD3F75J`WAlMR&8?PX!Qsns0w z@H=lm12t^lHwT8ka##&E^B-5{{^(!?H?Rx{A~TW7w~zPeSq08Qh5MwcP+mn?g+y_* zyEF5dvMcRb{%FW7q#JeEO~)I~cZsnJG3?iR4@^NwKiF3tZThlnZ^(xg*woL4+u z$4*mUtk@XJQLH4+p_)OH80{qaj0?Q%cv*X^Tb#C4MuULV zNL`rvNl{X8WR9God0z3$GXOm6sQm8*&}{gFil1Xo#<-_|Bv~v_kC7Br?P(O0 zATJ}se&p(*r+MfKEmf6PP>3F_aGEsCb*?QfT8*w$tqO;|m5>+*k|74Um7%5m_rfe= zD3Cq9M;+yBrgc3#Zt576U{M8=+>2YRVlVr$QpP2{nfX}ZmSFbMBe9;dot|ETDlC#> zto{(rl03d~zLwPkT}pSX{S97gGY_`S>1uGY_vz$t69^e^9J(yX@^^BSjk97JkQQar z0b{H~>l&rb-AMg-i~svLu06FbrIW47yysNC@YUiOBxQQ% z$?_$-Thu}EQ6hmzGqKwr+yOgN_vM)9YKH-WyP~*f0%yG{$^gCHm23GO8oJ<{wVY-^ zP}Vt{<_9d38d-wIV+`xLNJ^9m2hxcjH>TD0!g{H>!;Y6HxnR_a_tDQGnNO0%b;U9< zZWuA3-x)DPw5YfZX4H9J7l*pkrtolzHlF6J77AS06IqO=B!&M>6U|Fk zsiGfKQiTH@*zf5xtlwCG&vMZ;ndyz!Y&N*h4{L+w1=m4-1Kk4}nFfLWU?^_}Gwcufg=Z%j*Y{QYUrNLNeJ!XRU=}bU z;@+)AnDz}g#ZB9*$eG$oY`AMOKQpuql+H5s7^sYZpVnqjxC)GDajVAAtDNqPfVYLi zXYD<^{f_{`*U|eDLT#HfVi+6uViGB*MQ8e}0HcX-27ptd55`s~-YNez!j6jm1Bb{Y zAZ=grr#AW_3qC-x);@FRHo;vzzp>Ka(C>H<#d;xj9Pm6n5?s9{` z64w3=Eym+`g9ex%_=SL0LJA-tFV70VXnrp^{;N_x5p-12tJl5;E@clsokW02;qSdY zc+b6vsGFlBYOF{B$(IrZ1o!Dtwxe!Hzx%4U+z>AbITqRh_P)C~M7cOl_GWpOuYR|9 zSOkOEyh+toU&buQyAnBzjC;m+R2S@@pHGaCGkyaOGmQksrtX^uL}0p=DM0%hoy)U5 zR?;{eW$KCAwMTs_dE< zMKUy*@*{5>Jn_e<>HHn?c$Boat#Uvu8w+F18iVw5IVqa zh+xzb2Rwl&HQ#i}E`g(DXr`mCyPk?uKZc~g2m~-01YjcA(`b=m_zz9|-@3^tqG51+ z0LW_Utj*dt@Aw`{LD$Qw@Pq#b;Bu7J_WMSA4wyYZP*d63~*H4 zxH~+4?TAz3=b_;GEPZ%i`Tpte65qDvk9deKS=EEM^~bi)GTG1F>j4Bjd74M~hGy zlT`5b0ZJIV`=#kqRzAAk2$5Qmdd`+SR^x!1`7amvkx?Wd?Z=7f-UaRN%ez?aR}X0d zEcg-#&m9B!KM|$-)Ei1)`z4mbKOQCW^gwJrpK-bquUyf@ZXG)665YyW-WD(vt8!apnGx4 z;r#MIz$7u2qG87*`AdJ0Is%R3yo;7vfi>z6@p+;^hN*@Qh!2rpbTiFOoy1cqKz^|J z68dEpA$$)%3IMN1R0wsO((e{N@Oo~EEHgaSr3MIIsK0Mr>OLxNht*`AT5O_)KvRDT zJuu2ER+VrYpm$N+AC_4Zxg`LM*Nrr24eVmbOMa|V=kx^_yZE##rmg4oFV~rFhhes_ zQiIqQ1fjpbUQUAvfo3Kx4oCoHzZ#$c-~8XRH-HEQ4^K+Xl-cPLnkGP*4;tyf{sL5f z{v^OU{kcse#Px30sfYSIX+)YXUCjk(&BF?a5Oul9!0Gd&iEjQ=fQz=0UqhHY9?Vy@ z6%LR8K%NDtJ#_S|ULUzZD}@1yVfcxPtkni9>S zeltbC&!kx9^{Cc+R7t72+nFjR=%e(_C+{4Q$I|anb@^`aA(6)zcRZdbmv2~w9BBfI zikkYTCVV1RU1$t{8+Gy~>PBH3)qlSWqZCDodd*H!fDB#NjNZD(tJE+uZfqc_#kl|V zx!M~(m+fb?lHp$o<#}EK!NgNcDlfbHx^fVsU#uC}4EEcF7+aqhVRftLGXeU0cb+IN zd~4@iy;|mzn0*ZOR;7GES0=*mh)gD1Mo7P>{C(ZcsCV#Bc~B}IUlH}24)O7F9J6Y! 
z2V}jXru5Tzg|Q2BBYKyYjr9{~Pyl=SdmEL{7^k~^Uw5Xfdw^MKt8wNdM!PfaQBhHk zRTn<{D2ID)xRDGZp|CV9!9GE;1dFIxynjL~OLb?E(cAeAwSEm$Y&= z?-Tb~`1;9>%QIW=+B4VSDW>=6(d6>GqnA4`&)7za(+E7|O-yo)w)^nCM|UP9OAyiK z<0G5BvaZ^BI}q8)YK6bj>zs(2(Rsj#!tRUpn_4)$uK;rP2B=z{ReShrtw!+^#dq5a znm2CGbpH6w)aA3tdyKk@V#Zc@b(m$A_L)6fneu%sC*IK+;p?S(P}HZ7altod7)g!l zFM#FGw9~e#KXE+^R`@!49NMf+2ZVb0*Z`ST!iLIOb+mQ4F}%C=$Ngvc1Aszt z{jN}V&VlQ*02nc?nIPmHcxB`-U^!mQMnjIf`oz^&vIqu{5JdZh;8wgl6(iYlNnFac z4#x9gH^9oTbm*BE>W{l0L3lj=5UdqCh~e2!XWOT4vMzqNYC%!6q0opwrGcobnQ{K= zIjDF-bt0$*0aT#L`?ZonzaB-v9y(j^!tw=}nEcQVW=Kwr34gokd`0k3OZ>|n*X7^U zITc*-_&{Xg#&uS+b)4pm`^ITd*FSp!n8l>rn5r8B*UnUB^bH7Apq%3n&vxTr{7M|f z#q@Wha9?`2XufOoN!rdNna{LW<_XS6o02h&<{Qh_D)Ja^bl=H`NobmZ;l^uqd1qTv zZ+cljqY?LBkErgYjjtihUG&csq@5Ny|KX;!iR-7j-$P#3xVNp^@yeq|>FSR*pRx(p zFEq+u_B`1{S*m4a!v5o3d?4<80n9c5SpQAer}bVZV{hjSfzmpvm9Eilpcj;E$v9ym z_*Hsn!=ucIx>WC(YTL9&_vB1X>XsAc_%hrRzT2kx>>Ne({a+VNR!`-Ue1pe{QD-EL zL;E9BDS%iCaCH-28gG}n|Jru{<;(RGxTC1XanDVF*bj}UQ*KOU7ltOlwq2-*;ozvZ z;dg(5VRWRepWvAgTD|LDEFdgB73L$dk?WSg(8JFgkf?_X3_5ic9G2s<{Q^g_yca9c zIY4W5Wndx~gI1N*0j~i$!>?b@U%j`s*S_tDww2fR@zRWu(~t~+2U6#R3*SYe(@_=`K!eMpNSx_dUd|P%4OAH>;W2j>1;l_sHUw7MM1O2y3xYK z(E_?z&9UgyKFf}i*KZpTRI)AgGU)|6sejl*#7o;CPB@?Bt#ek&CO9Yywn|4LzytMy;4v2MOZvwld zvP-U8200Ke!z_8|`HKx(1$H4n(cQXZnHf{~U1I@DF9X(X$OeY0+(_Th#U=2*3s!{O zAs+wcn!JdCC<_d+BKKP%8pP(|DlYdtvq|8#O$FZLh^O{9eLp5HVh@*{DPr1YLhQqh zs+nNE-I_7y^|5>B>0=XaMK%{@zCs0MI zsl(L=hFEc7H&=QhhxubbmroiC)l0T8oSmAuvJI)6DR2F2s`_6Vg8|?UdFgGFRR8V+ zZWK-VMg|H;`a*u~%KfDY`)^Y9%nUdo4Ca2sza@u%JSyr900Jnd*yu9-Z}#&F2Y-RV z%>Fd!Z!W0+I>NvHDN+>xdZV_Sj^6*fB6A~9*qXq8UWJqk|9hJ7+o||v-v`t|H7|+e zm#y@E?Gygv1QZ8>{a|Y*U;pDW{p)@I?W3N=03l&E-z-Gqf3qL|2SCs=xU~K2F0B7e zu>MjpsSAJr?aA2YTHE8lUGd-FIYbq}e*7Ppg8q%O_{Wo_pWZ-!^4zQs{=b*~OEpSQ z#{dG(3Lf~cGZ(-7-+%l<>N%i-($016W&fM~{KusH&(i}!tN)mke@Ofv=kp(v@)xG( zKPKh>6%*i){$G>wBu2#J(nfkR=DE3f{Zm?6vD2qON3b@!`sP=+(`6H0I_$TsW;?WD zD>r)88qqJF(Ei0i|Haq*E|*UV0Hgjy@Bs2>7N-;BC#O}_75-J{qKWp@SA>lqU%57O zv{<(!o}4-8^FOZ0f96X>mTvBy{wU^uT%(Q6hib4V?Cc{+rRq6RsxQYR0mqmYlOmf_ z`XRm)o}V}Tv@2Ec(f1M6jHxQayt7Oh0PB+@9hbvH(S7=_c{xBD(5;5(Pyfmf9S1<` z62}Vk@?wF?C$1JxeaAaQ!hW;$XPt>BWNk(sAKnPG?Ooab{GayhZ}#j?2zVRZ!m0o0 z&$5Cqr7&+Tik$>;$KAR=*X7sxdKky-lj%uaSgP1HA7_r&g@d#$+TT3#e}2D+!yUlo z3_t2E`Xg%gL%aWN%(YOh;K|s%0!L4jpNHT7cpd3>=&>alwD8{2B(6gGoA_~&9Nkkq{SMCdbhScki8iB$D~Gjq(;X^? 
zpum6bJN}UH|0#0cN73D!%p_j;6`}flLYtu{FxU8JkL9<&QxpZ9jjw}*Jw2ko`wo6T z2&m%~d3Lue-LUM^h`>i}L;sRnwW0&KxpTOaXn)G{0uH|HKEuOT)R`z}(yvJ${8Cx} z>KA`?ws+`=!udU!b%EWs!MyA%{-w9&4xr@Gu3}aHIHYqlbV4s|31Dzzv-kZkfj0?` zfBuy`RRuPSC0GVw`ojr+Mvm%_^>MjhHzW?&?E#-2IrE?A@XtI}WDa;6E3=Wz|5!vL zN(*`#P;d1Z+b->OwgKwD^fW9WcvcZrU}DAj!yUzz^wk$#tSN^sJ#F?@hV+w2{!4L0 zq$?ol?%YN=e;$%AtH_I%Jbm>4h~A$k@;9>cAJO~&h3FY!ZO_)f-92CT9tNnVwF{6h z$^ZtYJ!xcO()}Tgt|!j0F}6)6wsVWqauqpQkK@Kr$Q8bPk0D3dpZa-@en>twH3Reg zVI4H($Fs9iuH3^;VTv3jCeIG0(h(e;b0Ji-xbC{#%;^V-FtDW zHJoKBDILp{podF=*!|?RIj7a1EC^E*mQUt+Y4Q^vO+fc05r8{U17EE#8j;#y^s`gR zXNXQJQUs5R&kO>kKPeK5!MDS@S(!6AZx zfv1kdc<=Ed!zsbxyWzTg)8{x=t;e;FtB3VM!~}|d@jhvG5B@cP=uQ!^Jnk|O^7?JA zY!U-uo@_J-aUnU^7cF!9-Udva*3D!6k4nOWud3z%0k7Z%Yw2 z`~a#L6%tO_e7u0x_=CUn0jKZCHy?@uwXDPq$hy^3Pxr~9b2zWxD|rbG z4fL*a-lc8}dEi3ab`EWwr}$EbOFA8%)#-_m;~G&~_42{{(&n6=xYg~+BZtK$@!G3t z+o-`icjy#SwNu|5WgB&WReBG!!`a7lg`Zk8s23)iZ4CV%_TDqB$u4Uf7AuGdHWZMq zbVRArMU-9yq&Mjuq<4Y^0j2kj(tA(n9qACM5khYPLWdAK-{!uZd8Rz`&iK8*-s6~m zCd4bb_OCx$|X zs$rW*r>yU7`;%iK|I6%7zZGQ=H3LL&fXo;Am?2X;fBJ%bPt7#Ljgf~JM(L`C2uzsN1*P( zaD#yjjabEp>2mqY!=~Iq%;K}3YY(qW3=rpA3cjTbEtqU*EHNCp3&V_8J4D?4{`qGo z#lqPF;Fg_(gYIUU@6w~qu%!WEW((CT&KpmEu73k(=HlW0^xj|MnE%M^(CB!dgF`aN z`JBT8jbMMPdwJeYFxa_B)m}*{$R9&?cKRyEB-WD-`;QBds+)J?CuAK zr8(#wUGPk`B|uFV#;O7AYST)ysAMAXY6 zFU*RM>UA(5US~fq;H9K~$2ahvRgmE z)5;>3o_|diF1#-ON<@Z`?xKb_s+v~(flb|cSZO#=QP|&|LcrLyW4PRdEXU1HEBo`* zm0U5Asx`}sM78J7HkXdz?!VTe`(Ka=(XBr=*Rc8EzJ!;+LAb!lo}eQj;#G@kuw~agS+GhHpon3(l07VyoAMz1OO7=@{-K$c^p-I_$>GUxIv;GUcSV zqpaq;M@sc3JX2oyr->uv!q&&f^d`8OxPLsG*y~UNXVPA=>NTF(o`Rso3qDmB>ywm+ z3#N`x3zcxJ^lkeU6{=+6Hn41ZVA)#c&cW$fHAtzGMqnJg?!z*SDiJqQEE#Z$hz8cf z&rzlQ6}pd8sA9U? zbBEVH24IWB)1*;PbG)H?*Al6LP;twRDgU4NY9F|M*{G>fJyma1Vf{&1jnC%};nSm8 zHRsvUJ6on<4|LFLW0kbz)jJ{iNaq;z5lMdA5H1cy^owp+g28isI%r2S&L~PRf#q9D zqCm%lKF#ds6wU%XF=Y7#imz7KfGm}QSGMYrN}k#RWhO?0I12?=h0x{*0Jrb9v+|Hu zENJTst;qX}c6&cWfIrUG#Zaaxs)+%i>20c}zEW9Wz*EEnYk$kXwiv$irD;edB!S0q zs1D~Lsp{_w>s`9cDG?$(S*=K=t0NgoQ@L_r7i>8K)2=(67DxIKl5nI&Ypu_PR6)(9 z!XEe=Z0*AilRD$wIB31nGqZS~O0cgI5v6 z&n@;PYsHVdbBSr0s?`Uze7OtPq4<B5NzL-6r10 z43cc)?4gpoSV--D6h&I}zZxFJkQt#0*aB}n_F{)sv)UNJAC2fhUT)2G8j)#tKzq7J zlDmK>*Z#7Q%iLgpeBh{VzJdjqV{WcPH}I>W1l6Qohs7s!%l-@iI-D|xtpJ;?w6NEWUvMI<^ zIhorqZ$qbUPcd#c9`+Y&Tj<8`v0xqLGbCc4UnjxtCwU$kO`a5XuVQpql}jr2$fo+x z?h7lGDTwsL2tG%q>JHEXwm(w*y_be{8JQJ>pKageQF zB6--RIr7j}e|_UyovutWulU>~B%d4e&V#GcVZVe$r%J1CtX6f$^K~8#BrP99j zu)|C;mc|FLhQxH9z!=q2veIP3Ta^W63PJN>#QT@heAXO}GiuwlV`k|Cm`u#4Ybz!? 
zXgTCZ)YIpD^`vFU>QnEPiE8;X175a-$c+Aafvz}yZ3q02kZwu}UK0RjnVId7M zUV_Kox2d1MV;ShTor!G_N`)K<0WHo%UiaRLg_$^%jHvg+&vzrE1CwLl@mLq!=$-xX){1RZO$t%s9f3~7&0z7}-Uh3#l$xE>%;(1WSoT{71ZE?t@- zYl(IgX?8-wY@%O&iEft%^fCGXxH)N_`);hs+{;;w+}Fh^hVYY52&aPg)3jop>-DE= z8WBHjXC&HoS6{48tCynMK?Q!^#K5qvjS@13n_0&)-x(De){5dpq#=${UVbqG^Qaa~ zz6%FpcWYcHBc1IQB2r#nB})|%n$B6?t1?;F**`$rbi+TNi6d%NxG3njQ0Hz^?1$AD z)}ZFOdyxPOtX5b#+_{YOyr~_ua`!)iE5EP})dKECM{g8F>|UB3hOhNQy$S6~fFWeo zk%Zq)EoOAUx3(=Hnc-l=$#kGi%rD)kXIbZ6Xp-lFd*H-lr@_bjCx?_qEsMoM@m6Dt zwAtFthw+=ocpEEYP)#ALS@HnW1KSJP>a{s=&x6vG#Y3ZcFhm2EgXAs7x0CH;iG?^tPc3n>(aI%wLAb8*>l()vY zgv<8f02`XtDq1dcf`5%fNNS;1chUX=WL4wZ$;>`B+(w_8*8?B&12K*8BC zi%!x4B|#FLU*!q270TbiojAy*1958I+Xuwjdpx&{ZVqF!@xn7Y z)3OJ~<9B)29Zq1YKS^c;SIzSjeF%H{>=K$J3i#v-(E+a{QERm|Dp0e1f7OQ0QpGPS zt2NI}-Hw+XS9pD}PdySRVfh*oLvfCR&h!?lETM)SVX^b>0dM3By)Jm_Tqp=kEXdzT z5++zV18TV1iFT{4o5~Oh?ApZ8eGz!^b57#Rnvbb}ToBMKw+^?PXWnyc9Ux7-57)%ZrrbnFIAnA#qT2JD4>x{w05Z zi!v&_*g&kyrv8`?YL2)=F#S}PR9_qP5efiGmrkzDJ(2F9R+E$w`x=O`iwXsgQ^0G6 z({CoWWvo}gjQEL-q0KLOe-??<4j=fid8aEnavwjsB4^KooXfJP#wws;TF8fZiq|-R=rvSD z5G&igpA*R>@nvCRQfpoLNyPcgbHOk{lZp34v#J{pZE~fq$|ei+a`X8VRz>jRFG@UN)qp_*KiwH73O4Bq7TEI3)2ez# z;lA<&t{l`AN|uZ4aD(u{~RPW7iSC+lK&LEMLuCa zWM6B3&OGMEOrLj~aG+g15;f>3i;!2~Q0ww5ASB48wvc*I-RQ7)Xr;T{vo&0Xl1)&1cbVAf{KK2SC=h?4II8a zzqD0J{C%m>#lnbzgHNM%#Vp*CFv|D#RHdcIEf1&$ruMmb!Hu{2fGOEYA*#E2dkW7p zw`gc_pfvvWqt^|i?Tifu`8x(;;^h5(zA+S9R4JkY^IefQ$Yon58A~n*)E!LQc%yO) z38hjql3ln4>SU!ERTnPg9s&2LnD?5u?S_Az)O8Nh7n&RU*U+^Mj8oG^vG5d;V>ju# z8rM9`4U7=xD-IDs+aRad^{eRD^>VCcuj+#)y*g_m7! z(#h;KAcl{42VO#i1Qz$JUAHq{=jiYcDnJBQIU~5O2Orx{CZSU-cz?dfYb2ej^g6DZ z>kL=z{bZZgdgfq&3q-yjmcvQ%t2?2SO`tOBih#wf{wOY$iFw+m;nZW;wOjQ-E<(*| zWo(J5q}B_&D$>^HVHuh7hWh7PI0Kcz-T&kX{q1fiNq*d;o)d<*PUYVd9=;-&Zh=xRGx2;o^FM2aF&p{@yc`P zMi+9=uXa9mVO-M0yfz-U4lDeXYIZXN^Y-=qAIK+lo5NLsGAT}A94rLnUfv810i00F z_~8mBd~b7Rq?6L2QT0srU9rYgPodfspV$@C46T2i4eZ@ej>IUZ*&MQYvSW~>R4X@6 z72fy$`PB_Tq}91P52u$5yGK~FHp+TP=wSq%6)N=_vU5GMJr#4eoebJPJpawd1w>*) zH7fGSLbSpZ#~t_jjPnu4-|~$>tdJpdj~4IdgNjKP275UmiVLlYr&ndr{QP>aD8RM0 zlIfs|-mz9Ec_i>k)<7mi#o=}V3tZSl2LS>E8W3MOap~19`)Ss?J4Bwvr3ADdcIaVN zMCaKqN&MXC%5$G!DScZ~+3^4Xn^9%zzl$M9Rk8Fq*0 zm^@7L7XS8s|DjFN*n(iO?EtYlwAJ0gPwI|PgbHh1tksfkolMh#P&#!J=m?2j<+h&a z?=sw*dN=GDL-W#3GLb1#z1-Hg{rjOsKQi5pHwsXh(rn+(qYgqgPe)NvmqteoCAMsd@&AC`X zkJ4gY#}}bcS2Q;XM=w9;W0Tg~+jSm4wlfjADEG>swSl$@VTX^YkU$~IryI~ROJt*T zsczv~3c~WM%sWzI#@poW?cMGWU_NFMF!o!;LZ52X#`(Z$h> z9>|boo}@S$Wf{QsP@W#%A1a4H&cuWBt)qJr1B#|=Z*+5}MR8dW+k37Hc^WM~KJ?R; z40{mjf2);D_^w@7{ppEC01T>>qmpZFA%8bRCW#?kxPmfo=gt(PVnxyGtXDc~C&%Rh zJWbrXkmEpAO^vT@12yWmTo+d*AB^U zAA|eu;v-p7NS>@1`gRspvMi;@{&#q6@4gb_j-G?JFb){b%$ePA+5!teCrJqrkXhBP zw-u$6Pt-#$q~Fyo3cQ-i>xD$xASEgz&zn#TrJ8&Vyz!}_y5$yB!&atAie`A3s1 z>~d};VHvHk$0c|29tT?plc}&pw^5W{SxT5VPMs zyF$8P3L4A9E*C_+uzMBB&#keyAoR0a9d(53Bkcre?=6wqveE=MBld^v*}Zn&a;O=j z{fdihw^exU=G=3iMmBf( zll&6JZn0~Lr2%yFjN8l_Eh-rlY&AQdwdy=ZRO&1{^g9ebI*(-%knu>RcPfv6NpsoM zXSZl|$n9p7Nj*#uvdU=*Btzt7(w@w*y^@ei5HPdKaJS1@F(YL5?78UkwNz}yFj}HcElZNVl^XR-aSJ+JKpn#v6w05 zCiO0UsO7ua$8&{nq*IthbM)HlpI6m4q~Cnc;p)#krCRy{i+mENjMqjKd|BL!*Q@E8 zrNHKktF$XB5=>ES7N7La0&_m6RlQGm=0jP__d@XUX#uM2Q+VDQaBuleg)OFwu^}8} zwd`hFWeXLO%e2;!PsN_1L^2r%j7d}NWMNioCpuHiQ`5KA1S~)DEqfVK7E_l@W|qyK zTz)2$T>k1)&^;sm;ZGQ}R4pHul~L8kAtgj2^k%x%Jg*Nr z5VzUhC&=-bC*4A4E9qba@psq8^od{2+3xYx%dJC`j`6J2#XC3ePiM=+t#xpeA@JnO zJ)CbBgqDsuo#q~6X)Nl(Y_A8Ap_)7o?2xT*$Jy4be4K@dKHwcu{jqf6ZK1*zuXA2tQ6oWf>a;=5zaHi|;oZ5nAbUDqkJ(?QuM zpB;WK7}{hY*34Qb#@pu|aXI}cRsg$xduqfq;M`8u=IZ9!KtkoV5hzcsy`14x{1mhp zc3L|Fb33`{@|+Z^H=N+guD_$E8P6y6V+zO9%82FTqar(u#?p|pH%UPMp`^I;=AD^M 
z%he%Ooha%HhjFw8M`i&JLLQ5phckJxi92~`s)vgiH?*tzCY%?LbL3KkIeowBIu|OQ zpyD2Bc5X4#pzXy6%LA{JF23_W`{$W!mOl|sZPwUm5olG~DSwV+LThm9VroSuX|=+Y zemoXfR~jv-$05qZ3Xbc1;FTL#6Lh!wp0r*Gi?qfEa|Oe(pCMB!b~aA?Gp<{P%Y!=e zdITFUz?QlS(g-Cw4ug~sI;=(4QKeBpfKyLJYm{ekzs2^(MHgPDG}uhQ3n-coWur%ZFget;nP%Z_iyt*m z?bD)R2la;&Ad=}SPHkOO-t8l63m~W0u5v0}O|eOy=r3v%hq06UY{c%6SQ#bZP-OOf z(jrUa6r&i}P7pjK3*%X@Ze<8yt5mtvZ9z3bkMluP{I`ln?-kDg^0+%TTv^PNDkX!L zBST_AX7>u?vDM+v;$)+in9Z5}1{VpWyO7M<&YPp33e68dHKaDK?*@;<904$8&t`4B z+Yq8lqfRfbxFk?8fqZkB)zI_KXy#cj7ds@b(;VyTZj}X+7jH@5WqKuQ^nb1MoTE^s z<@+3Zxp6G>u5LTi09~Lxo4(NPcc3WkuT#&L6Kb(_ZF8~x5nqXaA!FAzykl|tLl04K zA>T;^KbV4@;HD!lXEpCd@B2VHs#{~;K|Y9-TW!LNxmvl+zIF0k@MQyH520q#k;YPT z?cvaBSM4X##*UUA7NG}O|7_!<<=uPviJBEbs(5Knz4^Qay8rvZJWM5<+`7K*QoQN zJ41iz0HK?e4}HU|gN@rRbJ_gJ0DYHzEQgN@tJLrn{m0Z*&!0|@6rv4Zig}xWsa*XT z+U|%)6pN1NC|Q-qAegmZ>o%i3vLfu_z2tJ4V3ExcJu3X=$n0YHfMEEpjn8QqUpx!%9Zb7K#2AcJq}w+XJa>vLQ%rGH`6-=ptC@o4%VQoQ5uHJmJsqHqO#>v`?SB01@k|VHeUsr=5?ZzV$jkBS#@^N33Q`Q{sKN;6Xle@OsA`(jX4s?HBr*#GUm)?( zxAQ>kc+atC@|^sK)}HzJ55AT=iTq_;;*lkHNbxmc);!{Qwvngnkkjph*eQySpWA2O zo?X?n^5AZg1+xG+LO0-Ao?BSu=19_Z{Ev*l2TbGjzd*nW>`6#)k`z+gB+AzWwW?jd zA_&mKQn1tagMDDKnVf}RrtvD@)@-o&G2f@NP?opUfw0VMrhslh@_ zk~it2Hp4T1E?<)J7Uv&IQ@8-w4LSLcsv<|^^`+t5Le)wo5s=n=s5>DroRhL19hs|I zsL{oDi<}lHeJCsPL2V4sz82G8B6HJKCvgg1ZgaJ1+H|(-C69Efy(Lzw_WIy}9zi^* zN!4r#rqp%12NWd{1s(3+h2L7Yq;l3!C&d8dBAWcvDWYa~xr^c|jW#IAK+)VTk@Qj# z5)&Q!ur!GznXm_vGh8>Lt%<W$bV$|65vZQx!92)nl^VezMluN zah;!!J{mpRcEvYZ^izOUz5G&R5;Qk^60!+;NzfHXsZ>@ zXExPQx>ljkJdZ)>6kn1^5Tn5#J_oPx53fBhO+E?>D@m8GT%=hULbRJF6D-oYZqHq} z6>b&_3gxv&A(uI`bj6num3Mao+GlYsyy;mG!!fqEj7GahV6(XL-X`z~&;Au9_n4Bu zkWV~YA}+nhyWyTeL)%d5+C|K$`zQhuu`*8d)sLCF(BI+XKq z)EX4#jj&0zFgK3C8>F1xQ$kkAVkM1o^qkHE@OVaQJ;RT7k$19RK{HgJPBG5v!ACgg zq*FfPSsi=)@B#MI^b}cc#z?ATm+ZB-nFT_`hT8T7#MhTYQg>=P87tU|tefeU%mawP zemfmw+j)W_)ms=CUNu5RS9=a_J@WcNB>(lY3xK{f)lTc7#QCGH}nS%Tpzrv*?6v!tMmGb3mDV8ilK-Yy!VdjTN{d#o_}=*E_}?OA261&cBAw}U|q~F$53IkE>CMC<5l>uFqJPiX@boVb+M?6N~|_H*u*=8xBf$AK1gAx~{*)G#Ub zbg$my3A*iu-Jo5i#c`{8@F%U3fmGiN9>@UvkjPdj?x`9dlp4Z(9*M9hiGGraWXfZGT%hbVlnktFxVXvc$!F?yGdf?zqqV~iC2kayTyZ5RI~Oe6_7x?2fnKKfyc?_Y z+C-Oa#1%Mee}*jWY(UYMLqGaz>`M6XDnDsz%GhS!n!UQHz^pi7`Fmy|=9BU!#)ey? 
z+y$8FN_uUpkHk}b=aev`l&+Ul91hd*g`>;N`eW*RK1Y`L+;yl8QVA2bp^!8csHnv@ zsOkVFA9J&mFTba{v;w4d7JM<$D+MC1G31br$F@;Lnf&mm>nyms4FC5c@xpynv-c>2JP zFQ^D4Y(5g}B2?;}?c-^Sd>pI*+HSHtsXI9M%4vS!Pao=Ph2j!BFSR`ve#U5c@!@^a zfH=myOWe`Iomd=c_x0i}sc1%eey6o-Lwfrq55@erqVshetSrzi5PVQ;lS$x|9Ca=V zHeWR702~ylRv8kb#+pjwzFzI>xextD+PV3vbb#4GsB5)ziK2My_K7-8GOFYyAEfp zTMz~j3#r^5WkzP&?c1gKB^udO|eC@3SRx^C~HBZ@lUcE_ZJv5F1Z-@ z#M0A>1!G(jjD%b!?~6z?o1RP8X{Mw8WS&6Eq+1XGBGD!@Wj~{~pko%+(K6@z`OGZ@ zr<&^dQV;jjmfCcbX_uci0eRh^kEl_*s~aEZkCslvgDB`Zz?3W1#S-CxFKmX9PiTGP zaA+aR;x7|{uX>@1OD}Nf$R`XQ_R1vW*=J(5>A?W?1ztOOEQO%aTvneDnm_+XsC5*p zItzgCFcon}Z*k|bTzOp*?rwetYsb#E?gOaBTp&-$z8ab1cX0M!jNd4w|1G5At4*~C zMuNQ;2enHM;~qS0dMfP*fJFv8Sr(~mbQpk*h_#h zMz}tKT`c;b_a3ytUJ4s=W8%qk2I@-=l*HV+gY>fs~(XgQG9fdw<9&bijBfDfE#w-e5qH0r@6j^o4FL@r>#S4weGQfDvq-)3n1xN*@8So_TF zdr@Fc%j7o$li0_$DN0H+BvWTZ_jKJ;9CphJ2<=Mk?s6`>&Dwp}89et8UcU1JeEQ=* z^qLK)s1AX?%>|yTju%rp!s*%pd|JzuiikQphv`>(dVebeLt*|WY49Rstjx#5ei3;^ zXavl@&5|J%9(CEx6u@fRT#Gx>F6a4ymx69gRx>k%R~L;K{4}0R{=kF(!JB@l^{f zKWL}Ku<_Wp0o9$z1~7i{(9+nq{M&3AYG+gh0ZyTpfR1mZSxA_T$X6EK(R#v*uXTLv z%&1WkP+1bBhq3+OwgvF5MC_LG8~}vFV)DX?a;<^)jRkfR`kA)5ZbcEvPwI>VYU1u; zn`*UmlqdD^ic7zlz_=!;^S{#t2q&DaAjD*)Ol_c~Py z@V_kd-h|e89>Q+O-Dl%>UcCViEiqRqWa7$EiDFYJ*d?tselw|)f`H1QdUZqkDOK1} z+7Pp6Hr;;I1uxOc*y!?*<0R}gQn8)G2n&=O#AdR%%Y$&|8j?3qKXrXS7g2DRz%sjX zAYwM3C;gjke%;XwzFcmz7R(6k#NjaS`bdDRe|tR7miCPAiDD|8-=Gjz>n%L&)kX{x z(~h}p&OF2_&s4!w3Up>MUZeH$^0W=n2o>GCVB0#7)$XF$G-#HWIhUY_jnpc|ERZj! zX2@Jk$zcKLrSDD?9^5Ojw=Y=Cn@u&zeON-JCVYz>%Q8`v& z+YF7Z*_ruLVud!g7~v}QJiIz8lA=M7Q*#E-~WdYs4%Uw$ox{F0U=i z2L(@x0zYwNH1?@kSlRlI4i#?04HLy&V*<^Q(-W+)WPI;Q>;U2kI!pBU@--q)M`0D! z59$H%$39nyBX(CuN5u&2@`DQyXRsQ*H^TNiNs)$T`i)n>AeAkPwo~$)z6K(@t1eMo zU+q^99NQd0IW(%T*|!~Ez4U1Tw)|E7{PE5bhh3YCC%Yq)>)=m%r77IS3-!^EL)ikY zN^_yNhjZcC4;7j|z8h#fx9T4kxi=+R>I_PN2Svv}6-4L#Bfj}MYxyq8`=LHyrTv25 z%v;>PrK;I@l7m@+YNrir#3eOdT+S{tt9kzUBu*S-TXBFv+NhpX z!zdy-Tix~VHw)A#sr61e&Z4&V@$bG27Pd~P2xB+8DjB~ZDH%c~;!=Gh8+k?QF|~k5 z+~O!VyIjDh=^c(3o%$pZJ^1ml^+!edWL{bnJUQCMc2?4SRuH33Ta)E>IuE|T-><9>#`W)aoj^a8$Sq_oN%_6PQOYiwID-6`5FKHhZj>K z7dut5%!AI~0#3!?V$hM@@{ZYlFYc4YofQCvVnX%s*DDT)6#aZRf~5UbFB0K$zWb}o zB`C9(gwV)Sy;=)z=YOto*2eXp-$uiujaQdkR(yMJp2vL=(E!+`X|7$|XNKr!Tz3d{ z?Gv}5H)n~Xd;u1GIcka~e!UxMkHANv&j8^2MHs)jx#7r}+U$E>s>D#z3Ikk-CcPT2 z`8%-D-;>6E_$_%7Tn5lOisQ()csej-Ed}iY-Qnf-BLYEmSwBeCA0*Q^Vjs02!f+U#CZPQO_`^VlbSNCNfE#xWp-Yl z!0-(_EG31Vu@C~6hiZmjB49e?->6J4ou3!N(|B|J+V3nK)fDLKzc|MDP4rh-LKJ=p z7xWdBich9Wc^^K=NgE;joiqOID`;@Xi{KCu{`L5%7%t+{UHon~M@6F!t{3*s3@Ana z=a2Tkzwf{6_uu{Zze+*H;}vE*2b1@P?W=xE%z|j;{8F+U8KbX+H@%{o+3jC0 zm_p@UTi65Yn(d+_k05@T^VsJCJWXW;k-yj3?PvM!-mNKrOWe`_gD~;GcluOxcrW&F z1)eNTEiwPjmj26{{WZZFdNz}y#K(;C-+$-7gqc&{fLP!5t7Nb1zXezR;d)dhm-Mx~ zQ7ze}p;pfi|D(k3KX+yR@!xJd2sv3QyD_NPZS@HZMig}3`9F;Y_&4|p*9G8HnkSFW z`q41n^+k(R{}=1sAovb+H0*rK1nc!f_x!oPK8D}F-B9{x-o7_nXx5igjD7w~QvrW)m1EA?IG@Kp+ zM3f7p-d_xEw}%@=nIw5>0N#~n{Z0G7wVFf^&udp>!^rua;|`Y*nbC7DhxrU6Z2Ths zyz2k(o$p27YyvTB^X{kHuX0oh9$?Tzc%Pa}g1(vhE83xY$^GxLC9%YvRz8pnYtd-C z_RIVWQ~-$32z(bwottLSYqo6*ys6L=i| zohxhGI%&-A5RNP{LULEWoleQ`0rcf|z&ZqME*Fvm02Lxp4veiKaFZV6qKwJx7;aJ4R`l^DvZ+2hf{k0?Cs{^^a$I043 z1pjiye*J-h7`SM{uGodAf3Clu+8-|aeqk11L`bytj(0o4g$@J25t2`WJSz1l){k^p z8g6F6-kEY0bibb4Qw6n<+%^u(H_D7PPA-dB<#&q9o$%nO234ZgY*M2quctaAbM21y zZ1>OjJg8*I1N#M;Ev<#_t)B+Ld=!21ZC+8;&Zkjq^<%Wn;z8VKFuzXf6X9`p&e=Xb z6QoMubJVb&s4|PnblsA4J=$!`=ab{O%cc_sn6ojtVGm{LMy(|V!2FVVz{Bol+MNB) zBktetb9%fEv0FqoZygf~`|ZwyuMWSqG!Ty&JLR+8N;^8*X0to!R(EBx(r{h+e)oZF z9Q}Ci$ET3ipK@5p?h0>dlJ#MNE#-1K<}l6!GooB@Jj{4jHU|R;9s-4gAldOcPtCGZ zzK0<9-{P6mwt0)byUPF8!>G(;Ig{S)yT6W=3;qiSZy@%=zhIJ*4B@r*Fn-ExHjwc| 
zm`xz@k+vVL<|m_zNz&1ex`M@nNt01QWXa@;G0FbyX%Lm04pai{ubH z-6_=rM2Y~fj7Wrz=Swhv232(F<^OtS{}#tSdxH;%<41s&bRQ(%6Q0^C0&8t`avclU z?$@_}fngn0O9v19nPdz@+uy_!=x zJqf2@2M-r&QySBNB1%_?9_+_U=T_k27^HO>_5M!RUH8Ea8->$Si`MQDm8lW4T+*T3xMb%YVIM(y$a#S) zbqAxk80Sd*yN}$gtskits=SQj+E=4+ox9P7A)QF#fqaum5cqy=%|)f33NDUW{_GX> zEHyIyX=9DZi^NYdoR&BH=k~vqHHu@5vTk&5a?N)nO1n9&GtDzAZ|)OPj;m0r zB=TB)7QnpLD9ix#9x~dPo$Rq1*Di}h_jeTQF`>=#P=t;n8!=pn$D&{DfUri`Ei``% zf`1TPeB!x^>N=j*2W=?k5r4MkY`i(YCTu{-*^oL}G?=!ZDfFR#O>h9d?dgWuxH_#zlAi!o8e!uI&`u?V)jwlarfCLGl(_Y1XjTq19X|XbjV(xgS?}nJ_wRfL zkssdqH84TGzeyJ&_^`Tej9ecv>4lah;WG>YEz<3|6WL0c>DilOKRd$f-SW3texN4A zu93}P+%@vCzynBKnf*dqZaK108H*?>H2G1Ma%k^y(WPebeL=%^Wpn2p=5|BckOkhQ zyF2C9ND~xPCZr z>~TFgLknjv5JY^BwI_ki*wJe`^7*V`6-4^vHRm8vKc~^CgMHZz)x@OnYQG@krrR8s zPAZ&ax9%`lI_fktt>0l`06J(X52Pa0KtYR+!vtP@>GiHh<@kXy4*BuY+BhWO8-IP| zCH!?y+S(J*Z=OhQm3Z|)h_^sAZ}=E+40a2>CXw<A>6UU<2?mq14(jA&Mb&S!Ev~RAi1Gb!cF2>CQhA^d8gz97%_k3oQx>Ebo&0_Jk;!&r=<3PaCpZ*ee9lA@!J0L(PMvV< zid+ydXq5}W3txkv1H4pzBqP938P57pv-zB%;j9m%)5fx7(zSe0OHV?^hEkhToc%04 zgWS3kl#RB*VP?RM4?DtNu#``!z7VrPhY{J@NHY-1=LrA;QT|m?Sku$|3cOxU_4!3we6U zyH=XjHZ(x^Ak>`89GN8QzB^*_03viuj<#Qj=C?-HHShAP5L^f(I1&ggJC@=;gbD#Spav%_tQvzw#( z`A233Z-9#eXvP5sr$S4E_;yk zwt2?!dVa3i+LBi31CT0RG_#<@`uK8b_fibtK^x{Hq~uZ+%_|=kQwO~ zecb1Rb6?dv)*IrvcOIXSsr7t1hj|$=UXn{gsks)nY#|jf=YqdC{fcv@iQk@*98&$! zav;%nGd(YPnh)q6F38p%%%8Aw@!Hq1&WZ(>4dF3145X@3s+Txas_P1;%H9{&0I0_V z>9vr@%kHxHwp2!V+e)ok3Wz_LKC-A#KMn8UT7q0A+_&_$N_#>6Ez`D?xMUI+zmg6j zw(c-A{}<=^4*}=jy!4a9$vh|T>tvOt_$PoYbSMih>@aR_pr%3ioaaMmUfn;XYt!QyCvtiLk>#K#bm_apc)YsUuP5xy0Z1zMrc zeb!G!jgI3YgPMKab_cdavkTwG27Mk_2|uJJ&4l=wA;kl2;tSQ%tDy1P$ntxsH^<$7 zV1Vk8;#BP+b-b6So8+cGRwzM(?1(3IGUV%I#Mf|&?O|DTKE%*n-zm;C@PkDAABL>v34~ z7##@ZT8ResYwHvwRo%Q!Ml1qc?g5QBd;VkQG|;ZGQE4_SrdY2-Zo!WT2)bTjbtq^5 z;sxt~SH*K7(v?2%U^zZF_?-W2K3u?f8Uk5r4;;!t!0H}KefUY*Zv5{eQ=E(EwHr7Z zQbkA!2A|5-pSVfoye>5JR7gc}Xs<(F^&ZrkC5=@YwiGo6)n7SD-hAori00uGpX)nz z5kd@=Pd)=}God4w2xM4dc*GSS$gd4Pw1s%dGgkX9B=`J7qyFIJ)zub(4#b&=zf2pc&sm$p+4)c~* zNH<;lBuc3>M|e-7pD^VT{v_e#qOeC&S3*awdhMEh&31wE$@h|%TMktY3tL-kDiVmb z+HTk5wt)6P5lYfeQjBWd)+CY?>YrKYIwQk8kbt+ww`JD8lFVlr5#AcJt3$t$|SyjEalK z-{g``6*7}IcS@5A&ANZ$vFr64+C#5zf`QWi;o~lK+;($4DsJ;|atG+1-MZ<4eeL18 zckuC1X+)vg&}c8m+?)J88{^573tI}!tty+XnB1O_AR%$A^~rM;i7flJUB#2;5*0ys zEVA52x_4|pZ|2*FD^l-{u+ig?$(mo8BjxDWsaqQ!Yj4*QNX`UclCO+rPM%XQdRtuS z^{jqEu}nfQ*E;jjc%VX(N_mRRqv0GyMQM7Ndz-HIbvk-p>H~jRHLcXHikUqsEEx?n*0hwb5B@oP{Tob`IkK;1*5D~ZEmxS$+xbo)cRI0$e3yL=lJ(c&4phIk<=^=6ID#$mON zY6T{J0?E*^dXvmEm5Ce_?=~`$SuyLaZLSLlcea+9*}5n}W7Gas;^I!xgBcUm^!|CK zc7VR)z9f6~%a{-4t!Wq?Hc6ZpAeR2o>mua5SS931zOlfnIX?hM_Ct|O5z*IIkulo` zNAHC=zQS2+RI|LVEh5@9dO-@5gu}dIDDC>~uNN*a&hdT@R`&lxWE=Mvm@!(l@fIfo z#of{}bNkZ^nFB#PdJWXc1Q8ud8S3AXbOcX8A6;C2^1b7^GDx+F$cJ@0LIZWlkw=`l32`+a% zK!Rf_l_pSR-gqu+my~Lv+-2RJRjnpSp!$tbiH$xp7+Ks!jnQe7pizu)ksWPq#dQ|w zc*v4_zlg+#%0%dk`CcD)M#Cx>^z9Be?Gv%NqwdXbK14;k1?pH1loa=6Xf#ky*+ms^ z^ji-kFI3*FwFWz_Bqf6%(Jx%WWb>H0mT6 z$w2THG${zG&9!J`!D3a9zUxg@Pi>$pr6d$fJ<^UGb+wx-F+lGzb~<;uR6o8u|N37&5K@)K{5DcGR_ieJ7bS++qWys}d_C$c{UR(VeeW;S8Z^7|J#lRcW-Z{43O zW8;&JJnVI?`f|Usc72@0H>9isGSh# zPEs#Bxujcly@e2=D=r&p24j#&K3jH0O1?W^4@-2|u1u_lnNa;G5ss=~2-O9P8VKrC z3cAQGHtFv+nahAt>P|lj*Laou=pTNG~dqEU1g=pHkL=jn%` zN->|JI@03YTE-ZGV(G`1QtyON!+t74JcXxHUnp&6^umVyQ?nfBTUa}jmF+GcC=0K@ z3Z?d_ms8J!N<-l9fWkB2)nRH5+D)z1^KR3ih{i_%9A0ikW$_^5THmu$(LHxHO7au5 z&=!4}X$+EaKR)nG%1Xp5Zc{fUZnLH*k!bMVIjEm@;k}=7;n@{C(UV-Ad7__T z^wl+&L4r;#^NvUTOT{8e`q>tjTn~5W8BE{Jn7ukvJ>GLPA|Bav#%1Jsm-W&?$eT^< z%IN%le}E^FV6A4E1i z-i`2Xr1G=f5!>oSUwn&nZ1W7e*x7EuejpSf&2O3dfM 
[GIT binary patch payload (base85-encoded blob) omitted — the encoded binary data is not human-readable and its original line wrapping was lost.]
zhV3s>Y>s~K0Si`FX6}@}Q;%E>M9Dy@jf9a*T`tT69lyTlv|f-}tKgFgN3Y}rQ23yb zA11Ixa+g``NAnHdIM4!QmlqZfl<2h}bHOb_e8mZf>V zG9?6?EOf__0+2XX5g{N-FL45P=) zgw>n_oURU-v9ssr+5z9xQFGv0GPqiV=F}N{9~iw*;W8VPjpMWHegVctjSN+^Ec#vS z(k8v^lg(dUs!U~qEzb1-Fg}IaFx~plZ*|g5gNqaAc|OS`sE=Nz->pr|SfFNC-6gi; zFu&gR&8~;{%B)LIa5OaRc{ILRb9#8x(K8F4d(jNQnOI`*l>sAKJdW#_6E^H$Jeu%% zrR^&n_dXf*zko@74&WY$*Po7FsMv7W{E88)u$DCy9J0+L+}_qWkYi$yp;%J5miB9e z%TuNE@N;o;6h0ka&ZCak*MTlhvV8{gzM!8U-eB->j_OE3#m>Eacr;YyJ}2OB-ayp5 zoubNKq`&CuzBb?`+Y`+)V7}0>&MRqq$E>%)JWGi#m|QF_AGQ9;=_IbyZEMsuZ^P!w zw53VYNhPS?0Hpg<9SL*j0R%gC?kqCs!HpnP;gy3mr%L?lNFa}*F4)ZZO zn-#Cl_q`6Exrl?#KFduG(27^I?eN%)U0*`UE@Gyg6b$o^d9*{T+wKVmX1y_M%ahj_ zcxuV71v8Y=N~^a-q6CBt1HNBi0ZLEnyiXGM`5$VCUji1O8H)sp`R94K99TtjtJ#Z! z>*^x{aOgs$V)EC)2ui4gw9*|SAT1#X2pDvC zO35f9N{2L(A|;Kq1=0G@9R3R zGnPuH3%jeR2u`&nP!6r3d(EeJ-6ltexl!^4r7K?XZd4sVP7dLpRnb#RI(WDu7l{@K zhVaK1kR}HNGUN@nv#+DwZx7_VWeRMW8mfK)&Tv0{C+B%hcGJz{_t%{B_L$dq#bO3W zrgkLa*=*Nuk#ag`Of~(ets=b|We~q=^y_&A`k4ppcC2RX07}=D9wv8llMysG)FaSX1b23ixL}lBD?j`y2btc*644c0#czK_!1zlB)`Lgu* zNmjq}rSTsrKoqjHjY-|!n21Xedi>?8i(s+Wt!OXMut?U-Bq_vfrvaeKu=F5YJ_Wt~ zRy)tH@g=+U`fd7qZ#~+H(dqMYQ$LA}MG+4NUCgc=<5gKISN5Sc8 zrco3Cnf5(ay9{W$i^X1k9P7-F!UC9hE2iiW>mrjbH)FORhI~?u>xuOm{R#t=*9@P( zjJsBwUz)dWNTw=R^G+Pwal9egeA?5E$LLyt`$E;OQtr)L8nPgq92EGV_o+C5FxInB zjQ{7AC^ck{vv9A{ZO0v>u~&*((k<_B$#y0n)9gh!vHB-Fw$|8UsiL=IO@*CVM_Hr% zmL*77?uF!&vhn*S(D$KaiFmHMKc(a8!5?a+Tbc}Z`U@qPEeVuhILlh8O(Sg^TJjrf zGpw>B2q;{UYLCsO@Oxqx+4m(prdQ(9|2&cE14v%|Q*;Z4ZHF#Y~gl>R9vM9!ifXxF1fVuZ>|WS%aK$?gHn_g;bf zys&-#Xnj7~`zay5J4N#&CDKfh4V2)y{4W=P-SO^IAvl~vJ z0qdk&5;45&sZzAf^Y6}KUS;d|D6%sHe80RY$oWcicM{T!Ix@+ZbnzSx5vDPF;pm=6 zc8y5%X?T8w#${Wx2_37d4=1yAFoYyRqhSrIaS_QbWcChxi-*xTj;l1|Mr629>k|IqUsW`pNS`nf|`CCK8ETYNNOvwk-Wgm(v z#C#pbPOzcspP705ydYhSZSA+M6x#zRiwvNw%T^48?z>MzSd<5|7Q3P>>KdkkKNA=) zlwZI2vve^3T$B(vq*tew8#sWTyobtRk`%_0%j|*JcJ+K7jv?xV^0H2#=(|0YzM}`h zB;%RmjS=*uZK>?58^_NHo&Jek% zqSsdF_;VP%NJJr8N4Bm~?3uoB0N8W?+=S0uTm=-Hu|qylYRcU6hueY%;`8x8+NH+XeA1DR1?` zcjp92w!Z~e`F-ZK*&aqa!5%vn7q&SZ&C5hpiG3X_(~)Rr;7oJ*##dtA0mlv84SR4E z-0jzGHsVc>xQlriZ;gL@@BxNSUS#!Dze2k~f{0&??fC9a8qWg{cd;9dKiHH{I2|4RE)w;A4(%;VNCDPNVOMD@P@TBT3ty1v;V)9k z4fge+J0hEFQ+lY+f~ZD(uG|=vhcO;5S-RJ-1diR#xAx21Qth178mIgD7->#b})5!%y^X5f4z2MVPlz|MDB&{gb1Mw-?1R&Ucz{E!B zd_{O$Y^`I{P0_|i-&?ozn%eh$a- z_QY9-q*8sH$(|jt)cX0E&~!DL;XO8s@};_ZF6DDr0&X7+55$C3M~nB)p=1v=12oPI z{t`pU>czLQoWSmpT75}($?}Icjn$8vv3qA+j8W+4I5= zzn==`oq*V-Ak5B@FCZQ)tgJJo)?`R@J^lBtAs=G~X5*Xdr*2dqAL$y&MjY@jJbY6MYj zLTDi+jl%AeLf4uuxdcMLli3`G9XGo&QHIafaTVdVh`l#EV{@QwMbdaJ7jAo92Zb`C z@3_#h9K@L31bH}!lY@SA@RU)@&;vI6ZJc&4$Rq;Z^5~n<9EGqe`xLeXI*~*E!o+cf z%EEi5ZN0o$N7Ix9St*+%X!vS{6P6xeV%c1p1lXBgs|51hj$_bHGbYIM+vRy|X;G~1 zHY>v5aA)WpUHp|sO`|y>+v)G}#;V!02V&Q+J6N;~P#{L=t(5c^xcWv5yNhyIO8B}J z_Y}LyCDHy?0>ah{kZc+*OUiE8X|Vx7oYV4LK!Y&feQ=BmqH|zjR~3Fd<$aA=j*L45 zOoJnx<0&IKZ{^_`)ke`3$(|=U6P;KOA-ma0jR9YA+LNV;&kT*5>f}9SR$s|E$*u>| zQ#NDUZ_1?ha53fSH(pmn_kOrPXet%8HW1($8;6Zc5V%NmL+IADH{Xt>1N+lMu~CYT z_Bn8##g{G9u>{Z}4&{{*+zu&#Pcof|8)Qv#+hOi=W5g1$zY<#~ytjR&)P6qjqnLM} z%Y`|eBKuJMaXVD7(N@Bd<Uno{&Ni3V)kJAo4n93z?D=~9%`7Zw~8tzD+ zV4s`(l{}9{C}dx^_$_B5ph;Ocf2Emrp`#~sA7meFr-I6YSvZb9tj@1RrnLM`Iy9P1 z{RiNT&^1lU=()3p9WVM6FHVC#q+vCKLC4MMrgR94i3ez^E&~sBvB9LlH0%QBTe?im zf@^RVr`!|gUFmti?r#I&UF^oUu`#%>uI=c!THQiJJ`#V}LB%^u0wcgDHjl>})af-V zf*Kjl4BixVkFv%)GD*31pKZbLlxNmNh(+b@oHa4D4CIGm*`34 z#7^zsOl5L2=i1)8m3x?=fw~W2kLFuNrW$lGJ?~|5nClW7Zy>gaw2m zRO=WCBYotmv<%`t0io{NpCh>}=+lMOO?p)`?_Q2HlJuRoGJYW~cPCA}J-I1Tu%`d$ z%=`~%!>L7|;G`(HXWacDt&wkR5>~JW{rfhqP7m>6m7_!Whs4+q>Rk#?O@bk-)OdTP zH(OoS#0a(gtL*AyV?xD|Ezm@UpUHbPb*i~rJ1F9O;@ybw(iLd&_enU5o7L2 
zmB+4>ym&h)Py_Z2T!fe1YlE#)3k>}9!PTD5OK9iM9#o;6YP#?f2%)8loYh4TR*eT$sydZ#{sQw3g2156fcH_e1coTSSwT2tF8H>ORbmN!kbKQi$( zxhhX*6g`}}3H)r75q{s+Q+7sdMOny)TFOJAO^@TAi;=Y1C{e{`Bt8!6&;)l=#E7pD7@qnxb0SR4#ds@l$rM>0Xb^~{sd!D z!Z^|f{7S(Uv{j!}lo)0o~V)>!}#4^f1;n7s?bbPAVs$(Ce3T7-{=olX*k> zZU1roSrIHU#Bv<8F=c>Uv4QBgGUDdr3BRd(=_Lgwy;m%SzbR6_uXsFlLn@jlXU>5k z)qP9uU&(cH6*g72O%Y9kHq!AQ%*h-T%T>v@4w>(0cVj-z;w?Ty_9Gl%uL2s}DDeLWJ1_Q710~0D6{HgSuZeZxS9%|a&-@{G~=1ck;|Efw9_B@@!3@#|7-g?l- z@y&3R&Lm0Qx2rJIrHDnTz?$aYxKIC$754A7_w%g7pe`?o<7>_VhgqcWEbZI$kRWKW z8qYPVX_||oamv-Te<`&7-e@_@p4YuXpsjLVa_QR@!oEUwgC^-j+QzY}k6wjleiK_i zpn&*3798uXFahz#EHn4GV~8;QHqE_UG0nW7@s41#D={-U7|FG;m^?j1!RmLn@<1{f zOU$Sy<*RB|{`kdNmaA8u5iA;)0w+tiDXbkgjyLACUY2$B;tS+>K2-_;A-<*1>a*Jx zCkDjnrX^Kut>ffRmCaXd&9=aOEtSc)I+v?ztBI&v@qa7cp3rNpIyqtVqp6BXovpS* zis+NVISwedsK(mziQU7F9EpvkQ|uqSBh2(_qty|U z$Bm_DDFk+eWH6h0c|o{!fEs&q^|UJ?sx1#A$>v(k)OQaYFQ?k*IW$9{5XbxA^+XBa zI;4D%T~WkL`Nf3byG5o0=v7g>C@fbsYxRgaOCOtCFE8#DxJ1Hqaa=+@aWI%pfi_iPPd{&mXXXT1 ziyD}^3^N6J)=2*pQ&VMBW#TT(_LfV&b7a#k@x|RvkqKAjIIkVoKf3F!V-P(rDBjq+ z`lVKd2SOaPT^P&5+gun@CwJ~OUF>Nr-x61N*5n%N!e^jGo+v5=Wxy11zMxSZ$5fw% zM;`uiB*UF=JIY|~KH6g7U4A&Kz~Td4`!KL_j_o5Ve?(>4-;F?I1@-Wbd}r;8#H|=X zO;sd9@*sl76oP2B}YYn_^Fq>IwcvD_i)L1kx>if}R3A`h6cma)`?qNr_y`7i%4OTK6 zO?0-KeD>(+)oV%w*ZOkog4-Jz_wkO42(7=Ww&^uZX}6r7#{;*hN`c3>&(J=uN}u(DcZxFsO$r1MJWIQUqqtNAZP)SlcqPK>OP&*k~>CbEd z--9^H`92!=oz?ISKL_A6eqPz&Qn{+8TH-KshIUe-X3C*pgjP00)XRG#>zuwirHY{W z?5%85ktV92g*Lz15>#4Ggcg1z*Et?}y`lfwnk#>;-Hz5bB<~)a(;M$i=zjW*dGb(0 zspJvt3jG8W*xz*TEe_=4G8^8mhH3~K7jLS)JY6O0Dp{47PsCx!W4Wfd4cQvG!4ff7 zT5Y2?J@q=LIrq(}GAOTZuAqlcNe{noCv>GLshFfGf)Q~#U)i-A-j%_~Ol||3_PK|)w zv(qHrKvX$YK8{y2MLue{Pp>es+Z~u9+iusav*Q)MX9H$w#`oSKRbX>6(Y{BEepS?| zb)*A)a?OlTg$1SjvRdqm4YB+RDRObc7>5GKoP@SwMQo?3uOy!`N=-MUjXG0ej0l^B z8(wf6RKln5aQfXV;nfm;%O&6k!;X4zSuEQb^bML!c zS6X+=ppM)=reU~Y3g*Yk|MwH{iC?U*Jte}hDy9u|45DJXeS9S-A zLXxOgiPF29R@t*L4{+OlQHH}UaFgBhUvt`W>+wfZ4jJWwgfc6xmbpS_OQc4gA=uY2 z#aD3P(FJdoSYA!`xqGabV-f6n9Lsfs&2h~Gvqcqvjj;WBomscGivbJDAgV>=CD z&EpmDU$DVjxY%E$33Av6slMHJuUZ}uL;_}{*`?^5-)pJKP} zPF^2KIWKhe9B25B^>cul2?JDz;g0E7;~*g)dr9XSD>SSr6GW}>&8At1j!-yyon`j4 zg;Obx8=svt2X@1VzaTBb4@WLdx$;nLy1ngX|Lutn_8hlpfGf*gK<>^s_3BL};d!{C zqF1xE%Nc4Eq65;^_PA=oE`L=<%xU&DxiE3aPF1M?VB`LLR~~w1FaTDtZaWTQ5g!5rW9IDv_-8Mg&8~N^74xA0A0J^SWfY!?ew>cm&K(x*GHQ(T zcoB_4>~_i;9c?V$8sc7r+$RaCu-L83W$)FF_S<>FL=3&zl!N`tmhKN4zAsdWoTjDr zq1x%94D{Y{02c*K@bl8^+)1Z%l@kVjsF$9 zBI_ljS@`k~*7kpVtN--pJWf@87XKS7^!v}MGCK>?K&~ITM*m*#{$U~fN564&XqfDe ze(x`>&JY)jm256p^>7>j6Mu6{;w)Ga{H2xqeFgseuPiSh`s!E3reuF_AOGCohaHP? 
z|I+vVqZgtW;j#Bi+~a!n-#_2~@?m2M-22;a_x?KeOTa5`euX9Kr%K`PtqQ!4S%2?u z{%d{A6A_xt>2SGU;n;t=T%Z>3{?kA3%jGRnq^drG2wIoTM(?-(&0V_x*A(d&w%O?u ztZlAaYl39qTYI(qKlpCf#bAvC&y!TyOtmGywzA6!|0kaib$ws047h}3ai{4agQKiI&}uVb+-z}@wTn}qxyJiwzzNgXqyJWdE~)BckK?8|HT zLZH&giSqUPCmPppRf+pcBlrKq(gb))&ch-vME&yh{F6<8>d|3c#0R9>oQ3b79A;G5 z{)5=ML&MzvCffUht?|ceM^%mU4e`i+IdISRpHu)MxD@4h->N=a9R80l%AYL9zb&Q6 zOZ)&&uyHx4cf0-M*F5PlL0=O4V3p67|Eu)(45y1h;7^?&fC;5AsG zfS*)}|JN4bKVJ*iX<>z-#+@Bcc=Qjx6l~r}LHJ2-L{JofJ229ZsH7>?eqQ`>#8CY2 zn8Lu9@(|uXct++2kE*#XkIQYKTP|&k_$x1UL(meq)loN&_{%&y!72?|XrBrQRV;Tl z?Rae`-$(PgYGgrEWYnG#IuYxt8brOJ6F=})?2sq_)dgT9B4Cp4Od%_^V#051*6XL7 zIBmxH9dR!n`?)n9O2;dQqq_{Yx!PU9n0Ktv)Z$CWn@N&i?wsd#hmVsmhn-Fo;_SZj z=AL?*dZI=~N%bf37xmDBQiJ3s_dc2l0D7y85+N4c{h#MqO0#eAa(d8(@40*L%Lq27 zIu2e3bhG80a;iKH!Ay7XLvhq`wFxatNDv_v)Q@h^d4HQlr{aeebF%=^)R ztllp1`@aTX)lA1Ue3Kw?m#~Peirx52%UjBpW(uIVdA|Ae5%BysD8>Rs67YY%pT809 z+rV3LKh?5?jX2ekmY$(eUd<8&6lZC$!lN~66JSAI-0q4ja4D6fg-^*S)m}{yv$j?@ z#;BI`ATcF@-LEtpD35i^&BAnB7)ne1Lq#*b=bRLr?dKm_ERpFR2D)DuZKFnHdRK$& z@rlNhY@RM7)|{O)Al)7V-f$k9S!ZYnJ!Lt!{L8$9b9YFHcs(0Sz-f6r>=O2tWZxa4 zDk8HG+u#24oBsO)6}<|D*3du#_^uk|3;!p$PF)1*r}%AP#hxv`=(UrG#NyhZ1> zr#9azAS<}ZZuh-D_v`V|TDrLR+>IuA=Q8znp1qRq!K0D6wN;q;HRi#@(pY_4z7wPW z3BjGMHSqU8&a;(sMgR(ZoMxBC(4UeuS%k|)*On4GCPOk9TEDN&wYmMP!{<2-{FpC6 z+(Gp}zx>I4OEZ6zW|nhfuY7ehyd}Y~q1R2fK(6}DXJ|ueC}fzyzC8#aC+d5e%sjR{D;UM@IDGs- zaNeDIhZ>uBR5J>Ru_;1kksM9)ldP2&!x`1Y_V`adTwS*Ga6{A_con=?A;4H^cXNhS zD)7hYdY%GZw=q2%v)z+!YyG^uuABL3g(yvTvjUW`d@M&NqCpmjy!^B>MK#V~bOD3X z#0)A*E#5k|y!$lCg>UG?`rON72OBU!imC+L5l-p{u|9s=wki<5$SoB-ZsmmjU@%ro z7rU?_`#?uRg>9i9ZSXEOQD}bKI3`Z$g3!_}>luOe*9N~8TaEOle%YTR{ONmWi8{It z9+Nq@!9dh&+w>K4yHwMvMRE2dPxjI3!Vj$5m42VCF>hx0^yTQ3_YiA6Bx(W%bL{#o zV$Vph{Yu+hbs9HQDqidTJZslzvvY1!F^pW8yB=D%@Z&h^dWR(j0}&Xtc~k9MrVemB znp_-jPGNcQ2zy6FcAqzFbsp^W5e% z?hMuJ!j5JEyHHN-xoz@du~ZhAc`_(#rL1@(;WepKRUgF=D}8-_WgyBvnx;-|>IS)( zL1WnGw0Fu|L}_rEBm4H!LNMp^{72#D30UeCVaUn3iJ787B*KyfTOU!0p9&O<HWdlJFi^lBfM zMF6mgrGY~^Hd?Ek74|0mgY|g8xyGnlC8=>I)g}hI#|dVVTx*(gof|d$3;7GpmX2O) z6|J};;b(t%RDewL!dQK1Anxk)YgRS;0hFqTV#SlEF^7=gRhc(WHs+=+v(>V-LyN4( z?CQJ;NUwfCHR%<~xT?snly~S&kK&KlC-qqF6k84_yxg>XY4|=iTYRqWuBYo}tWiJ^ zh7FkG5!Z}f(8Hdb=UQ6>BMrUcu{ScE43sjUxt(R0ume3SV!9ePAISAiC8vxdLr0Cm zZT*1(rRu>6RIGfsmAERgxj{L#(eiMtYs_9vD8uUsa-9g)D@0W`rEzIwWr7Ci$E4g3 z67c&Mg(L|^H*OwBh7wwyowCP5-)&Y(@G&Y+_Ketup4Kz`cYN|d0yRxKgDHJz2)XPPA`*C-PaFZu~L$nQF?BRF?b3bNG zv3^4iHYbOpq?hAtvTF@X?n6C8OMiV5WN@K1hUwf{2%me^tv8$cCgSckTI8!B&3ngM zwyCyE$)qNuhRM@Q`i0!lg=owm{fb78mjK6lvRvuO6jiUAE0>zp*2v5(5&PbC`O~SX zpjVTAM&c^z-uJ4EQW_HA&7PRgI_qRRQ-C%h>}40z5%ky6Y->Fs>nE!HbD-@3tu*Jy z8)#>pnP$zrS%0$YDjZi{m9NYWsDXylkPAhxa(i=i%1G_0OJ{tNKJyKC*41t949yYU zS)Z#Ol6h2|hW#D7c*nYP)I6ODFR6o|2!bg_Au90IYXw}>}NztKmhDP>$mX__y&u#K{ zY!iiDqcs<}%}+Xb0zgokU_0b!mt zh|+yqF{SbKl&*K{GtiJ(>7Tp%!MGr_WH}7}^C0p_&^g&8ML>O1GXR~!3(Fk&IZ-Z>PNJU z%{h-MdE}7m)R&|o6bJtdkKXR4^JOy)@K|9w6T}j8Jq_lT_&3_{`fnHSgI_C7e~qD$?-1LX;|Vw3(Jqmet)}$N*U^s`IhFIrV|*qI~!PRFBtYK58ZDDf3qTxd#?{qTZfR*mM&=TggH}K>UbcAUGNcnti0ek!%-HoJJ_PXWjS2 z4DQc*xVhdm7{X|mdBy8%)YQJd?$yq8`%rU8TD#st%dmW(2B*ng#f5@sGZ%5}R1S|i zZD-q=%Y5w>Q|)nNS-L+BWTgbZ}G*WY7TedUWu*LF%o*TfaFx(Z$iqBtZ1su*`B=G)GMEb@(Rr255@NF$aW5C z??`gFw7Q?5;E2zbOu5gM*gJfeY{%Yl&YEZXB((X8!|m?9Vd3Ml-DrDCA(W~R7kFd--0IbKk=$ls%`w^A zS)minap3|RsOxjKOem-=1geQ0ZL4sRjpY0DL^Gdo+rg`C=&^|w;z;+w)hs5NC$w+8 z*H&u49Y>iD1y%^(>IGgbQG4Oij1$ZV?~U83-22GM$2PaYZ-m?C8DEqKX6Jrf$=0Az z$NN~7>}k?`V(?*d1vY)BaR=MLrpt4F7j*R5t7p$$%e7xUCKa)yxTUgP;fyNXJe8&s zTZV0vk4>PTj(Y1e`#q|P)4rUPR|m@48Jj2R@GO|n_yBu 
z&$adB9PY6pdewvwBB~HZgOj8nfsxcF-#Xu&iB$q!rM+7==TYMy>n&8QW<*-RuncFihVYImA~dXm;9_}sZ~aoqO;{M%n6rqzJEJrt=TuSy&U-;CG&O|o_3MQ_}1 zyI0b8N6X?WKAvdY!afcEv}H(|3|ZcS_{jC&|1! z(;b8(_}x9H)M`o^qu7;|1eTuLT*SO^*^Ui!&(TY)LH!iNuD*R~(NX*Lc-eAnG(RTQ zb1^ofot!F+URlDRf9T1FJZjfXgNqGFR(B%p8=}HW&Ef!HLw-(sv~L^V2yO4Fs^z%( zc~W)*8Av3E*+Fbp3XD}93I`w*w#eqmiTv$yhvS+?R8dHVLySJ)BAqx_E<*39te-vm zy-*5RG!}p{9LE9Abk_2%XSh)zSG8qJ?4~BI?gRqW$KxgDMftpLdc-xfa&6=or+>vc z7VXOeB^Wk6GDEh9cfY&Nw6>Uh-g=Xdup8fvZfR$J;&NYKeEwLa@QHG~jz_EuS9#AG z+GMREZ3!9=9Pa(;(kpe(*lfS_+_Ojb;PC&diE13IX8m5fi1zw$d1q44(mo-UDh z2{b~M(}flT=UMt>;p)=`7EOvA60uZQ^B`}`f9Rc<(=Hier(j3 zwI?;_EWbEmqQX08Q}=z5NVXu`GBx(%{{Y)lfYJ?m|X!X7&Z$x!||Aka+h1f4##+}&hgs^ z0y#2b4#EdzUTM7fXq>6Rf!Igz%{@DD;nJo{lFZFV$D?ERzxqQ@05EVCwp05Y=So)F za_uw?bQ;mEscH^!J>2<$LbFhf%Gzsstq=I>8%o6ql|v*}Gk#TzGl9l??DMu;aJ{WQ z9TOsMiTJ^)Rbslc7j&=W;wIl_drks9zukuz5Ush-g3!g-)=(n+*jSx3*yrVcSD)^% znAEMFLKTIdWw=|Fc0B*b&TT%|>6=dU%CwcG{l!*dgWqE7(;VE>7SSg2;e9*16(RI$ ziWPgqlI{K}beZ>R+22<4dK4gLVg83=iDgJ26v-%>mNPS+bPHpCe^023X=fvvZ=Ur* zT&4yKp5_Y*a6X{Hh7Obb+4AFrY=2#oxiU_b&z#6@FM@3Z<=v$@f(%2d$mRqhy2A(pJh}e zMZKj0)>#l7fTU5ml%ZjTW*U)pzKhVET>FFd9ov-o?26r99U|qd zkHip(Wc%a{N9YL92eHv&W94+f06y1KGIzX{!<$Ya_L$?7BFI19UdN}`UJ|o8QLfXK zrF{uGnS0hwUi6XXqMP?_GmGk$0gJK2+P?hjDUb@k^u~^L<_B5^Tc*%#aHuE_pfO(d zOY3H%sSkTYd?2#)fv7}%SayhCOlf4-pb_nWx?dTk-yC}9$ilx0^++SCK_EiUYHe4P zWSfBhPY`!=Uy7OSk%dX7GXpMbVG#bhDS$_CzHL@#zD&8Lc6TK2($k-HN<7l^#A!Yw zBt1Laiew^2YBiNd3%N>!W!}A1TG2m;!%#X?-ko@+%7#BKP11c=U1!Vj zC^R`ZXc6A*craJ2w@EVuK8=1v$*9aYjB>gqqWthp=id6X1|5>n3&}nuadQok4O_EVV3ePz+rq5f7b^Jk|S0M=&7cwb_+vAN;%e?kGddQtQEh3 z-*)jI@}wZqwQtYeJqm@}d8P*zGhw_vQZ_nlLH``wH#u_^0wNLGvrtT5Y`+HBD1K^su{QCHMj^}i_?`W->@^f z>b5=Uoyg3F{(7KG_BNlgTYU0eA0_J_-8yo?VCzt~Me`!U&Orx^$wpaMvWSLKxBzY= zo(xD3RR$>|1UEZgF)2n#IpA!?W4Y3}SCr|;&GIhu>}=M{jH>r zFg9ygnwNO6FV_G*j5!$H38U)nyq&d*-HZqm+F=i37)#WDhU+@mwpWU6lt5NBA|#?B zr0l*iRvfY(pPvWX3v!wVH}*(R>_HSZqF0sUYR02{`RK_;ZPmy^&MysHFmI6y!)48M zY3kcBR`W`!n0NByukR~Y&3lTQwq3bPsvnRGyWMyZzTLv>&B+)pHQkXGZKlI07g-bn z!^S9vd8%WF9UuFA9n6LLGoP4NKKAnGXZ~zaAXo@|a%9Peykn&4-SR}7yL82qH^Hub z5wZP`aBJN7lyb`#O9q&;VOxu2fUXdUKOPtb6JYPquB>-ND)FME!PWk+uD!mb6yw5K z;1T&vqkrfp(=OzSjkQ#`r<}RR&~BTanw#Y#F&yO!=oT0YnDr6TpJnWT8!1roF%xx z)`DMQg&|fv3Y7%1M`{E$UkRaP zf%mMW;mJQ(k0?^JDJ`l4C!PH`}W9RaNIP{ zdj8mK|7vna)`iH^;)xi}7buhxfFSJa$}?AvK}V89b9@3M$n+Md9N$%OgV3I?j~O+7 zJD0UeRKGnRoY?eWFX*4rIk;OLY0*LKZdy7fjCdB?w+#Ej2!gIYxRJN_G9oW|uj-@z4vgC6R4?WY zk4UaY7U<1aWXsB_9Cn<|TB$TRattzLU+~KWDS!Lv5O)2lWhs`xnklFA3(vy?tIJBl z+;cP~@W*$N&B1=A(4;m^3^=}u%~*V)$s#}wQha^&!)o&KiO2#&vv=h+MDnjW#BCESw8onvS-{YOZN|yEl35Io@Y>8a(x*QG zjxMlB4iCg-NE%xMSi+?;7}}^YD(R_`+941x?2i~6Dvnw{MIOPt4u+e#iV`a zoGdxOLX+#Vo-8= z>(>+IsEY@hc5WMo8&%a8D@ZyugvEDfM!0oZd^h9{mM8C%N(-9j??YFt^dsF(hf_Av z_)3Ro4&g)?mN!{0ICn8oghk%Sg^WP?mfO&>WgLnX8XSl)BUov@e^INS3vBa)dgD5- zja9hez*&e@grGe3^-&j7T8mbBw6kl|enPY~1|Q z&x`0f19Ch8aUeu0I5pCygVc1zhSOf#88V?*WhFcXr*s z{mRottqNMSW)(Uvi`EPuP9erXn;(3lj|#}*cpmlSKLo4PAU3`4qhF9K@Jn{(3;p37 z67rdpM?Yd8FX5NoLW%}H(iI3ikIr@O(yc8pekPZZK@3u70nwZ@a1ojZ+({P>21#3q z@90mS6op=7X6pUn%T3fn_IfuX(r4}W0fHQ<{iAJru$k|g4&{jfUtHds`I8gLFR4w;zQR;hu;q;>i!s|UrTYC z>q<_;7ICl`{%U5no;<~Miye4WP}8A}c8dVEP(Ybj=S^Vr#pE(kV*k(48j zc+b|-fdWQE7Qf#dP#l{WSHJAa*gM)ToO$3|A3WP%P(KpUYfjX2h{BALJT6T+wK)K^ zem&mY*=x(n(L99TcCk^fO?~XKGou6_y`z3#apA9GF_Ja(F@VO})b->U1WPI>1!PzB z&dWBLsu_Fu%UjNt-|XfAlgx1`7&{4+6z!?Hj5m<_FDm!msxfMe;AQT=Sqlc6r>Zw! 
zz4f!m7j23bWITeRe`pcejn?9a=(t!tjn)De1^?PxoK>ccb$S$Axp1-_#qrv&oYu>m zmr0zvN8Yqqh*1&)9U*9aam1y=P~-+!8ei#5J-KAjmqLR*BZxEaPY`3J)ox7Y}n4OTMppVktKREC)cYid^ea$X$D)JFBEzu*B^pcpRiS~ zvUZ;H9mIMwZw}zN$CLNdwDSs;=S{@fC0>#7Hay0%>2|>oU&Zz>M?pw?(wopDjBNEG zJ|omH;yuh2!|nbiA`oSsgvA8ItO9P=3W>8-He+>nraF?-Gq(EOB`%eYfZ@Tg6_bV0!M05(F!&g4h z-YE1R-g!rKhmiC;>dJ+HfoF#(G%xdL@71qMXr?^=T1P?T>SFLZx6if0&5OgLr+P4C zX{_$nOju)#8A8|aynhe`SBcr2!&#W^uMnrO*Bi!h625w5qepGH%3W$=XxXHgT^hp&>J{n2zInVls=r?i(>KAu=`5XxG;rm~IU$f@g$ zA*=>|5lrg2maBtDHn+lyL>{CTn@spEx%oV7D$FJ>8he=*{4&^qU(Vu#bE5&cy+>P; zy5(o;A9;IU&~&0XyxT?1-^^9`*~tq=YmMCCogTZnh zdKW4FBJLm2pK910^u=WkeOy_*QRzV5chDTcggS=b0V+#^#y2ZH4z-`0_NLyacV~nG zbmFvq<+XiMoE<#PYo6ukiH$q;;onC5y#*%>1K#gE=6pO!<;1VnWI1sfP7nm+nPNAn zt$r2td3En*Z@yBwGcmpDsG$FB%rKeKpEqL2@Af}B+@8`L;_2lbnU_m?^V~V?B@N+nd8skP|=*)T%ECQIF{Ag4Iw^tBdSBq zP)_TrNp8)cy5<|w+Ec~7EGgdariRr9)A_HB^qYG}N==NWwOg;&5lg+k8ki&>T9;~e zv!?Zatg@GvmxopPXaYR%LBU!-?RTg=ZRgL6}bMSDA3x6#1Qd@*&rT=VHmWel149L{}j` z)RlWyO6CJ4MzkUFi{Gv&Qp}%&7oH=pxmKBO?8jFb{l}W`ROrVlqnic28}JWeRV?E` zt|xY=+0}W*y7%apG7)G=UJLU{{z=jDX9J_>%=?dxNE5A>h3#pG*fsS^O`ZC)zw(Fw z(ot!kOM&-A-Gw+`2`V=D!8av*xPAp9{K3vXeGtWY6LWO+C=Mc27TxNy8U9;8OeBAQ z$rSPjZ`(c-F+H(A|iqHKLGyk_T@b~5MmLYJi^UF3OK)?|lm8&S6bBKN1BrbiruojV51l_;JcnERtpvIyv--g= z$t{1lk35G!&KbznQ$Gljp?We-%ze9~^Aa4nk`GK*`&G30?*C zX9V`yv-dV9{3lO~%ChwER4iN`TK$+U;f3sieg!qrL(MxY=2lsM_-ual8>eRBd-i*i zIzHJ+8Qc7o4B$VWVLzWJn$>fQ1smd^>U$0bQUC2N{MG09uRn}~V6Y$E@GN)!@?ZH6 zhr@N45GuS}YUBO;JHSKaJ>qOI>u{*s|5sNy3Yxb1gXF&spsi<=BJ}DDw%-P@rG^1e1v=|>0HflO3x;^hQoH`L# z|IzYQ&dS@SM?;Rl-3c~sw^QUtcA84@E*1xg^Ba{Txst~oXuc)n|%WZBh@;WofLD0@0?)5XQi^_k-nW_?q=%I=8c4>PMb;#f)lC#D^q zP0@#?arEGiYkM&9<{_j-_}^FW-~8_LwN9j~aolaow-sB9ec6!S(VzDDA6>g&3TS#8 zJ4WC@8|u!f!y@d7OSF71eI`}zWF+j)9WmTfa{VXYtP3n+wjE*a#a+RXnW_pQ58V-K zOZFC#r|QX+)~1^fMS!-XIJ6d>xGYvN5%XrM@%BjW(NJGP?Im9hTs8K@hhT9mo6g~8 z=;^^9cVN$l;~D8+fA(XC?d#|5+CTdG!`Iv!*a{?*B zXUO(^$t#&bhDmfnC;w1PJ-L78SU|_jqw)sAI%X>O@7kt1@r;NQVx!c*zYh%;XbHC5 zGe=zKxsY@1d?m@fx!kt;FG>5{uxslbjcIkFSmAG+u{<$nlYGX9`fVgSU~WT0b_1o+ zy&tvvhHysL^mmU4J8gZsmVduC*9kj3R7Ej@f8%TMbZ7PZDdA_+92M^9j0&EO@s*P+ zMLcsr8Q9Y1W7iD9pcB5*8C!*o>d=a3NJXZM)qVPuu*=d}uo^A8rUn#&3Nc)9Jq-iR z00=<@saK+Kv`@MSI3Ryii3Cc zy3e7yYxTA#L*+70?Mk)S_H-o1f&~rn!lxS5-to4nZf;G5#%-eZca&V<6$ajgi`|m6 zmq3tMl;$-JkENVB!m{gxUccN7SSQA8Icjp%K-Ys_@ zuXwhZEq1VkTk{##da<(2Qs)`B=qfvMitkUysU_uvKqYCe=cdX4|#Gn1xC2fjX zrfS+%96YCzd0Ct0^$TCm>Tw?i&a5eTD2q5d%E0sji^u#$1%ya6gMHk799U*L#UGZ7 zqY0qmdXGCWFX0+lIaF+8xC+S5ixv2t*`};PZ%iZn=IVqdED)c;C;5{3fymDG6PGrfT`pc+Fx%?R-Qb4!B|)>kKT9-*j=T` z=9FfX>vmlNAu6yeje_?}AQ-SJtJrc(JzFQwrgnOHs7gn~8G|`mTfsEfv{xE*UZQvE z{Me=sLXroB>$9gdPNcJbNCHYTe^aD=d4_F(4v^|pVd z_v;w24+ASXkuEfdJ)@NC9i!{_^9XzxE%~<0c1)4~cH-YJ6<*6f0)`FU>DJV! 
zwhjBFbjjViEY}07o@*P0ypF?W?(1zf84U#cT*lF!PYxQPDECD-5oVpn=XE}(UN7F3 zBAi1D*+gBezxQ3*`jV=#Qa_hoE^0<7MktSn?>A0n1IA*THzRI{TaKkpi`{J-DRI&< z-0|41k+dd_bh!eF3}vLd4e%FE9rv8dDZsb9VvTluTV|(^HM`_+T(1H~3e<^LZHJv- z_in2s@IHXFXO>LSL`gH8+KN*d@8jMt5iuFZJuO~`RQ|Zc3dGdBcsAZDrvD44 z{v+6LK!L%>*t?Y$(&3km+1v2qP&>z|S5!UAUHbgV7ZNuoqJ>SgPvFUw&9dTB62Bq~ zxJq+W&UC?>YaP*iuYwrFFO4p37-JFS*C*R^6-QqU}Qcz5!?a^Hd8RzpUI6i1CC+1X+;uddq zCRqgxevha^Xg7F)rW(Q+W<62Mfy?R@R)Z<~P|6j^h!ywfvk{5cl8Y@H_t3~ti<^Fc zt#MfEz19@YS)j4xea;!&X%15U1f_nV@n`KS_RtjN*D`Wk zQ$4k$4NkAfsbaL!Sp$b5G*`f!Q2B^@lIOe@trvL24yxy4Q(}2x4j}@LJ7-Yg!y-3} z;jh{=@(SusWzr73CA{y|;))E^6itVS7&7mfH=^ax-9`fo=1Aczov?fx`)a%H2iUPPHU978M#bLi4bChKa7_PP zk*IH@gd|li_s)1##C0u2THR?0S2(>0)amUE=ftHy^4o=&y?|=}g4U$?%_oBVCFo}B-iI1o14u8>(i00_`g5ujuzCdoJv)mJ!K%(Xc&Ff zsCUP}?6~mLNhUtkR&i~c-NCL8rcF*>V`QAxZ@Aabazy~`P2A>^b}6%KMiSIZ%CWeA z?cLsC%F&+JZ5=>bL<5roAT=-!PZa*rupGy{?I(4G^n=LB`y32?T=y~Q_doCU%Zyb_ z1KnwndyDoENUW>UCke)GJoyeu+g1m&s$r$5U5z0z3}{s2KU`bjZdtU=1@z`nDw`?-MF)1CwMCtS1R<+#W$oODw)V{+csGwn-sCg4{(I_l zNSHRPnPr1;eD%o@#}bw!EgVSS8V&j@5yJWH1r88VS;QBn#d+sWchf_(8!lGxMvq^< zIj82c=WZMEo5Rb8h6Iv0+@PAAqTlZJlFE8Ebr0h-h_@t0j(u>a%`&Kp>0t-YrgSj| zujMlbA`-4p(sT2|(KSWWIA55P zZa!%}yvctB85}K4a)4|Py}zV6Tdc~kiwyI4&9Xj$(S6v z&uGkwm$r;`f|C&HXBN{aSR$D!N01>>nxr3iPp&{5)~RK$_ANDL zTZ=s7UGPpLgEq5(V3XcBv5Qdrb-kV|`T|mN?N}`tYN%>)@~()3GvNX`{8{@(*tJ%+krjc2GdRX zh9IhhV4rOu1^}eI>V&3Z2EKHq0hWx1u-s%x*O;E!@=Xc2ZH;kdn%Bky&Yb&#ne8=2 zv&a(l0%=o?X$0wo>-UEJ2&l^*_PKf$t!5ln6NSju1XSnwyDApS30HVf8xcdZoO)|L$i%h+@%0h-E5OAdn?L4>!@(m+-`yJEatvW{DL&wujwq0863ZweF&BZY*Vk|{7 ztRs=#zB)-Dv1xn#>~!F=XzuER1Nn#pw(!x4`i5A`sRPD3r01L<%E$Vo>*+HWXej(X z1|4}=_$->=s(W<7xj!f?0yst>Y=JIw>3vK`qTmqrwk+N|_Yh_NY6se@OWXS%)ZUI* z`qL4r{C6JmIi@LZf2?ZGtsBq!K?0C&W4v@1 zk90Bo7*hiDH>Zr}W$W}$E%;V8Z{CKDds)KFUAts80#!khD*P%6Jx-n8LeCQx#K5;@ zQxIqBbTgY+5P%+z#?>OOSf!I!&CY*dlaF?c}X5Jl6lW}D%*S;6ZzMfU;>QZgT z`~@M&079g==P@ixJuky%4EJ)V-!EB&h@UqQe|G^t!F()stPzu`h9c&*+m%6PNptgM zzRVCTLDv{l0+sw3s-+2?4W&eG3~>eN+8m$1G9n;9x-Fqhu_XRIN>0SnpNx20-+OoU z^bgIo^4rI)@4S>OeTU)D>miOuU65A0u$uxKxYTph+4a7*x|fp1DN z3hOa>Z59BiGXu?qOP?*A8#`1r;Er<;*+8n7w#t2IEIkYIMN_2URASvkR}ABS&7{bx z6*q)9ET4{M^jnm>q)uhBc=6{9D(jgy#g-}t#*(VZ5|_cT?UG%MWEG~%{Wmz<;Ya-A zA5!-z!OWv#(VDn?PHb~JQoYjYT9c{Di=>P7f!>XA!lJsI2W-BX@R9xx(llJhWHO~R z8;BRpCPl85bKi&;x$s&V>=h{;*MZRM79;|xz+v5k=wgd1(r8D2cbEw=h zkRp-F$L7n1M}O5Nd^eu!Py&LIuM=*Z zi0bXV)8DT6)OvY0=Iq51B}w_1&e%#%qK?a$=cdg~^vdb`Xmp%i`2>KzNF1|Bf*?S} zp6BY8UVY?w{SY^Ni~A8Y-@Ad@uU4Hz_P{53;B1Qzwa|G>E^o-MEy z!5qJ)TGpuY4ww+&2DG{~!^M85LkMH=6M%2RSl z%dluNgTh}026zksQO89+$_T9H>?z;|9)(o(A(T&hgMgaqcaN`hG zp`)(XeL1dNSxat#aLc4q-a>O{yFxX!EmYzA$k?hlzqyq%!VEB{EK*||&2eS#Ts4(T zXqEj(6m3>y+Ne5IVO_Q~R3((C65Gx^J@uAOteIkRb@I^^H}q1Nm6kmd zCbkA5o3O{Sfk(yGn=~>MKzFK=pd82B+%3C)q%`w1v_!dp1N-->De-=;>m#*WUwW^| zhe{OZ2 zZT!lt0OwV<9!?Wt-sR!a2rI4YN|3QCVsp@eHqHLPFGCC%QE@*kzgkek5}&T%uMA8Z zuK4UZ8*P|8I7@Zj0A*Jqwo-S*LX;OIRo)-AuLG05AU{$wY1F<2h=VUVzJ6+pF(Xn- zzusp?z#(SOzomEA*}-9E_zZIhcp*@BTx>?TfDuCUTUMR*t)Gm2k8+-dfh8IAf;^=@ zzBG>1X^ttFPK*<^PSF689pHiz0q$CSo+w?NW2Qtb)`F-bUaMthXrsf;QFH{d2ZijS zY~(CTFJ#{O+xElPmn|jEUBaMBb%*-=^ih%d@*IS*&FVSSJtFF|mYPe@+r}nw^#(b$ zx3y{wumR~VcBuD_?J9-K6>@m;gh?g`?eX(Jkg7v`JPbg6-3P5N+&n`Kknp)1_Xo3v zsvQu3q}ecpEBSfy(J=}I4s?I6Q~ZL>6puyol8x)~=!5H8Cn)C~uJ8NI_*1tu1Hipi z`}gP!b)76{wjL8sZkH=MJ6fC~HLCEJyL5b#f?~BgdDqYb9NFfp5#|{lAaj1rXK9S0 znric)ah3XFl)yuMn`?>>Wu57K;O`>&oxEtwNK z=5iPuLmaW<10|!5STw=SsiV~k#WyF~!B^?HRXgW7{r!REfh7lW90c82_NEv;t=bu7 ziAhk$5{2O|s{tGQ{_dYb_R91U$; zI3l*tU31l1dwjdPeRn%E>Z(@56eSwii?Ut4`hAQp=GUs|*b`zzo;obA_jI>-e!zlp zib?;r^A~9j$9NI?O@mk{^I&LKj 
zPsA|vWE>@Ei->9!Mx@VlzBg7$m{nm?ifo|rw^buOL`iySE`ayau@+7!HEWMBbP7Yo z>}*Pe3+7=?nyrt|G)Hr)SDP^i+mZSd&quk52qaaN`&e3+oA#}xpO*9%bEQQt!Fxok z8~EKt31car#YI0|{BkV!z{=N}YV8=n#ZNB+zZdw=Iv9I3yiC_(&plK$tkV0xp4S*>lQ;h2&-4 z8Hs0hDXiMQa-u(z7UTx_YW6EzfejXr;V2lU|E|9)B=DV+VQ(U_yWh0F9iK_p>9CXN zXo5?oP_a1s%1x}B>>J_mJh9{_8Lyi*1P@^3<$c}=!7ySAgB3ZJb8zDYIE#mI1;~m_ z#=p$$NDPl)jdCXW_9ITK%PHV*wQMxkNNa~mM&cis`3aW+PO}0h`bW2#*y>mDIS5+sT);O^*TRR*$bU>Mm;<(%Hi&4FK1M=QG+b|d* zQ7^g0V5mq0T{$V9(?NK7ym``ZPdvnRW%ALsO&^fltrPv)og=5S{hcw9 z@rFMC+|gQscXEY0vff1h2N*=k7S05U^nEZSJ2}spu6PE&E!$|9YPp<%<3wyG9bC!sh#cfu{f&<+++h&k%fvB_)C|~bURLMWft=(O`esZEbvxp01rf-yz zooWnX=qc`Fg>~$+l}KdGDZ6HkREM6@zKp-1Us4L2>@u~6M5(J4Ncy#_0S zj-BxXeU^Nlh%4;q?&BX~oS$~$VsBaUfhu-IiS(a1x zUv8yQn93fRbLk=^felzwnI~>A!R$+U*P7A3SGx%1-!)dgzQ=bxwmwxGr@iq1fi~%Y zQo6?BJ1rXwdnXgvJ%h)C+ zypb+&S+OrW`pS6E>a-50vWaB)s2$3z%gcSaBnWrCfcV1w_m8zg{X!YHuI_qYq`lw8 zLH*=rW7XF26-Yxc-ij-g(-7BgKQEkS_U5trr#ep-XI!(d^j)*1atd6w`!cZXp`dWL zDb@zobT@#$;i=$2?gJGO`0%(MH7F36G~$dqvTudeY|E%MCu??3pNUe2hB*;rSk)-j zca;3pB!k4`s1wM2$FuN=yUkJ8+LD)-M_em&TG#4NuFNO6k-x8WUA_dgAJv&WHiy?l z>O$vHCyT0qiSM&rwp7Jd^3%K9>qU-p+^~B}uBn6?<4)6DqxEzM$2rdpTpoBGcF)aq zz9+RV`BA@BvxGI~mJ9-#vFSYNBw%54Rr0{=K5lseTR#7M92Lxpc2Az}^8IZi_UG)L z*WDxl1P;#|sdYI4yt8h&kGsnmc?%;FclN0D)@SrXeeX`)qe_!Sva5O;4i-5lFr+Sw ze>@1uItaj*}Ho}+IWXwM+7tHc0D!uw13fz`1qXzaS7vb*BRXMNKuoARK*^V zOI25ZvvqFa8=k?OU*1D9Cd4FS7rnM1$36wOj(BmQ7uv2lAQp}zU9y~Sq;V|IH5TX= z*uefS3{2%?W9M2UPIC832SnAKzU{J9zOC-C`}NHu(fwJbxO8Yyo1-S#)1c!k$I1Wj z!$n$bceSOdWq0{|yajTxL!CQLtQuS@lR4fsTy)&uaFE^_?aaZTzX3(4$> zmG;<1xl@x}2UyK~eZ$copMkzwzBE^F)X?--D3_n_nU$oMjROi#K5lhLb~LZiajI;B z(_1WPqGf-7X3ZZwAjZ$i>__w$!BHGIHk5&6T3iyX#jqmo{p~3(6-9sBBskQ%FT=mF zKHvX{8p`@JSmY|G0(Umi|T^2a9$~J4x2;94f{Q+ zvuCbrU*)OsaH*Pea$@{eAZQa+u}=RTl(9q)@c@bN3+3wB%EGXWWZrb8rn-GNTySs9 zqgxsz7PQhHo~}W_c{T|y__7B#ouv`MVJwZrFh6u)+}7=kbF*kqx~!3@f`SRM*cxS< zQFO&+ch|hbCDp`o8QR?u&_4yNui!O?*?k{vaUF5%wsE&?o5dTusa^qpwL32Z6Dp^h z{LMxzIv9L=-L}4W74>=;V>_d3eOhMQriUd5o{Qhp4aO}WNH=@we%|0PZO}1aI7``D zgb(cjt^!Te%_80&v?%LFZNA7zSA+iF1)ksUfUp4L{64Bv4Xf&z=mQM~FOsU%_8^OH z$zg+IU|^teZ0dmwusb)qL+_Cb z_&0}<7r+#Kt(lYIihl7E6A2bk8D+l-;Bc-Mrj~d$TWRzU-EXvn&aL0ke;yNL-l$c$pqS5oHQmqcHpWjo zX=z>;)4#r{uj?bEnF;;E>vvCgdcM4i+PA_uMCkeGMRQqplY4|t>2Q2_dKnRXAEs7q z0zV1B>WDKtUSXoGdJ+yH54~ecpYl{X+PqYA8pArjYd5ip)~StMOk7u0MsD z|MCY%%R%o(bU?Hq@e3~fo6r95hYYE&^x&ozO0s&q`ZI9nFF(ZZzIhTf4J5lq(g^=Y zAL94_`ILb#k=oaNuX*PG$rtjaq?x2xp$^0R!Om}Z<)QzF7r_6?Un~QdEn=q#>+fCv zCtr@3TM7jAN=jA!kCX@hd2s&SJ@~)$;QZ%d0WaqNTUCbm?4+>APfg#*9{AsFW&b!J z|NEc2uy`!Z`^V5n$D+@P*TDGuXz+g$`$gjo%EKk_!7AKVA8$#zhx~(j`KNzmMgHi| zZ-IFEagIwU3gV>Ky9=GakF&q;1pdtztpn%G%_C~Q#$TLR{~)pd`E~g-DcI{lP~D*e zi!Ho_0oT@ zsee`^ftiZ@C8)rK8pp16nKqiu<_ZHj7-2fr|CJcdo$aniQ=)KWZtCwibzkJqZ2EN3e|mF(R|Ng9Fak{+X})0rZL8Ph z@_(%&|E@Fo{qJaW4=eZ!k8gGpg&O*0wmcxVf}+qC%x`ABuT4JqC51ueLJ+ZQZ4<}L z(#8(>?n6ZutAh=Sy>z#qZG7a?s~jad%BI8~eO>!?<+od6szA64+*Y;SIZ@ZOVrqNwM{cNOhJ$YP_oi9^uRrD(B&(FU$PXF{2aotXRisDl%c4?#_I`^1q`ldF${ z);OW4y4f2e=Ux?C_I&@a?f9WuA?3)4^J1ESs$r{|Y89>6U1`29CCRq)t1qvEQ_R~$ zwdohr*9euiwD6=yl8}x^o>LRMI8r;#?TPb->AdNO=;`3nP1%_K|H>V`@agEW6)>FO z-uC&uB&9qX3AlI(f{Y)|iG50j$a8A
  • $9qV$|dZF=jxFeUVu`mIoUm6)VLdJN&tMlDVyKrXy;FX)^F zG@Y6Qj#YQrfmEhS;H*mePNP<$^Y4-VAKnNOkmF7bdZmIv!zxs?+M)peCS$2yl}Gy5 zU^F3X0?LwPsoA;RI9zRl_!`REp;w@lnbfH9RbaX&Q-^xH{N4?Y3o^cqKOTQR4d%^l z?js~(;aI+zpu^|o3wO%J?YGgAF~%oZctz*Fha27tJ;&aaDuD%Iuj^H2*H~?T}dhd_w zt*KX@MQncgrr5jyGLAgPL`s1F;PBd4TD9+S^-aJT+0O(ApB4(Y}+e6gyoO$pl-{h8Q~Nl_&szyiizjpmTdCAjtVLt*AkEVemRL z&q7XlW8F)^0L8{s!)m9l^Xyv9QKvcD$HyA=XNucS?>)$v{1DS&FYQp2$QH$JAz+*{Pp$4 z1*2-wq`{hc=p_Ykm_WkMhButoG-@aFEg&rUb<|k?)E|COa2vp&6J3r13w8Ojr&}wh7Dh{|&fi{1?Qm3zzMg*KQ~s;i zJSMoS`(^Y9Mk~1m*xyEbB=9pKE{$_d;5i=~3N?$_Q57 zwJ;oD9j(aU%))yZx-RX;u0sC|1YjanffJ*~|QJ7)w31c56n91{uNua$tbcJno()CqfL55FEKaNgK z+&jUQ$IA>qbxI!ZB~4FYf-J zWq^!d*c>q1=~Zx{JG8G(qq|Ud*LTkE;pjR&R}@(BC73R>%>tQ^c!;RUPqVE1`}Lj{ z@)zr-KA*AFrsvUPH>|>MpuM29BYT)nizQPGpdMKc`wTZNS(Dp5_f-EpgxZgIjy>x* zE?4lNM*HsdaiMe&)D^3skuio06s<$Tu&&ylKECYW;m0tSpi<>dM7C<*ZZ{kyPQp{t z6+ahtml>t8>|XH`BQBtAZ4Y)zKfQMnnVl{4U`Gln$!;Pzl)I6d;o&oRS| zKz+dYxx4%dllnQgF!kQ+cEPllM4={=zq#EZzWXop0v?w{-O$jlbXuJ3-PV5x@MZ>H zr}C_Bz1tvCzYGs+i$v4k#V2EPiZ{&c!sJiHJs{d~o`$OuO; z>t#JdJ~3gs{^#Bo7sipHLs9+l5l&sZ!XO5&1(N_`;F!e{ZWz!jM2eIMhk7_$CtOoK z*j>+!+W%DDSg{a!+{uCwK?-}d3Hz3GOFlGnuEc6v9k!!6HVZaem4^K1*1=vnLQsWq z2tAHqLm$c$(iYDj z0gSGPsuFatl2euTn9+)9Eo+fJ;C|4YKQ_T=?3sC9xF~@pG#|bB$$ed0qvWf{&#!hl zc`-IaMa>|X_nrCPswyTC$_<4fNPjinmvW@usGB1uYJ!>iV?|1=1KhH$2l8SCmnEEs zmxc?q9ybIntl$eDuwp^$uLTCdX12Z{P35hBGrA_pHS5U^-rtL{hL5<`#hYp&A#sQaxl=A>{!ODreI87g)J*U(iI z0hcR~J&ZRQ0ssxowW?!L3R5#} zLSh|Ck#oTbrB8=7^S8pG++RIB-Q@OH#b}|T?itLpsRpX7iH}*due&NKrtY;EXy0t$ z-hpY_KV1eHCsn*|k0faYi^>*~(E%#)p66k}G6JmQTkS46FA6vH=+iQ=VdJe(qbe#5{|2alK?fUWh^qHFR^r1$c5 zcXo%zuC%`ySby2cvK0Ed`pVVwig=?JGLv0Ao)K`iU2*d4Dis8xSzwUcOvxjLEq2!> zW-&JXRy+LZfqp6O#Dg<8)O9^v4q8o>ha@~&L0Y@f;&F7!YNdXb%R z`utu~IUB4ubuHTBth4zy(tq%{t_sy5@m1J#i{(m-fl0fc`y#HY$Io0}Jns$p5A$1| zhKZMxViUS6ovjRd_{ZIOu<-U|NuF)Wac18U(4b$_pKfnUcLgljIVBKH+h(!6o>9=R z7EO8Ra<(;NuhaNc`<+w#k8 zYx=a?sx>t!$Z5?i{rWg6sk`?X1ByU`v%&OYpS6WR(t#RxcNO0;&#`Qtrc2-N%g~%hjPZ z_p=lQwo%F6f;r=+151KC+0v|3yH8d(iL5j}hss~3i7AX=ROVdqegGT&fIm{Nll$-n zv#5`S4K$gaJipb&#giyxVB~cx&$JXs5-o>!+mDO{p5eX%nq8+wUFb2rggiV#yp1O= zlDes9RV0^vtx12)5iw(~L{3ZfOqXE~p84X88p4K9!X{;3 zOfZ_wV`Kg&JTkX-@%%#uaf@nT+}@-~h|d-%-y0n<0AK>T-~MKThZ8|rG3M%QK0S#T z%lX{H;wvNVrxeTGBb+?^o-iZ@1jj!HOB%3?;uTaOI)o?KnVpm@dxsWAZ zm)_TpL?56Y&<+(&F=FG*-jkKPNSlLWxc-F6|E% zOmV2j09%DPE#oNyFv-4m5_(~K`7nK%$d0fdx!nKESgX zMQFJ*^OjX%MDT_)Qe)KEcZ2ek_v%Wf+e8#*sATMgsn@co+2FA_L3lt`H&Mq|2U9Nr z$I>AU`NaSu@k)i&$F3IG)srh;gth?**hC@qbdsNZ*^t^Z$kuYY$+*+bLd9`d;OpmF zU_zv?ceOvcGqhmp6V=~u7A8MalphfTEmTYZ)gqba3Z!&78yFkE zqkSX331AhXwygJjGgWfv)zhEJ&WVpwg0$-x(5c3${GN4ObVOTzV+`+{!Octz;F39h zcr$6oKRHAeKk4gFB*@LQD7?biwibol=ywPN2p1F{ld$>~=K&@UvyJa28HoL-9b_}& zfuTLGp$_B1>Ekq$7wc#-EjzD{kmtr}l8H#OP_1$q-8PP3S2?{{NU`!l@ysVGLEG_5 zAd=;VNZWgHQPrm$Q)n_tF5)Cw4sVeAAj$t~V5N1qk2A5e<&U|xY#hv29a+){vV2m! 
zN|w>Uh~Pw=VME_ny`|2=z+C;B2EbLj^Ban3<;D<&sl*aMnuESjM zl5*V{DyA>dA&PO-*o-~|nK$>+Sk2uJV6rOSpPTS8-(HI@T2C-6T1FQjt{Ec8j3|$~ zRZHp09+t&P$-~nVB}dp;yeJzux+jjIRLG3zM|~=3spQcKuxwEBl>AL?{OhAL(O0yt zr^1wc`;*PWBkFO$mX8yMC1v@XAx;Y0x$Da@7K{?;$4qP4+8xUKv5YTRN%y@Yyq2ady^i+!gF1n7m*w~kjHndko8&rC&otR3lno%Jr6B^sy#wUYDZ~g( z+-%vICV@ZqivcFrZ&jri?I{2uuw5e6FP(4=!@&R^!FVawL6rULSG_||$6I6SE!Ve; zo7Q%$8O(pR6(xzwCWeD0ra4~$Lv#s8!1~CeU6>QYk|Y~1)lG$-wmlp=KYW0?(isS5 z$r7^dzHim{k>yf7&@rbukKj}jpa7c4!pHv*)9?V>!_e_WS1-3im;wLcUS1^_S|$?u zoUOIxp+2;GI*a#l?>VF`$k2)qyDG0;pvSo^xolP%4ER;g=9(=V^cUYL-4L+WjI>Z; zj5GI!O78>dL%fdM1rf?D`Jf>r;ZX?w-fSzc^V-(^P!(Se%@4lTDMyr@Q?d;C4H0~e zBWqvQJCvRurI-foQHyDy@yYqSrI*I2{eFyXJk#yq9{r4ICwOOU@%|B`GMiuehKi#nwT?GOHs505><$?rrc2b_fA5}0fI)uU@ z>d`ch?$=^Mxs`dB-p5fs>0nk-MTmGM!S!pNKYQ5I&B^Bfc+M<}eZlfm_o9ERf@sUgOnS=+N&}Tc`A9`9#Y(vViO)80fetTooN-W{EMM(9c=l^=#f(MR!=%1gIn)u^s=U3UXID?5OjKVY3d5 zI~AXP_mZmC_@#U84F~j$f_Ci2_7f)k$5!0XpMETy9lt2r6i9UC_9b z=J4u8OB6R-vNPt~KLu*)z!83eRYZQRnxOg#!Sf(HcindeIE}SbVW(Y&cScxUpMC%m z7IkdoaQm%M%0G?)mD&b;#YoZlt;6>wc{%g|9|>rkW4YR3ROv0iznO0Gft`RM^aau! zQpsF2vb0l6mjD*W&{*NMa0ybtLJ(UPwQW$EBMk(G|FOU6P}&BswyZsFsmN;kv_kl$ zikijsKITWAdO9&&@mO#JFB7wr-J9gW?pnk`1i(c3Cu*+?egkioXf&D6;Rt5Vq=Ch5 zrt#I~>Q*!_@tJqK0kE>%X?eUSSIAD+c(FI%GG=e@)GtX2XtUsCv3ES}u|#T5kYvHh zxAlO+8FmtFi`oBzvwT&m{(kh;`vjR9@+@SHF*qoqep(spr7vUmbCm5zd1Ir_5trR9$Ml@8M%Z}B>; zX=)S%IOZ`K8gnL^65?2ZQvKxrtB%f zggo^Ry0mZ#v5!Qd5)buQi*jXQyqKAg zbf%gbG$|evEprcQ+yygJjG`7M$l0l^YJxnnicrT3mXtJ4ena7HAqh7X8iJUyy*j%e ztNHDR4+k!ro|nLzrT(~WaRgZZJS+9ggdvU>02s~a0&(pk^J5S+=^u4t-A^WvzD*dp zCwV?>hl)w`$IZMIa5E=9StcZnRVjjT`EFw0J@T*g*VP|_27*WAyH@0weo^&yz`VeM zVaR#m>m}%Zp=A)jeTB9$_?E)cDnng+MbFIz4nV~%ay<*zzmpDBjdn5+T1Gd%a0Bqi z`Ia}}I(fR+)f(QK_t*H}th{AhE7rY?Y=;o+o_jutw^tK-vj(JqykN@4;I@4!v{Zl@ zjzn?6UHh4-u*&CzhD`-;P@&-!E{I}uXVXx@-73Mg2QQylzybD{?$AT8ItHrd6+ll$ zI2-D2e0xkQ|IB`aZZb~5Gk=5C4NqwStWe5Bzd@G~9O~gJb>fc6NeN^#AO&yMp`&=V zrGy2Qd^f&QtZ4hG#4pt3Ur?|AI#fx?oOQcToD`fOs032X6MOnzT|(D~kIAyOk7kTz zVpQ1YQ~g-0wnmbHR4A}L-~(Hl&nu(yCxsjHeT~KI0<|5QE0?>Ey>QM=gZ~_#Vs#<_ zDUjkpZOsz{GR!UlS$Cu-FT-}&asEk_;!ujwY){XRHVJS-VkT~+G3&veDeWiZz_JcPTn<+25UZJ`t~je4 zmwQu_ze{pLL?hg=*si^G;WUTpd87htpn20y0%&Fyonqa`;d1+U)Ik$w%rh~rY&5vK zLkT^~cV)=d*!voSLPa3gIF!kO>E&Ub9=Lib@27^7`B%ze&Ec1;;(|HiWg@`}R)E-m z>CWA9q=m96j+yDjKM3EuMY+ZFuFDPwJY6h0;$R-R)Gzlz_mYW+9qcGcT-9iI*DcYW zN3mx5Y@2TbNwW;=JNRT>sH&%@AA_3>g%RruDcwfJVQkib{bzjOP|j@Mz@T*Ae1B@J zSD{;0kKom-agf7{9_(;HzW#e{pLYQAM0p=|(uC^C-Wh&>c=G6mhsC?~f(8BL>oc4p zXeIx9D)0c2CEC`F9cy-T@uw^Ikxh^?#L4PD;!O`64RnEe{+BVH+m+aU74!_~ zQE?JM{~NI{(8vkkx9hFbLnSA414~MIph37J=0l(wF0nMXm@gdI>|>t11`dhsOM*6E zuewHh#weZPxo%kHwDxYH(JqZ|3Cf}auWD60L91Rgl07K;N{86ggBfwkP`Mc&PDbyY zmP!WW8jHN4pJ?`n8XL}wg5?|=nWDg`x8XRs#Wway^{>~CW}B3M%ODlU!6L1149g>) zUC`io6X5n6(EN*6lZfQh-AQRuw+$Qq6peKKHX5}hux6Y7-j%x6mOqxkOq@hh@$p1z z-Cj_1@x%x9Xe$dCi6u5yCtFwA=>!|p>Atr51yVS-73fwB zGrl`W7g+Hwkn0{9-v&rg8iu1p)}O)T+RMP`DB!5~&M-rnk;lF^T$rn>NAUUaUHFgZ z8_PoX1?`8Pb!>Kb#v`(a&ft6m(Is+KuXZJxum^Z1R;P-R=z zfN5`ug~##ya{FTfquoePon)1R+wkF0n#HpzK@7&x(t8AX$U?Rh1;WT697}KDGtxNE zaJlzoamtZrb3D3N`CuItKo>Gr@Fe_8#sNs2F6|c!boI>yapBOo%L}|sLK7a2DIF0i zvDS_~{Vh-rxgdiV3mX{5*yFzk=^@Rh8rqBm9+Mp%$Tnxr)P2CG(JfAuO9e88?(;u2 z_aQDGd2+l~FY=y+b{e-~Rm&P(j;f#3G_$Q+Wq|u5?y7l0LOa$)?B{<-AZ?e8({t!@?rGW}m zjeG|}MBQ;iXCOwz=)fD}8V{}g8gD~817ta*;Rld}i*e&>N#yvs20;GM(JC)p2m-fp zpA8bh&TIQ5E35@wQHtmP`YE`8D0GZsdCs0g4KSdD*skh@iLERDqzN_0&Ux2(&Ad%4 zyyJM5$1nm+ku8oL;+$&Vg3)%Te-;H~pJWxjK9Dz55ura9@l_;~u*bFGWj9f4o=;)S zcje{asbA(BG_PeyeD>Y7z4vF%ZZDTb&f=}Y=mr&Hxo;pbG9NnLX+WUEU-i4hM|V$k z-hx4l2WFoC#ol{GHMy<(!-@zhA}AtDkRl356Y0_dR+@kXLX)C20RibH5JUt-q)6|m 
zG$|qUUR9*`5^93d37wG80{vnJ3|VE7KVUd7PB-Qs!b+8J{ZB80e-ShelDmUW z;nCW&F70~dQ){1%e<1<*SG(Z3+!s299f$c>s0{g$i|bqzO~rrW-Sd)jKQul;0n@dAo4)@0?|uW+Q7|d^@4NXYYxv)_`PZ@Gziac~ zwfUD(^WS{q=e}u;>gdbToJ1LTMnnrfR){S$=YVEwJ%acyE`tt7Gt%!wlQwp}xF`{1 z`HD%9s*CWcm~BZGslS=alMeUEd!(X@r6#Avs3E9%KBmAJ1FSmgyeVbyO(EyKje)GQ zMSqUf^{Xrq5GWXhoSF&qJvYl{^$#1z&BnD2B78KUN! zgeLyScbbyS6fCe8wfZsY#en5lUl%?x&aog%jx zv#qOWVQDV6a1yUx5wQ*^W@c8AO{i>Pl4gKrpB*^ss$dOiKzra0;A6hawY#byT7y3BkZMxcQpp9!4!eD6S}2plT;ynlNGy ze0Ev@Zph9wQ|ZaN;7kofALzW1w)g~Sk9=JjG)s!m6gCYv19T3fy%1=QLCxiD1l+Qw z9r&8~BgWHngTlD&P5sXbn<`XDI&Pg!+#RP`b7hfnQwKC-u33FIMO?>vm>OiZuz&#& zsthFTHgdHSBu!pt>sIvhG4QD?SoCI_i-)O0->LDl-gKp1Ivd&1PJ|EbnISqf^ZR7=z z_k+~XYT{OmTtG#gu6BN@rpIMemVv>XmawFGr}7~K9Ugbw+d^|xyBQ2-bi?v?hF*W` zeVyx6spfH*Yyk}urHJlZ06tKiZyGt*oe}14?$bPC(t*$rTVNRePYSEWrbo_T}M!stSu5c^)J)Z%GS$lkUPwX|`YF>rZ zJ=P49!v}8g8Z`t*We?{2^tU3nmMf=;?oMVMi4im{;kKWQw>D3llO8ZLsL`p>2Tk_z zUX^I+LH9?ly|+~<_Ysc2$C`g1lf1UiJd+(6L0v0%Sm5E}e5bpllLYg*6O}un!*ZUU87j33^dWJTkJRrN~?(Vcq)M+A}+YrMS&N@svl&w>b@` zvJ$QK?~~J{3lPvL2vk!YQE-o~Xd{!xkZX_l@C7asc}v@fx%XZXjNEOaY`sV1u47#ahYfGf1r~I39=AUfwVjhl^E)J25)Kh$$0y|UI)B1o@ zA?Q{}vN5X~{>Ec#>|(M+%D1uiEbIyC_FRyq@BPF>}OJ5t129!61cTgRg- z#-wnhgF&^x=qh|<30%Ad_67k%>+nMquYBo8fkAix^lILlwfZukz}8)9($esFVv(Wc zAFir@^ChjA7fIG9Nx#Z?@-_YY#gF5)ZrbC#AfCewjC=q!WKPtg(cQ9gDxxJy1|4-E zn39tB(dp8pywEVtJ0W>cmxd&u9Hln-*=};jT!{$;0=8}-$3lYG1?ko? z*sOJVgDlUg;RGp|4$6CTN-~e{dTbkidvVw_&R1^;0G}0Y4nwD#S<9K zMrr5N-&Hq%zZH<8*^!vWV>MJLM5r;^wy?CZ$Ak|J(I3@!+0=n57bG=DbJ zA-flDXVT&pKP88NTbTzMyev7PIX2Z6! z+Sh}l07K#d1DchvmPqrxpeJ326ssK!klUM){@Vo$s1hV!uIcse4@uL&Zmq3Vh0tSQ zPrDBa`!`Sie~pT3jZQ{ig;%~QV9~dVV!suj^o{-2%?4^B@OaC&!a{@d9^de#rejui zk|k{kmTe+#9Z`Y~ZMCW97Bu=Aj?#yzmHnmqyZ zJx#Xp3m<6KoTEa%I+qXUk;FG{I_)=zcdEZSat4FMIR=kHOnUc^c~Wbano(xlD@YUB zvcaFLp_#Y%_{|saHzjnxs>NQk(GiahuRR$pV*BAuk*Q(Vhl3G-EZYdi2Gh1=HA#{j z^oYTy^XUif6q*)wWtG>MiJrtX8QyFk0(9lcM0ehb=uxeyqLJSu>vVPUJ)9SZ5L|jEhDA){BCXWgPRXu0Ws@8uqk5N~mHcsjE4%Ko8v(CQosk@#3QP zYg~sTctbKl1)q-HsXvqEPF8EOkf%u#HF3)KdNVK<*g}K zX{~(kM#$3u!p9-GF(4!#0&=F<#JM~Cl16WkLQiTB#2B^7eMrrt-^yG(CO#tupw$=; zLW2AKtjNX3WH$`7>sX!TZj~f$V+FXMw%s*~iU1idfbA^w85&gCC}G`K{d?`(#3s=z zK;Ch+TC^ub&ket5mH5#RSz$2UmcV;xH;!N4Mjxg7E$TPH$I+bVgDk}FV#XWGuy#eJ zBpDbqQ|mnkj5Co&DJoJ`ul%ZwsKc_M@bFvfM#DB8k@jPn^?^*Nj1mSBm-`Fj!3i-g zBIK!$E>+A#H3C3>tcHiujo*n8E#Ys+0WpG8yI>bojDyK&j6gh#l#hLV62bj$Qi7AC z-}=OJ&OkaSN5=k8J{WnGOKZJwe+aw@S z=;!r!lVdFE@Nb_E53aX~e^?j?nWBXK|L(-NtTRa1T4_Rz#j{#j`fcu>o z0z*c%;S%Q{X*6ThhG`^UdpYcpw-H+^e0Fup=p@v1&N;6GGOp{(TPs~LQL(B>spE@rg|z{ zDUydnuiR<<>J9LSw^Ck&LI+ylv0liyx_YQZ1mk^rzR z5*!viUiheolcYa40#eY|;0a5Oti|+|oQkx6W)i=C)7Kw5a5F#^AvqM=#+99#AR5-G ziABYYmOCw7u2&9UzWTDp;Hr$%vFX>cOPlW?QWaT@?cE`fXFM$APGZduNeV|qf)xHV zwfLYHf5FP$Uz|6VB>&E;dq3kl@jEG zqNB$)zDZ5WIo9LJ^LOd=&7C1P->G~)zHWg!Lw!WT>U)afYHLA#@*MT~MHs0zRI-O1n4it6VH0 z&7r3s;dJ#Yrr4X}M7RLwom@@x=kj6S-xl46*7Kx0E}->rH{!)?o|}zF^24*1fT1ie z?QA|0aRB%Pa^FfqhoH7{r$i8@>#f2aR|~%B*A!=-k~o-HINROysXpNIy6u@KR~2fK z@Pux+zWdRVn8wVMiNcxFV@LZ4?Gnya=$6RKR%HI|b{>H|A@nnTb_7Vw?O*7Y{P-_k z!@Tck^!oh-q*PdOx+zcYRW!E)O9T4ZXaTc0xrSq9hol+dowVfCUU!d?3q$kcMa8TE zbre4gN)ilw+0(e8yTsWhW@|FaDaw$gmSqs4P15|kX|{j$T5oNQZjJ}9#?Zfh_ClKR zdyKgTwPF~jO89m)u#p8|J$12c1Dh>;$qQPWnzbO{C+c=ATd&}DKT?+qP^cef?!O zdnF4kxs=(DR^2R2xNOr~ny3Czy=n3LiRC9nkw*>OPY$NpZQ;zDqNGY@g*~S3xZ?5} z=AY>`eXnm@HRKYr8RoS8{zWhjn7lTfU;y4Eieg&6j6?p}x6%)&eXMr24rWE{2|$TU zusJ@PM>4MO!q>lXqg zrOuC|QM@qR;wLV6xpk>7|4Kgi#OvS)u%l_>eD7sc=s{ICz*KAQ^D_&aoanu&uMgdt?6ujHkZj(ck_C2I6+%K z`F$-(bs$MwBl=8h66FT{i*rLP= z1Djw1S@eM6VaNE;9B{uxf<{Vgi2i{%Wrt(SFXv#>ju{X0J_HYHLTh;wG)6D|ot0fD 
z1J&nS#r{LF(`_+EUsbnYC@1JlT69vU3Ru`9%Nq|&v@acgah7+pSy>>agvBOMv-NSH z=9_Fp0ZTkO<=%w~WgW|%kxxwScioJa_8%&n_e7VBz1&}$X#H}AT#qdVydC6xPPqu_LT%2;;o)@Tj>i4oc zT}^<%sdky5wXI@ZF+b~+VGyVLUw>uA084Bz0 zjEcZ94d0-V2-NI6W>pLfmW|ZOSqa!ZfrO!Q zzDv&&6M(!r=J~L%EvaT^mg%x zmb-MZ!@8bi4Yhj_w^MbaF5}&qc^}?KxH&o#E~R^yaj#PhKy&mL#7j^2#kn1lzNbOW zDCJi8<~PU)8iA_Rddr^19t0)lxtDLOZo_I_1?}se(-|tf9tmw;MTy`D?Qu4-8ut#n zG>7x^wq;wWl$4jrPTa;Ng4Lfbd%}^N7RVG3N$kbn^FQTO^x^WJhF`Y{5ZMT!WOY;E z`dlOzBtCMR@7HldUKOfx4YZ=>y^ZHk^JVrNH@?0xg>>d!CwDo1maasBEjlkW3BKyz z8Y|HQ3Dwkhz2!?3ukOUW*8`ZjW}S$1{QRI1GRGuP00FAnnjAh7iTQEh4?eJf6Z6Z& z7Nv4Y`iq(B2%dFDk>z1chD2FU{pJX6qfcaqkDWQqA2@Brn9~{J(_?*6{oB(P>?l{R zQyZgHafyar@dW|``dN^d>#g|L zSL_j;Uv9v!(~*2KfKWg6ZlOl3AgQ^ZGmkQ~R$9+E>S{kJG_09(|JI!N(RXt>@$^-@& z?hvPJnJizxL|V6e#~i7v=G;l>Pcz|3P~WNDC7N_I7!0%oXls}ZmBHZ)K6@T3EG7GI zeRFj>DzVC~%-L2s?T~B7U5_Ok`IN3kVti*bMAYc({2-o?Q@ozy1q^ZrrlX0IQ6|wE zK77DH>07%*$T)S$NvdD#LXwWySn<*Mfoi>}UBdG<5%ca>LEdiw&8n2Jheg(;$e}Au zH^a?)CYrdCMAp25=-F$H8ly{Zk=4D47^yT5>$TSyHSftuIK4M>p+;WJwD;0kbI`Gj zC79J}xAT(j_qcuKFaKpX!tQ77Ob2sw!_+!3jhioM17oe!7<##ZBFiXkNb@mju{QPcGj5)E*ro8xhe#a<9iY5F zF?!u+$Co7@0=iglKS5pYv;vF_t(DyfC^PwgqaL4V!a&pG}WFx8;SHq-^hs3V=!&7Q#rlbq@F$L9vXXj1L% zr^RHz(~!PYgh+aCBp)1W(qq~h(!k<5(rTFm`fRbT)Q6}`x)&EMbQt@4mQ{!Q5h^v2 z{SJz}>p51Rx7NEG>Cth9KCX#vq4l^v|1zjTsvo_Aow(;g7Mpd%u&pr549xdC%(*f1 zA+%ScY(7j)He%=Su>n5NGztra9%wcPK1zj1YBRJeHO?hr_lG^fKY>;JLv;(jtX-gA z4m!L_$enJ95@>l2CmNU257`b4Q8T%JHK34$zB&+mp%IH91Yx=Q*u*`j#l=3NfqJ^H^`B;~UQXdu(ApeBRs;rag0plX|W{f)&0|hXWi3 zaMmX?z$gs}gEy=4zWJse@k~+!dGCgW?r_;KXfyIfWWT<&m{#W_rxRh(my*rfc(PFp zS7uKpn!SV~U_8uD$klX|txddr>3^yz$ZGhew*-1hpw=EguiX>|6!kPmQ7zD>TZrY9 z_h&q5=c3ymH}W>j=TnI5BGxqQgiZ zlTGD$kchoJCu4UL3SSHCD9vW<_fE4c%Z!}y^$M~AhdVlkbhv{eaN?X=4N}#cQ*ro{ z*14Z7+7AsR+(;Dmtd@Q%^D>^)Tlw;%amh|RTk0?y3S6G#P21wnn#-D*Xl5OgfB(rR zL3>To6D>X4pBfNDWBNi#rtZn;X0^_Gs+Y#8UW1RHCCUL3L4Q>{LUD_*U@5T5FxEw% zoNXbPjM%GG;T4~C{Dve}!%D(8k#27a;HmVnijjOV!j}4e+8Ik7dJ(RfP~>GtgpspO zu6SzsXx$;olxxJTw}~|co}L<7pgl{?BeRa{5i;&%Go!0P`e0ycl3Mg`BN6cUAe~2M z^D|u#lS>JXXiXxmghlIbBtPR09vga=q=>z?CmXX9{&cJB-KqFfq~qCGU&X4@Z}|z* z%O6dsj9D6ug3c)J8O#>CQk`k^rjeOw9YSvH_1x!BinhGLdp3|s7M<>HeWd|gU~l@D zBJmF3pLA#3piJ`9lgCYgmTLIx2%ea^i>fuC%sn|~`^BY7gG^iqx~j{Y(o?RH_3Rq+ zlAuMdS!3=aV2NSAfAZP)fx$TYTvGO7xC^{Zh`UBveVih9Pj<=XF5bN^XEQT*T3hRL zUtm*(!Wigwc5fGS7zfhFucUn}>VEAj6Dgo;dR?zRJ5zLFsD#FuLg)59-u~7&Db0R_ zo@GB4lBuh9@zqC-LLM=R2)M{dcPf}2=>-Gi1i1V)CahHp4Ak!Bs5OOeIxgD zUNI`I&ofeogZYrTP8)DyIP2udY9q=rx(kALN~}i4T$em6b%_+A<28kg8*ZLkSIj2z z+98T)dpoi2T9v*`jnw7sCBN-w(>PoG601=jMj`Dgi->~8N{DglP}U4Q+g>I5%6sj_ z#DVo;8>cagmPr1^`cN@FNpnMw+FqlD$3Cyl@IQQlTAmMXew3%u-5SkLz12 zRmtruAxuKvzusFaQITsXQAre0JflOiJz8KE+_7!(*95BD4yDXa|ylU}yr!i)!#ElpvcnZ0+9hx2LQSBm221x%5n zC5eYgou;Qh@FX*q1~+IZ@k>#p0PMm2Ks=y^|=+3h%2LZ`&2y*I2w$3Z5P*vk$#$ z``EQ{D{&^!?wyLP0~q$8cJ)VWM%6Jk2|a;u0i1i`^}CfWD?3nyW4P!Spum8k8wr=A z$Bz`+ylY^Z*`4_dE)DM)Nq=tJcT48*41CpkZ&*L$S#o4%-E#yqYXLNB z&`4S|H+gS%K#q|FcX^K7&qb>>$?bucq5NTXs^yWcLlyNla3FqZ-uK~_0-D3<>zfbs z&!6lcw7>dK?dZNtZpWw{zumbbY=%d_^dIQZ&wBYZ;}pkrJ8EN4eFI@5x=@cix?gY4 z{IB$#?ezX_{PMfxV?ZOOW%trz8XK(8RPw;xQ#cI-Jzl;|`1Pyfk8Le?}Yer)+1XmIRB^rWj{_>!tTb~aWxwDXP64>CyoIm6_ig~RJVNF|qYF5tZmkexp@ zKx)VJy*P|VgT$JBiGlmn2bluWO!b#LhJVzKUQ!$+9B)TLIo`$K@BIqWFJprq*`Ma} zUUpF3H}w1)Zw36G(g*vGXe7fjnDEPxvH$z09{cR?)V%N4r!W38r0D;w-hY?ok8Sb~ zul~C<|F9r`eEI*wrTOsaxf!PJhVz`;w_iut{h#A-ko?7Imy4ghuUVg5D{t6DS#-@7 z0Zv@l~E4aP|?&CKWD&1MpG1-M@Ni{PFkX3Qv|2(6HpTJap!vKqq=Y z{~F0u_9FfVB!~ATt(L=yodz27zo3%vf8)^4h+MlL8;vegaQ>|j{c~?!|3R0ZLGs{V z{qd>)@+-srhJzr(A-cc6)Cb7+SLQhvPLm$C?-Tq_cFV7P)+_trK03hb=- 
z%dgmQKQf-(%*Vlh|05kBySwu(vQFWduU05R^%?%JvH*=|AVJ}@B%4c2`>9B z`fu*J{U1A0wZFdaYcdD^6MEM3A4)V2kKg&*8wRZCOZ|PA_5YV)W^C&4?&dVBgyWom z>vA2XHQa3eFF6wY5zE+a|H$lcKJFyo2XUK)WWpznpGgkA^61k$!ndc(K)CsZr z!-$!>kLHJ=kk_3A#cHUfNRhJg0Sg9 zS~CH&m$XxCS<)faVGP&FH}$w?p`OJ~z_2^!u)L86;75G-V$ zZ#ni02gtx{g|0f}pttV;6>qozZ(E!geu1`aTEBHSR>E6@H?rA-Flsgx^YavWG41P; zaW!W(=xJQcZ)dDNE9vs?$vdtOX?-x9Ii@8N`RZeUc1{M)RE0?qRHCgyoR5QR(Ew+O z-a*dOsYZz*fP(j2j_l^;QMbnUvb8qb9Iaw4lG5AM4bhX$2RiBl7)-u=6#USa3oJ4& z6j}C%-1I-?Gk158=4ViGA@8@@fX;~?*<~nU96Kv!{Laiq9HRVu`s>2LpBEKuwVy?Q zkJ{qyQoWM5_cyuc=3PYjq{ zC<5Jicb<)_^v)~_N%|tWE8ZH~7y0HZ2k{Ff=h69ywj9 z&8=YTn=E&Jvl|<`)^%VA=3}EmpK+JMWp{VmrYWN2U0Iro&Wrayv_B5zHxmbd18CCB&5f5_j~bP~aT*|# zLgj@_RtLW_7k1JMNbki1DU$y5;P%#p5;;ld1?xx@Qdc)q1HorI{#CT))2B}! z`^$82`Mvi+ zh*)c3wA(ECKwzd@5swA7kz(0Ds_rpsu*2<3JpHah{`B;)S%ACh9#hw$pI7du9~s7O z=7xfg||1NKDfGG%@1PV=Pl zMU&=mQ?yi9wUODT#ka>W+g*3pRA;1GrCxL}Cz(#2`4YM+-)srem8q(xg{luzlRkNQ zY%;z*XSAjzf_E>>re^fwAxfGehIF8uNP%A(cX;by-ZO;isPpzz7}&f5+dMSeqx||b z_Z?PGx`rU;s0W@2`I@Pf0;x0b3znp@4`2ce>z(3MxTJGbL3aQ^7(Mq0-aA`Z8&cbe zTl+&P@KRQ5PpV;~Ji(H?eX&NoOnPJN5WcWR0E~Tu=QkE%z)L4LUWS1)M#wbR3a7X; z)E+Ov>vw`RKm9@K9S-xJw7m3n<(mf+N%Iqde!+#2&9Irhkpg4auR}JsL8B~aus^0V zsR<~5G|twjDI>~Jp2hMfnE1FCE2g6Pw1sSg8V2`qpQMYYRKn{nr3((@S;e*G(V;Kl zFcadjw*t&X6nyQb%!YA-j?3DL{9!NC2Q#B+$^ zBMgb2mz(pf7Q-OUi}5?l*lwo20ps*y&+?a@jajXSs=xZuck0IO`Z z^vcHi%9llI5}W}>SSVoNPzhBR(iy)*>SD{?`4O!@Sw6HG0h~Njq^BU3DwMO0coN@t z&71Y~c4IaHI(C$bDvTP+c0+d+7Dn8u# zKo-bXJmF~xCw3p{p>84dP&z(JNT7@=*%X@E;_g&!OSFI;$f~gDjI*h{{Mq+7Zg#r$ zPV`m9+kt0!A3d5*xHVOz)kTx?i0GCNybd1pWj0bSDWlq%@2~=_T^fD#cD9;qjHH$C z?D(A-TkNhf0E;L1A#5RrxbN&=n^SKk;aUU~wdbCFWNWB_IC&AQ-!ieoh=e_FIs z#%k-%SsADJjGWoUkJl1|voz_dg4Y!Rn zW|4IBDBYGGl4|{Z2e1LcNu9WZ!fNFLeUkyBX?CjB8H@!`aOX)@;;DeS6rR`b$7e&uSu1SYZ9I$e3~^?FtT9^i=x2WH|t3G4EjpTuR>len$@T(gIOI$fFGX&zvVIQb*0g1#GNmyAaByAu8$z z6BWa1!XZS8Qs+r$9p2MH=fo~P9M}Wwj_wR)XYESk49vm^6~+@8;;=ZNkIRq}`SWNm zc?2i`Z*_h;M0rAoMgsHO?E9Ln=9X@ZTkUrl#V}uNI?X*K=A-{S#*=MCa*2$YAofJA z=9y=DADo1I5bO75)I{ivkMfOF29+_1xx6zjVp(3aTC{Zss>_msHV|;U!EE}iBUXIi zNRq=6@HMJpI$_+{`3+*kxQ*XcU!7`V&PfVV?p!%KG}oWO0n{X^P=5RKiaYxj0Mlv6 zSrs78hXDHH@S$SsN9mPqFW5LqDSp?SREuVIVTn@IWkhT2ZYlt!@x7|*Tvt6753#>( zH`Q));#yL6qJSEU>tdN1hj};5n6NmyT0g~ zQ!v@BYwsOf&X^{}AZXasAS#JM35^bwL6Q&qf!k2jo`&3IpBA%W3i6rc+I>Dl* zhvYNj=LgU;`tVH-SKM3q`L&N41?5kkUCKNry|`X3D}yXSyFUOJIt?Id{U&>O8S!+q zyp0IOI~X5iah~4U1is+?wy6qlKVAMjN6n;?B&nU~wt5sshq#$SU)-*RL4t%cZnW@8 z|IJV?HS3P_qD<>s8vHF!8-K6a$uEMv(~dN56ub$lvodQ7MOO3VzZzb7Xul4 zh1VnnpDbTFE57Ttti6;3*6I-^V8>ldGn>6;VaO^Hd*hw`WN%F##9Vg7 zwr$7};%U@(rSsJzZw`QHF=a~Fz!g9s^zIHj`t9eI6p9%Nn%{qxHwN(~f1-s>KXLYM>GbVxlZ$A@HUWbjclJ!G9XgKor-`~YQY z?`i_v%^jy*Tmc`^EoOeO0JVq^52AyxUO3AFAL!`kE7v^?By72We10K~n$2RmhlHX- zZCVB3)M_b3a+^V-->`}+>9@xAjn;D=r_&xw(+y5EJzzd4l$xRuU<%o`H3dX@Gm3mM zAQZ|IE`ZH`z1)XUF(3C`zts}6Wx4n~=tK%Ls2*mW2DX+T%n(1A>VG!x;b)>AE**Bp zcnWA__BjZI%8^L}$Bx`jayA=J(`A+Gl_LdPfVmDo2V|=g;A|juvYO}@rf1cZublo_ z-9J+%w-&3lUmTI(*4j8^&zef$aP$0bwV2-fM9bw%r)2~&9uBrtxdZmm;>tRU6|V7p zDBCqMvj^Mj0g4Y&Z1yY(l5QO(KxB9y3ZW{?H6t({pH_2x5tSar?BWzTYZg#W&A9zC zD(bQhn_KYBKi^M_P6s^?CIYOz!t8wx_Y-=&-@}pWeQyGKBNizIu{$sQ!&3HYv7S8$ z?c{Qez%S0nWAHtcT3GMwgnl5e?YdWWy**nraZNr$J+(Z>b8~XOAM=`05lpPwgc7t$ zpZJr+N%?r8xI;i}wJHX6luR0OP7uxs0@|XjzoZy0YZJxNyU4~2a`|zD5=X)v$9`Ol_SINr$Rz;v)yHyg3Q$9@D1B^00B6^t*~xR##p_Af?!A0 z((WXfBD*`Id}~J9n0@{EY754J--*S#B`cJ@++WIdE_|ZRljz+H1h9#bG>$bjsxTX+ z3tNU0Pp2WKHqRSm8a1{%qQ&ZS*e9N1;p=d@*F3e!YQ~*h!W)*7xloL*?g~wSVgtU~ z+?h;zw^!>2ai$hQ2Qn#~9u718G1gbd!oz(bDtoJC6kN!IaZe7{R=V2xz&?cL;Lv*j zBFe%E2%Tuvn$o3uN-d6|^(GZnd&(p%3c)X$sp$xfL6HneV~em0uV0nl4gP#|vvf(e 
z(5+*j>jFaQ4eFDODP{jls5PI#``4~Y;S_nIAu7MLHEf>Cdy6oBTtS^UclDA9ueuP8 zMfN?!>t3X0gM&wng6*1fPYK5D0R{C8matUIfS}}nX5DL1?{wuJ4iK_#E%LL%wnJt& zW9}qLw4jj`oDOev=F~GD~Z_4H`j8XfmIP-i(^3 zzb$>1935u`B|!f}WI>uIcd+bkbX{-eeQG8HnBjKC;kB0ZBnNTqejD#`{*M-n0S26( z!iVF!0Eq)o4UyXq(oZ}|TD<0srp9p=K6y*v@T{&xYqU1dk`%_3I`pd+3RWo89o|cU zIPqMbGT6c-4scQjw*gM{rLiY>oXmoIjEr-l!vfx4CJWUca*ddv@wVE{Mk!r2=2A=& zT=J9ENW(n!a_>hCl=LIWhy1sT%$5g=PEr1@1u%IooiPH{uw8J~v;9s}w4gW4UbyOe zTts*FElZgGA&o5@h4Y2l@qWV}siBvyz!>#1`D&0GQpN`VMy`AdXS;XZv93}Z62U)8 z_>MjhT#-_a_5%w2B4aT3C4sqer$s|eI-JB6R4cf(jC-T=@Td4!a+YrW;uAJoZ#D^K zcU?U~1N+k0Me+lHRc+hUR(S5|DTVWx6|ec8Ykj@PSqw~BS#^!N;)@>-K9)*J9-l!s zMW9~3{5|5$o>^Qc3CyD~&E}DWrR|uBc1I61kuddpr)W#) zeVGd7Ij#7lo$ZejJ4{e=mQSx{ueR~6)*l`}SN{gfmAIJ;k)kKgyCx^s9#10Wd=nnKq!ml+PpDFcmceNiL_e^kIif+!I^&Qym-&Is)t}q5TL9o6b~cMi5<>m&+lZp$-uKd1~#K$5>ad(NzMK-?P8lC12h11$#IrWfo1|Qh8OX7t7VcXHLwCi@AC!(6Oz2co|O z+A28~ljoTH1++;rt+VC-Xubc7SBoD(4w+2D!`fNmoYDa7_|{8$PoefA=hTaU{YnnW zM*xk_;7SSs62tQ4ta_W!ppmI-v%skD?L!I7$)#OVA(^7^mc84f$Q*SKct%?SuNVjoH5*C6|JB6Q8L$-^xgk@(f*#$Q*(sNEJstz9kz1P+l#XR7j19 zO{YCdTQV?k8lubL=Dh4=F#HM>@vpDotD$T7I; z86=JC^cMOS(A4c&%LXHr+{zR*Z??{dY{{8TFO|xaJH90xtKmi0^iixXf7v}m*#h#n zdBSe4*2&r4_X#oxgConoz19*uIj^O7i{w+`P9PGBJ62PvCU>`&7zm{e9060xTMo0v zP2DElpiZyAbk-f3K!{^WubM{f4oC8_o}tTQ2NI7!L8Dh_C( z0}hmUOvFchZ=7-w!7Tl;?AIu)&rkXof zQcVGSFbDFl!z|8a_Yjv-BU0Hd1LKN9G_v{!^eQ1h5;n}}r{t=0No^a23w(23llwtV zXon)u?a*ZL+!0i?)Tr&HadE3j-7E7JsiSZiv3a)yTpCOVi#`0Z^-9oH&58Ylz4;y% zr^$Fw+AF=KQI3Y_NjZn4C@u-Eq5VDgh?PyTf|TLjmQ$#-PI4F0Zv)-fo> zl7cHh$Z6a@&U|iGaCfH#dB1pPy=FZ}uV`dv&~fJHredHwMxLGbExfFio>_Q-KNa>>QC2DVg7Qh4jo<|JjgU=-j#{q z)2;pl7g&;9Iylo|0CL9G2kTYfsaHO3{c(5FdXQ?Zakx$(8Utt|;VF49q|o@9J))XE ze1X4U!<0RwBxORfe0XY zy;f$oO-jETbM*4V!ka^mGsaK}dOOkuqsnngZ*!qs1CY>8^>j$t&U8NF-l<#93xB|= z5@MFr#0T`d!bJM%0#1u#&v=TevAD2R#Sp{=fi(W6`_4^R1XM3Z9OgUQUuNk{GGCV2 zySMVV`@J4hK|AJU$wOldI!OsFT}E^?8PvO&Yrb|SLnFP~<7O-xb=vbL!gRq6Kc=R) z^doip3X_?2^F-+RwdX(UdJKy=VLDo~_k5-(9RVL50>f1em}IBG&u7|g8|BqYu<6P4 z3BT+}oW->Z&eEOIDOq4M5YS!ZJ2q|4dq|o$l0ULH<9-am^mk0db>3$Mpzm|kBK>rE z&4m#V^MMynq&7Y1;TC)jOIKAS`Q&N?IeG8X{HxV6$fOInwU@=kRys-QjMO*QGD{lNf~nD5}9A$Drc=qGD|C;?%CLmHPN3N!)AmShbmxtlPvD*U8Cw)5HV8 z?_bf=0bYRmX=dz(YEC8IyLtWmf?J!Y6}lO}s^6evC7-qUByM-|BDY5I3mM?6B_)*W z$qI`VAmzkQf4~TqigrN-rDHxXokpfm#1oPF$h#cAJ@;vCT0kK~XtGVN3N(hJ)MU%f z#CNWP_EZFL%bQCXgJ!Y1k_ac1&l!b z>nI_vae>-#v?iO{rOpJ83fnN?QqqaG@mV!7=}4^FpEB=&vEJ~k73}N!tt3?>{Rbgg zV&RuyR!q_Divw}pjbzd~8uAY&NR27<4a)MV_J{|eTs0{Dv_pDaD!cd7R4Z8EJ@ooX zU;wI`Byj=QhKe_cc+45Q&NXp?S;x=O3$2b9V1&=el-SFzC=!56PiK*Z_hND{j5Ius zK2ViIB~okueM0fr;`jDy^2Fz7kv1N>VGHO*7ofwk!t_N&mJ1$DWxoKwpbF8Ri;P?5qh zWN2fWN?a;*lwxWlPQ?0B1aGAH+9IXI90p1%9h*Wqgs}-gSx!x1){dT3@Xp`bT$_wQ zS3zu{Ez2j9jVER->)7-@Zg<(*qMat2>0$AsATeCf*m52f<&9DXN$DL;GHG9S=p9m? 
zbJahvk`X+fCQe-t+h&Gd+0tqt@a2C%;m2I!wy&LxuY!}*=pOmps6q7{N zdwZRjZjMyj&s(Hfp4nTX8?cGzE$nJNezDgFjm!2WKUQPVOa4M+q$|tJ*;UXIn&dJ2 zhT^v^9Ft)rsmm<{Zg}C2FQdF;i1?I<$k(1)>L5urIZ}Bsh;2phcqrGWZ z>e*b>lhQq;PNf7c0>^%v9-1?o7>|zK=O0*kK7DE*CgS|b-BjF$(N)_iZf!$Odtmg| z9R2pQ$BV}xOx!**)WLNbN^WudFb^|NDL2dfA5 z;iGU?;{72;m*Es!?`N{pY5#*&d-i}#oN3Hfv#J;N3KW~$+QoM7H8c;7ym*l&#KC8&`PgTgY*>bwf=oGKlm2m`~0AR(Aw4kbHt}{ z5$5y^qwhk-E(6*ZNwWpe$sHOON$_z*P)hY@-)oC0G5|H;vo9Mlh7C%RYyna3{8mGb z&rBCkWSeVEW=Z&E?X*6(ReHX2*7L^d5n1j|pN@kNNqghxp1FU5imKp2KjWEavA*p3 z?TLD%d0*K^$E{7Yv4#gy|Mg%BU zRwY8E%j^dm?t)7yX|nYvHmkPn=m^k5NP4bb;3Sje?J~$?l(wGwzvcgVby| zN`fuawd*F3N>Eqrf#r&0T=WXxY=icLah#*tw7NtugmHD#eO@7{6aPgqEY)D|TKnaB zP3lW#9^Vf0hA_+H;J33lBvnu<3DWmK!tkAMe+>8Ufoi3eqzj4|m%07E9preKBT14) z?u603P3c~3wcCdv^vzM!&iX2GC80LZoK@`{&j1fx*PYb>jjEfhuNJIRmE14tRDBsv zQ;j>H1n+M!%UdfM0u0iP9fYRTtoMa|6TvDliy8h>9tkNTXnclLZCJX91 zs}$<-)g&KRP;^`^m?~0Th`o;;pEE_vWEbRI$t)G(mXF2OZ)8EkVkEr2?p&~=)hRbJEpi?{TqD8BYNN|fMO)GV-zglhA7^VExzxur zFf%k9*?05{)eIV)O8*{2S_WP@BVzD^fn!#aU9UV?rLtM?N#ktkPjJ#qvoW0gYNjq9 z@7n%`mLvURjFYwT@=bVsaIzu#yaU*@{m@e+a#NJ%$9pg69^`|9ru|~OcAo)f?j~#3 z#pm7-=&#_+p%|~BXWpSQvL4C1YLP9T3t80VHSZRp;40LKf26bR-tRCwm|u$0mQZ!F z#?;Ol8Du{yHd-`B@Vh$2l6o;toK|jEMjq>+pbxe}OSS~MxL{zD5_nO}5_8&egN|^c z6Niiq7WZ^V5|>A>sDYu)W|=_*hEva|cRRY>*t-2w8fp$vhzSNXtnK+Jq~g)~IXBDx zZql5=xhthD6zIXxYc>9GW`u}Awn1`Tx`tz(Sje`;cBi|pr<78hwPBeF9V}lnZ^yEH z??$Ze^Ql(75nffTL(<0g?-TBFC*UALKUaqj?e{zjWcO!ur9u4HFgK)`5B6N;*-TrZ zCu6kY735tl>&F$9=zAhEbPV5%Ze#erZr2#LgdAlog2yG|I9mx}!^j!bo5D;0QPlz| zB}!#kz>^6j)3JBi3WQ=u$f=iyf;QJ@J_W~00=aP%v}5HGF{gehN*}SM*)r-pH;eZA z==njf{<@_7syL@OIag-5G=nhe-ujBu$0O&=R#gf590aOk)FCitTw;u{;U9%iC zXD+EMJsQ>+oq~GcP>Kaa&3T^FNnYta?HztK{<0tjy~-w?0yMUy^U3N2ZfCKs>cgee z0BpeNP%+(%+9z-``~FBxUQf=9_aoq` z>H*{LDj+`v-!uk(DWY!e_zp@Zri#6C8Rb}EQ_QiH;yrkW_gmuGDZ%w4)=qVYft@3# zF6+%?4Y^sMx_j@sGB(tH)1rN{bFyhQd*jkoY5#NLPa-Z1#9LiK-7M>2y0KmzrkU57 zau@MReCxK~BsVRPJ(^mmR%9XP!prAw0)K>I@miCH;Gt#Qd^jVE$nW6Vm1Ur@{@z2# zGAO=IC9%8ARh-&^|BYaZ7)H*E4M|do{kBeU|KRLpzc6!C2(}c%q8#&mxDIduqSpQd zA^R2kK`+8a#6@w3hgk+FBMDVu=g$xC20}$VK@PZ>%Oh@WFDoTWv5Ug`19^h$xW}0b zj_cna2*EaEz(Ly(L_1#*bP&Bc39EMdQMsO`?sLkEyZV^1fxs*4dp;h@u2T{t)R(5t zDnFGPCjbPPhH!#LV`#bw&Bjcx^f1S6R;1bQZPu^Yh5v#xGQ0oq$U$Xk2hez%bE=ja z9O^H!M|vK?4Td}fAl~IRi8yIS&xtdool4r;A2=VCNgX*we;M$3lq$gNuORLr(>oP} z?|(+ea_;2&Gq1`IO6;wsgGr|x5TRU{l@r~Vpa#vEq1%Y}deEjZ$%ctk zGydWal|wxH!nz@^FbAJw-)`*Z-GiR8ehK3hF;wam0VsU~W?KuqZL(9%DsOzDd2`C7 zgZKAHN8!KvS^HOaXJOw!kIltkJBI%+U+zD?;L{(D6vavr^mcziy!)@0{7reE?Qb+4 z)~555@cECA_zQQlP;S3!p`FrJ1pf6S{`>>^@2&Z__x``P=Ko8#rsGB>5<3$A-7Vjt z_iwxoeEUMaj^$61uzGu9zOFgc%?fw_*>vwt?oUm`AMKjI`aR}QfI%lbIUK8MaA1Mr zmlMkW&=q;>ah+bgzC37odh>T?=l|&2-nj+}r;l2Q=ii9!IL15z(7AE4Cp3TS8>x~q zp^{ngeZU2`Q zbdMbf0z#wy-dZ6G1ht&%81jF#Kz{$GK59UQdX2xI_HW%;Mo{t?PecBW?CZ{x9q2 zul;fvfEg>#ZoK*Zng99Y!~gyl|8Lt+)Yf)iQmmrwN?D#fCkKHcfeOHy>&4)qp)+@w zfAfy>+6G->rmfxTqi{~Z=c<1!kou3{n$yZ3P3K*5EjKJ-+t1*h6D@)UR6jV!|NHCy z@z02dQ1U#p+rm)A5uR z@n~zJ3!+Vw_t>}l-V7K|o~!a<`6~L7jsT^4_3mU0?~fPu=Do*Fy*6sucN0IhU9A(b zQ;2I5r*toUCGbfXm6D+YTQq5{7$Sbehuw2)k?J3Ph<>~e`c4jGY zs07Z4;kVS=qNu-@SfG*=BRiXU{~?DR1ajTjYwNvPdUlkztT~@Nc@ng#h!76*5Ql^U z4c&JE={-pjj`3)l+6xrMA5CP{KQ1}Ot+jWUjFA<1`={U?pggSGbMdBENW~@vUzreenn^XbJ)Iqa?(~&UkBzIfv z(eenF769Gq%?86|u?n(FAaJOmX2H2h%i{_F+6q-}6=)$|+mgjHioqI++StBhN9fCwKR*p9p0^vnEH%Z;YkWWdq{|6t2Dl5G(=7oJCAX=zWk{0 z@rnIq>^%bp!)zs9M}^?OtQdEE4p3>4{a)gp8iPlF`KeW3d59uHyxIV7|IV_r#B}YL zyZ-{-Yjf$Kf&b^d@hvLy!xvai&Rsqj_=))%*AR238gLsQ8sZunV%I-=@cJwNyT?9V zGh)7Uf{24=Ier)_EqydHx0mbDy5Z&xXGt6T zmS>@+pw>pcbtnu?xjfha+k#=+0vSR8O6m@&*)d>rTR$U*tJrIx;7JPhV|2fE4+m@* 
zwFw6^L$S7WmAb=USFVD{?=z(kehM+S#Va6NhNABNcXs{}qyZx+9Q(>Fqbxh$QG=cy zs~4R8+&3Pxbn_E3WN}A=sSxGvlcVpn!htG5!S|kVeM_EBiT=7+nVkN62Qo<>m#YOW zv`t=p7V|(?Z(3+gVD3Km#!lqiL^*0Wa`V&Yvts zmq0(D&ha5m{;d~Z{!;u_Z@xWWCTLMH(9J#kb2zx;Gm$YzP?anFVPlo{EE56qKtQ-v zt^A^b4crA67K5esKkuHPU@S^VXfL!=v5-=5WAO*KBG=A6EES@nxyO`Oc|?YMvo$$@ z)+;9;x?jA)LYl{3O7|Uq=J?M42+B6E1UV7pB?J|4Pi=iXUHA5b*^52Tl@@XXF;Nw( zDg(VuEM}#BWwb^I@WiIti9E#?{aORwH3ORAyn3>Y9~maI(PSmp`L(F~j$&TF2I5g9 zDWAB|y`@Lq-hd1uc09?e=g7k_vqbxAUF5|mt(#8T{=7(o?4U~_55QHzNwnSEnSDiF zK@h}Z@^lQGPxWM|j(o*}glO|-5P`avmqlchaC`#@M0J*HK$CMWZ%T0eQCBHrvBh{_ zWucHGuM`miNC?b{?6bfv}2jnor1Z;^+NpNjPpV{!5MwVadF+!X#hA&g@9@HJ{Nv_ z*d+<|eUmhgDn#8%lxREr1pVusTrHGlezc^rS5B2oKqw12L*SBJ9>lQMY9Mi5sv{zp zEX!)h-aEJ(A=Qzg>`blLPjOeLL{LXKhPC;;zR-KJGbyeFG5ZQ^JILw@# zlISc2>2nYLDpX>G*EWLdU}UA zkdx)kpd!^~JK&QxIxJK!S82U=a9fWm=Lt@+8~#9;GMTo&UFBi&tqKhr8P9*8!7!FT z^|5o$id+M0VWoEWE0F+H>eG&KEl@MyB3^ze z@*#EC7l+ET!!8eo_%0i>VK>L)2XpWmz#p>xhYh+=@!@Q{Y8bg_?b@@DA3<*o+tjbs z`V)u8ieq3v z8na;sy3)|$o%115M)MM@9U@KB8$A87SnEV^tA`5RdD4@n#0wHC!>=kZm+nX2&wA`L zVk!5sDKX)&$DELw zv<#bhkj=t+2;n^TjbbniSo9drgDN{;shx6pEcDqCJB;B-NO&AT%NZJZ#-GY%uD|cO zD=;(a`3jxS5Epya^ldLuM#w6;Jh~CrJz|M;Ls5|y69OyQ0Lm2UJ!iDk8o_<-xm@si z|8mWB_OhuM1363!qV%OkW<7KuS3?gt2+mzEqF|-vYy*pu4x1~-08!QheJS)&JwM54 zQ7LkthB9*gGxu{@9i(f?@x{o`<77rYdx3A10j2VX8RvkM*z$9*u8cz8mGNtN^GP91 z{}+_fX?Jl3HBW^HR(ZOLk=aj!)?C@~nt3}B1zn8@ahFdgH53hNa&a6J$tZ=!ajx|mh!75>X<#}v zkeDik8x*PA)chO9-=ki>Ugsv@x9o~Pd!rB9!jvL&ucWKS3v<%*!#>_RhN?z(zA&YJ zw7_Q(b6H}ir0S=ci-2Cb>kZ5Pq;fj0^-#F|a9Lup^~kUtkA1(p9Mxrr$<7NqKt3&d zZC$#=IdAiZgyc-fU3mzt-?NdFy&hzF@__7p$yr3SOU>jF@Snm1)%$t(p1%4&_J19@ literal 0 HcmV?d00001 diff --git a/appletv/user_doc.rst b/appletv/user_doc.rst new file mode 100755 index 000000000..e5c2d2d19 --- /dev/null +++ b/appletv/user_doc.rst @@ -0,0 +1,290 @@ +.. index:: Plugins; appletv +.. index:: appletv + +======= +appletv +======= + +.. image:: webif/static/img/plugin_logo.png + :alt: plugin logo + :width: 400px + :height: 308px + :scale: 100 % + :align: left + +Mit diesem Plugin können Sie ein oder mehrere `Apple TVs `_ aller Generationen steuern. Jedes Apple TV benötigt eine eigene Plugin-Instanz. Es benutzt die `pyatv library `_ von Pierre Ståhl. Es bietet auch eine Web-Schnittstelle, die mit dem `http`-Modul verwendet werden kann. + + +Konfiguration +============= + +Diese Plugin Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind +unter :doc:`/plugins_doc/config/appletv` beschrieben. + + +plugin.yaml +----------- + +.. code-block:: yaml + + # etc/plugin.yaml + appletv: + plugin_name: appletv + #instance: wohnzimmer + #ip: 192.168.2.103 + #login_id: 00000000-0580-3568-6c73-86bd9b834320 + +Items +===== + +name (String) +------------- +Enthält den Namen des Geräts, wird beim Starten des Plugins durch die automatische Erkennung gefüllt + +artwork_url (String) +-------------------- +Enthält eine URL zum Artwork der aktuell abgespielten Mediendatei (falls vorhanden). + +play_state (Ganzzahl) +--------------------- +Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände: + +* 0: Gerät befindet sich im Leerlaufzustand +* 1: Kein Medium wird gerade ausgewählt/abgespielt +* 2: Medium wird geladen/gepuffert +* 3: Medium ist pausiert +* 4: Medium wird abgespielt +* 5: Medien werden vorgespult +* 6: Medien werden zurückgespult + +play_state_string (String) +---------------------------- +Der aktuelle Status der Wiedergabe als Text. 
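
Eine minimale Logik-Skizze dazu (die Item-Pfade ``atv.wohnzimmer.*`` sind hier nur als Beispiel angenommen, analog zum SmartVISU-Beispiel weiter unten), die den numerischen Abspielstatus ausliest und anhand der oben dokumentierten Werte 0-6 in einen Text übersetzt:

.. code-block:: python

    # Logik-Skizze: play_state auswerten (Item-Pfade sind Beispielannahmen)
    PLAY_STATES = {
        0: 'Leerlauf',
        1: 'kein Medium ausgewählt',
        2: 'laden/puffern',
        3: 'pausiert',
        4: 'Wiedergabe',
        5: 'vorspulen',
        6: 'zurückspulen',
    }

    state = sh.atv.wohnzimmer.play_state()        # numerischer Abspielstatus (0-6)
    text = sh.atv.wohnzimmer.play_state_string()  # derselbe Status als Text, vom Plugin gefüllt
    logger.info(f"Apple TV: play_state={state} ({PLAY_STATES.get(state, 'unbekannt')}), Plugin-Text: {text}")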
+ +playing (bool) +-------------- +`True` wenn play\_state 4 ist (Medium wird abgespielt), `False` für alle anderen play_states. + +media_type (Ganzzahl) +----------------------- +Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände: + +* 1: Medientyp ist unbekannt +* 2: Medientyp ist Video +* 3: Medientyp ist Musik +* 4: Medientyp ist TV + +media_type_string (String) +---------------------------- +Der aktuelle Medientyp als Text. + +album (String) +-------------- +Der Name des Albums. Nur relevant, wenn der Inhalt Musik ist. + +artist (String) +--------------- +Der Name des Interpreten. Nur relevant, wenn der Inhalt Musik ist. + +genre (String) +-------------- +Das Genre der Musik. Nur relevant, wenn der Inhalt Musik ist. + +title (String) +-------------- +Der Titel des aktuellen Mediums. + +position (Ganzzahl) +------------------- +Die aktuelle Position innerhalb des abspielenden Mediums in Sekunden. + +total_time (Ganzzahl) +----------------------- +Die tatsächliche Abspielzeit des Mediums in Sekunden. + +position_percent (Ganzzahl) +----------------------------- +Die aktuelle Position innerhalb des abspielenden Mediums in %. + +repeat (Ganzzahl) +------------------- +Der aktuelle Status des ausgewählten Wiederholungsmodus. Derzeit unterstützte Wiederholungsmodi: + +* 0: Keine Wiederholung +* 1: Wiederholung des aktuellen Titels +* 2: Alle Spuren wiederholen + +repeat_string (String) +---------------------- +Der aktuell gewählte Typ des Wiederholungsmodus als String. + +shuffle (bool) +-------------- +`True` wenn shuffle aktiviert ist, `False` wenn nicht. + +rc_top_menu (bool) +------------------ +Setzt diesen Punkt auf `True`, um zum Home-Menü zurückzukehren. +Das Plugin setzt diesen Eintrag nach der Befehlsausführung auf `False` zurück. + +rc_menu (bool) +-------------- +Setzt diesen Punkt auf `True`, um zum Menü zurückzukehren. +Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück. + +rc_select (bool) +---------------- +Setzt diesen Punkt auf `True` um die 'select' Taste zu drücken. +Das Plugin setzt diesen Punkt nach der Ausführung des Befehls auf `False` zurück. + +rc_left, rc_up, rc_right, rc_down (bools) +----------------------------------------- +Setzt eines dieser Elemente auf `True`, um den Cursor in die entsprechende Richtung zu bewegen. +Das Plugin setzt diese Werte nach der Befehlsausführung auf `False` zurück. + +rc_previous (bool) +------------------ +Setzen Sie dieses Element auf `True`, um die 'previous'-Taste zu drücken. +Das Plugin setzt diesen Punkt nach der Befehlsausführung auf `False` zurück. + +rc_play (bool) +-------------- +Setzt dieses Element auf `True`, um die 'play'-Taste zu drücken. +Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück. + +rc_pause (bool) +--------------- +Setzt dieses Element auf `True`, um die 'Pause'-Taste zu drücken. +Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück. + +rc_stop (bool) +-------------- +Setzt dieses Element auf `True`, um die 'stop'-Taste zu drücken. +Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück. + +rc_next (bool) +-------------- +Setze dieses Element auf `True`, um die 'next'-Taste zu drücken. +Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück. + + +Struct Vorlagen +=============== + +Ab smarthomeNG 1.6 können Vorlagen aus dem Plugin einfach eingebunden werden. 
Dabei stehen folgende Vorlagen zur Verfügung: + +- device: Informationen zur IP, MAC-Adresse, Einschaltzustand, etc. +- playing: Informationen zum aktuell gespielten Titel wie Artist, Album, etc. sowie Ansteuern des Abspielmodus und mehr +- control: verschiedene Fernbedienungsfunktionen wie Menü, Play/Pause, etc. + + +Funktionen +========== + +is_playing() +------------ +Gibt `true` oder `false` zurück und zeigt an, ob das Apple TV gerade Medien abspielt. +Beispiel: `playing = sh.appletv.is_playing()` + +play() +------ +Sendet einen Abspielbefehl an das Gerät. +Beispiel: `sh.appletv.play()` + +pause() +------- +Sendet einen Pausenbefehl an das Gerät. +Beispiel: `sh.appletv.pause()` + +play_url(url) +------------- +Spielt ein Medium unter Verwendung der angegebenen URL ab. Das Medium muss natürlich mit dem Apple TV Gerät kompatibel sein. Damit dies funktioniert, muss SHNG zuerst beim Gerät authentifiziert werden. Dies geschieht über die Schaltfläche "Authentifizieren" in der Weboberfläche. Anschließend muss ein PIN-Code, der auf dem Fernsehbildschirm angezeigt wird, in die Weboberfläche eingegeben werden. Dieser sollte nur einmal benötigt werden und für immer gültig sein. +Beispiel: `sh.appletv.play_url('http://distribution.bbb3d.renderfarming.net/video/mp4/bbb_sunflower_1080p_60fps_normal.mp4')` + +SmartVISU +========= +Wenn SmartVISU als Visualisierung verwendet wird, kann folgender HTML-Code in einer der Seiten verwendet werden: + +.. code-block:: HTML + +
    +
    +
    +

    Apple TV {{ basic.print('', 'atv.wohnzimmer.name') }} ({{ basic.print('', 'atv.wohnzimmer.media_type_string') }} {{ basic.print('', 'atv.wohnzimmer.play_state_string') }})

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_top_menu', '', '1', 'jquery_home.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_menu', '', '1', 'control_return.svg', '') }} + + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_up', '', '1', 'control_arrow_up.svg', '') }} +
    + {{ basic.stateswitch('', 'atv.wohnzimmer.shuffle', '', '', 'audio_shuffle.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.repeat', '', [0,1,2], ['audio_repeat.svg','audio_repeat_song.svg','audio_repeat.svg'], '', ['icon0','icon1','icon1']) }} + + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_left', '', '1', 'control_arrow_left.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_select', '', '1', 'control_ok.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_right', '', '1', 'control_arrow_right.svg', '') }} +
      + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_down', '', '1', 'control_arrow_down.svg', '') }} +
     
    + {{ basic.print('', 'atv.wohnzimmer.artist') }} - {{ basic.print('', 'atv.wohnzimmer.album') }} +
    + {{ basic.print('', 'atv.wohnzimmer.title') }} ({{ basic.print('', 'atv.wohnzimmer.genre') }}) +
    {{ basic.slider('', 'atv.wohnzimmer.position_percent', 0, 100, 1, 'horizontal', 'none') }}
    +
    + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_previous', '', '1', 'audio_rew.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_play', '', '1', 'audio_play.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_pause', '', '1', 'audio_pause.svg', '') }} + {{ basic.stateswitch('', 'atv.wohnzimmer.rc_next', '', '1', 'audio_ff.svg', '') }} +
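    <!-- Hinweis (Annahme zu den Beispiel-Items): die rc_*-Items wirken als Taster; das Plugin setzt sie nach Ausführung des Befehls selbst wieder auf False zurück, siehe Beschreibung der rc_*-Items oben -->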
    +
    + {{ basic.print ('', 'atv.wohnzimmer.artwork_url', 'html', '\'\'') }} +
    +
    +
    +
    + +Web Interface +============= + +Das Webinterface kann genutzt werden, um die Items und deren Werte auf einen Blick zu sehen, +die dem Plugin zugeordnet sind. Außerdem können erkannte Geräte eingesehen und gekoppelt werden. +Für jedes erkannte Gerät gibt es zudem eine Übersicht mit den aktuellen Informationen wie Status, +Abspielposition, Künstler, etc. + +.. image:: assets/webif_appletv1.png + :height: 1612px + :width: 3312px + :scale: 25% + :alt: Web Interface + :align: center From 9273dd77e36511b02f063ab528b0ac231424cc80 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 29 Apr 2023 23:58:50 +0200 Subject: [PATCH 081/775] AppleTV plugin: adjust requirements to work with Python 3.10 --- appletv/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/appletv/requirements.txt b/appletv/requirements.txt index 228b33db0..20be80395 100755 --- a/appletv/requirements.txt +++ b/appletv/requirements.txt @@ -1 +1,2 @@ -pyatv==0.7.0 +pyatv==0.7.0;python_version<'3.10' +pyatv==0.10.3;python_version=='3.10' \ No newline at end of file From 03f608a31e9d9713fa912e0139154a1ffc041d2b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 1 May 2023 10:20:38 +0200 Subject: [PATCH 082/775] AppleTV Plugin: Adjust requirements to work with Python 3.9 --- appletv/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/appletv/requirements.txt b/appletv/requirements.txt index 20be80395..7f4c32936 100755 --- a/appletv/requirements.txt +++ b/appletv/requirements.txt @@ -1,2 +1,2 @@ -pyatv==0.7.0;python_version<'3.10' -pyatv==0.10.3;python_version=='3.10' \ No newline at end of file +pyatv==0.7.0;python_version<'3.9' +pyatv==0.10.3;python_version>='3.9' \ No newline at end of file From bf30d7028b89594a64d6442f21718b4a62d2619d Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 1 May 2023 10:36:00 +0200 Subject: [PATCH 083/775] AppleTV plugin: fix user_doc --- appletv/user_doc.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/appletv/user_doc.rst b/appletv/user_doc.rst index e5c2d2d19..378ee43a8 100755 --- a/appletv/user_doc.rst +++ b/appletv/user_doc.rst @@ -57,13 +57,13 @@ Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände: * 5: Medien werden vorgespult * 6: Medien werden zurückgespult -play_state_string (String) +play_state_text (String) ---------------------------- Der aktuelle Status der Wiedergabe als Text. playing (bool) -------------- -`True` wenn play\_state 4 ist (Medium wird abgespielt), `False` für alle anderen play_states. +`True` wenn play_state 4 ist (Medium wird abgespielt), `False` für alle anderen play_states. media_type (Ganzzahl) ----------------------- @@ -74,7 +74,7 @@ Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände: * 3: Medientyp ist Musik * 4: Medientyp ist TV -media_type_string (String) +media_type_text (String) ---------------------------- Der aktuelle Medientyp als Text. @@ -114,7 +114,7 @@ Der aktuelle Status des ausgewählten Wiederholungsmodus. Derzeit unterstützte * 1: Wiederholung des aktuellen Titels * 2: Alle Spuren wiederholen -repeat_string (String) +repeat_text (String) ---------------------- Der aktuell gewählte Typ des Wiederholungsmodus als String. @@ -210,7 +210,7 @@ Wenn SmartVISU als Visualisierung verwendet wird, kann folgender HTML-Code in ei
    -

    Apple TV {{ basic.print('', 'atv.wohnzimmer.name') }} ({{ basic.print('', 'atv.wohnzimmer.media_type_string') }} {{ basic.print('', 'atv.wohnzimmer.play_state_string') }})

    +

    Apple TV {{ basic.print('', 'atv.wohnzimmer.name') }} ({{ basic.print('', 'atv.wohnzimmer.media_type_text') }} {{ basic.print('', 'atv.wohnzimmer.play_state_text') }})

    - + From a3f44f374fefb59d0357eaa946de37d1132d5766 Mon Sep 17 00:00:00 2001 From: ivande Date: Mon, 8 May 2023 16:05:11 +0200 Subject: [PATCH 094/775] add old version in subfolder --- telegram/_pv_1_7_1/README.rst.off | 573 ++++++++++ telegram/_pv_1_7_1/__init__.py | 994 ++++++++++++++++++ telegram/_pv_1_7_1/locale.yaml | 15 + telegram/_pv_1_7_1/logging.yaml.telegram | 4 + telegram/_pv_1_7_1/plugin.yaml | 185 ++++ telegram/_pv_1_7_1/requirements.txt | 2 + telegram/_pv_1_7_1/user_doc.rst | 577 ++++++++++ telegram/_pv_1_7_1/webif/__init__.py | 105 ++ .../webif/static/img/plugin_logo.svg | 1 + .../_pv_1_7_1/webif/static/img/readme.txt | 6 + telegram/_pv_1_7_1/webif/templates/index.html | 205 ++++ telegram/webif/templates/index.html | 210 ++-- 12 files changed, 2767 insertions(+), 110 deletions(-) create mode 100644 telegram/_pv_1_7_1/README.rst.off create mode 100644 telegram/_pv_1_7_1/__init__.py create mode 100644 telegram/_pv_1_7_1/locale.yaml create mode 100644 telegram/_pv_1_7_1/logging.yaml.telegram create mode 100644 telegram/_pv_1_7_1/plugin.yaml create mode 100644 telegram/_pv_1_7_1/requirements.txt create mode 100644 telegram/_pv_1_7_1/user_doc.rst create mode 100644 telegram/_pv_1_7_1/webif/__init__.py create mode 100644 telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg create mode 100644 telegram/_pv_1_7_1/webif/static/img/readme.txt create mode 100644 telegram/_pv_1_7_1/webif/templates/index.html diff --git a/telegram/_pv_1_7_1/README.rst.off b/telegram/_pv_1_7_1/README.rst.off new file mode 100644 index 000000000..c547faa89 --- /dev/null +++ b/telegram/_pv_1_7_1/README.rst.off @@ -0,0 +1,573 @@ +======== +telegram +======== + +Das Plugin dient zum Senden und Empfangen von Nachrichten über den +`Telegram Nachrichten Dienst `_ + +Abhängigkeiten +============== + +Es wird die Bibliothek ``python-telegram-bot`` benötigt. +Diese ist in der ``requirements.txt`` enthalten. +Bevor das Plugin genutzt werden kann, muß die Bibliothek installiert werden: + +* Entweder mit ``sudo pip install -r requirements.txt`` + +oder + +* unter Benutzung von ``pip install -r requirements.txt`` innerhalb + des Verzeichnisses ``/usr/local/smarthome/plugins/telegram``. + +Konfiguration von Telegram +========================== + +Zuerst muß ein eigener Bot bei Telegram erstellt werden: + +* An ``Botfather`` das Kommando ``/newbot`` senden. +* Dann muß ein **Bot Name** vergeben werden der noch nicht existiert. +* Weitere Bot Details können eingestellt werden, wenn das Kommando + ``/mybots`` an den BotFather gesendet wird. + +Der BotFather erstellt für den neuen Bot ein sogenanntes **token** also einen einzigartigen Schlüssel. + +Konfiguration des Plugins +========================= + +Die Konfiguration des Plugins ist auch unter :doc:`/plugins_doc/config/telegram` beschrieben bzw. in der **plugin.yaml** nachzulesen. + + +Der erstelle **token** muß in der ``plugin.yaml`` von SmartHomeNG eingetragen werden. Das kann im Admin-IF geschehen oder durch direkten Eintrag in die ``plugin.yaml``. + +.. code::yaml + + telegram: + plugin_name: telegram + name: Mein Haus + token: 123456789:BBCCfd78dsf98sd9ds-_HJKShh4z5z4zh22 + +* name: Eine Beschreibung des Bots +* token: Der oben beschriebene einzigartige Schlüssel mit dem der Bot bei Telegram identifiziert wird. + +Item Konfiguration +================== + +Jeder Chat, der auf den Bot zugreifen soll, muß SmartHomeNG bekannt gemacht werden. +Das geschieht über ein Item das das Attribut ``telegram_chat_ids`` mit dem Parameter True hat und als Wert ein Dictionary hat. 
+Im Dictionary sind Paare von Chat-ID und Berechtigung gespeichert. + +.. code::yaml + + Chat_Ids: + type: dict + telegram_chat_ids: True + # cache bietet sich an um Änderungen an den trusted_chat_ids während der + # Laufzeit von SmartHomeNG zu speichern und nach Neustart wieder zu laden + # es wird dann der letzte Wert geladen + cache: 'True' + # Beispiel value: '{ 3234123342: 1, 9234123341: 0 }' + # Ein Dictionary mit chat id und 1 für Lese und Schreibzugriff oder 0 für einen nur Lese-Zugriff + # Nachfolgend ein Chat dem Lese- und Schreibrechte gewährt werden + value: '{ 3234123342: 1 }' + +Um die Chat Id zu bekommen, muß der Bot (und das Plugin) zunächst laufen. Dazu wird SmartHomeNG (neu) gestartet. + +Im Telegram Client wird der Bot als Chatpartner aufgerufen und das Kommando ``/start`` an den Bot gesendet. + +Der Bot reagiert mit einer Meldung, das die Chat-ID noch nicht bekannt ist und diese zunächst eingetragen werden muß. Mit der nun bekannten Chat-ID wird +über das AdminIF das Items Dictionary des entsprechenden Items aus dem obigen Beispiel mit den eigenen Chat-IDs erweitert. + +Ein erneutes Kommando im Telegram Client an den Bot mit ``/start`` sollte nun die Meldung ergeben, das der Chat bekannt ist und weiterhin, welche +Zugriffsrechte der Chat auf den Bot hat. + + +telegram_chat_ids +----------------- + +Es muß ein Item mit dem Typ Dictionary mit dem Attribut ``telegram_chat_ids`` und dem Parameterwert ``True`` angelegt werden. +In ihm werden Chat-IDs und Zugriff auf den Bot gespeichert. Siehe obiges Beispiel. + + +telegram_message +----------------- +Items mit dem Attribut ``telegram_message`` lösen eine Nachricht aus, wenn sich der Itemwert ändert. Es ist möglich Platzhalter +in der Nachricht zu verwenden. + +Verfügbare Platzhalter: + +[ID] [NAME] [VALUE] [CALLER] [SOURCE] [DEST] + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + name: Türklingel (entprellt) + type: bool + knx_dpt: 1 + telegram_message: 'Es klingelt an der Tür' + +Beispiel mit Platzhaltern +''''''''''''''''''''''''' + +.. code:: yaml + + state_name: + name: Name des aktuellen Zustands + type: str + visu_acl: r + cache: 'on' + telegram_message: 'New AutoBlind state: [VALUE]' + + +telegram_condition +------------------ + +Da es Situationen gibt die für Items ein ``enforce_updates: True`` benötigen, würde bei ``telegram_message`` bei jeder Aktualisierung des Items eine Nachricht verschickt werden. +Um das zu verhindern, kann einem Item das Attribut ``telegram_condition: on_change`` zugewiesen werden. + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + type: bool + knx_dpt: 1 + enforce_updates: True + telegram_message: 'Es klingelt an der Tür' + telegram_condition: on_change + telegram_value_match_regex: (true|True|1) + +Dadurch wird auf eine mehrfache Zuweisung des Items mit dem Wert ``True`` nur einmal mit einer Nachricht reagiert. Um eine weitere Nachricht zu generieren +muss das Item zunächst wieder den Wert ``False`` annehmen. Das Attribut ``telegram_value_match_regex`` filtert den Wert so das es bei der Änderung des Itemwertes +auf ``False`` zu keiner Meldung *Es klingelt an der Tür* kommt. + + +telegram_value_match_regex +-------------------------- + +Ist zusätzlich zum Attribut ``telegram_message`` auch das Attribut ``telegram_value_match_regex`` gesetzt, wird der Itemwert geprüft, bevor eine +Nachricht gesendet wird. Geprüft wird gegen/mit den Regex, der als Parameterwert angegeben ist. + +Beispiel +'''''''' + +.. 
code:: yaml + + TestNum: + type: num + cache: True + telegram_message: 'TestNum: [VALUE]' + telegram_value_match_regex: '[0-1][0-9]' # nur Nachrichten senden wenn Zahlen von 0 - 19 + TestBool: + type: bool + cache: True + telegram_message: "TestBool: [VALUE]" + telegram_value_match_regex: 1 # nur Nachricht senden wenn 1 (True) + + +telegram_message_chat_id +------------------------ +Ist zusätzlich zum Attribut ``telegram_message`` auch das Attribut ``telegram_message_chat_id`` gesetzt, wird die Nachricht nur an die dort angegebene Chat-ID (hier 3234123342) gesendet. +Ist das Attribut nicht gesetzt, erfolgt der Versand der Nachricht an alle Chat-IDs, die dem Plugin bekannt sind. + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + type: bool + knx_dpt: 1 + enforce_updates: True + telegram_message: 'Es klingelt an der Tür' + telegram_message_chat_id: 3234123342 + telegram_value_match_regex: (true|True|1) + + +telegram_info +------------- + +Für alle Items mit diesem Attribut wird eine Liste mit Kommandos für den Bot erstellt. Der Listeneintrag entspricht dabei dem Attributwert. +Wird das Kommando ``/info`` an den Bot gesendet, so erstellt der Bot ein Tastaturmenü, dass jedes Attribut mindestens einmal als Kommando enthält. +Bei Auswahl eines dieser Kommandos im Telegram Client wird dann für jedes Item, dass das Attribut ``telegram_info`` und als Attributwert den Kommandonamen enthält +der Wert des Items ausgegeben. + +Beispiel +'''''''' + +.. code:: yaml + + Aussentemperatur: + name: Aussentemperatur in °C + type: num + knx_dpt: 9 + telegram_info: wetter + + Wind_kmh: + name: Windgeschwindigkeit in kmh + type: num + knx_dpt: 9 + telegram_info: wetter + + Raumtemperatur: + name: Raumtemperatur Wohnzimmer in °C + type: num + knx_dpt: 9 + telegram_info: rtr_ist + +Das Kommando ``/info`` veranlasst den Bot zu antworten mit + +.. code:: + + [/wetter] [/rtr_ist] + +Wählt man am Telegram Client daraufhin ``[/wetter]`` aus, so werden + +.. code:: + + Aussentemperatur = -10,6 + Wind_kmh = 12.6 + +ausgegeben. Bei der Auswahl des Kommandos ``[/rtr_ist]`` antwortet der Bot mit + +.. code:: + + Raumtemperatur = 22.6 + + +telegram_text +------------- + +Items mit dem Attribut ``telegram_text`` und dem Attributwert ``True`` bekommen eine Mitteilung, die von einem Telegram Client an den Bot gesendet wird, als Wert zugewiesen. + +Beispiel +'''''''' + +.. code:: yaml + + telegram_message: + name: Textnachricht von Telegram + type: str + telegram_text: true + +Nach der Eingabe von ``Hello world!`` am Telegram wird das Item ``telegram_message`` +auf ``: Chat-ID: Hello world!`` gesetzt. +Ein John Doe ergäbe also ``John Doe: xxxxxx: Hello world!`` + +Mit einer Logik kann basierend darauf ein Menu und entsprechende Abfragen an shNG gestellt werden. +Siehe dazu ein Beispiel weiter unten. + +telegram_control +------------- + +Für alle Items mit diesem Attribut wird eine Liste mit Kommandos für den Bot erstellt. Der Listeneintrag muss mit ``name`` spezifiziert werden. +Wird das Kommando ``/control`` an den Bot gesendet, so erstellt der Bot ein Tastaturmenü, dass jedes Attribut als Kommando enthält. +Dabei werden auch alle aktuellen Werte der Items ausgegeben. +Bei Auswahl eines dieser Kommandos im Telegram Client kann dann ein Item vom Type bool geschalten werden (on/off) oder beim Type 'num' kein eine Zahl zum SH-Item gesendet werden. 
+ +``name`` Item wird mit diesem Namen im Bot als Kommando dargestellt +``type`` Möglichkeiten: on, off, onoff, toggle, num + on * nur Einschalten ist möglich + off * nur Ausschalten ist möglich + onoff * das Ein- und Ausschalten muss mit einen weiteren Kommando vom Tastaturmenu ausgewählt werden + [On] [Off] (nach einem Timeout ohne Antwort wird der Befehl abgebrochen) + toggle * der Wert des Items wird umgeschltet (0 zu 1; 1 zu 0) + num * es kann eine Zahl an SH gesendet werden und das entsprechende Item wird damit geschrieben. (nach einem Timeout ohne Antwort wird der Befehl abgebrochen) +``question``Sicherheitsabfrage vor dem Schalten des Items (verwendbar bei type:on/off/toggle - nach einem Timeout ohne Antwort wird der Befehl abgebrochen) + [Yes] [No] +``min`` Minimalwert (verwendbar bei type:num) +``max`` Maximalwert (verwendbar bei type:num) +``timeout`` Zeit nach welcher der Befehl mit Antwort(onoff/question/num) abgebrochen wird (default 20Sekunden) + + +Beispiel +'''''''' + +.. code:: yaml + + BeregnungZone1: + type: bool + cache: True + telegram_control: "name:BeregnungZ1, type:onoff" + BeregnungZone2: + type: bool + cache: True + telegram_control: "name:BeregnungZ2, type:toggle, question:Ventil wirklich umschalten" + Gartentor: + type: bool + cache: True + telegram_control: "name:Gartentor, type:on, question:Gartentor wirklich öffnen?" + Dachfenster: + type: num + cache: True + telegram_control: "name:Dachfenster, type:num, min:0, max:100, timeout:30" + Kamera: + type: bool + cache: True + telegram_control: "name:Kamera, type:toggle + eval: sh.plugins.return_plugin("telegram").photo_broadcast("http://192.168.0.78/snapshot/view0.jpg", datetime.datetime.now().strftime("%H:%M %d.%m.%Y")) + + +Das Kommando ``/control`` veranlasst den Bot zu antworten mit + +.. code:: + + [/BeregnungZ1] [/BeregnungZ2] [/Gartentor] + [/Dachfenster] [/Kamera] + + +Funktionen +========== + +Das Plugin stellt derzeit zwei Funktionen zur Nutzung in Logiken bereit: + + +msg_broadcast +------------- + +Argumente beim Funktionsaufruf: + +**msg**: Die Nachricht, die verschickt werden soll + +**chat_id**: + - Eine Chat-ID oder eine Liste von Chat-IDs. + - Wird keine ID oder None angegeben, so wird an alle autorisierten Chats gesendet + +photo_broadcast +--------------- + +Argumente beim Funktionsaufruf: + +**path_or_URL**: + - entweder ein lokaler Pfad, der auf eine Bilddatei zeigt log_directory oder + - eine URL mit einem Link. Wenn der Link lokal ist, + +**caption**: + - Titel der Bilddatei, kann auch Dateiname sein oder Datum + - Vorgabewert: None + +**chat_id**: + - eine Chat Id oder eine Liste von Chat ids. Wird keine ID oder None angegeben, + so wird an alle autorisierten Chats gesendet + - Vorgabewert: None + +**local_prepare** + - Ist für das zu sendende Bild eine URL angegeben, ruft das Plugin die + Daten von der URL lokal ab und sendet die Daten dann an den Telegram Server. + Beispiel dafür ist eine URL einer lokalen Webcam. + Soll stattdessen eine im Internet frei zugängliche URL abgerufen werden, + so wird dieses Argument auf False gesetzt und es wird nur die URL + an Telegram geschickt und der lokale Rechner von den Daten entlastet. + Aktuell kann das Plugin nicht mit Benutzername und Passwort geschützten + URL umgehen. + - Vorgabewert: True + +Beispiele +--------- + +Die folgende Beispiellogik zeigt einige Nutzungsmöglichkeiten für die Funktionen: + +.. code:: python + + # Eine Nachricht `Hello world!` wird an alle vertrauten Chat Ids gesendet + msg = "Hello world!" 
+ sh.telegram.msg_broadcast(msg) + + # Ein Bild von einem externen Server soll gesendet werden. + # Nur die URL wird an Telegram gesendet und keine Daten lokal aufbereitet + sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "A dog", None, False) + + # Bild auf lokalem Server mit aktueller Zeit an Telegram senden + my_webcam_url = "http:// .... bitte lokale URL hier einfügen zum Test ..." + sh.telegram.photo_broadcast(my_webcam_url, "My webcam at {:%Y-%m-%d %H:%M:%S}".format(sh.shtime.now())) + + # Bild senden aber den Inhalt lokal vorbereiten + sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "The dog again (data locally prepared)") + + local_file = "/usr/local/smarthome/var/ ... bitte eine lokal gespeicherte Datei angeben ..." + sh.telegram.photo_broadcast(local_file, local_file) + + +Anwendungen +=========== + +Menugestützte Interaktion zwischen Telegram und shNG +---------------------------------------------------- + +Diese Anwendung nutzt den Wert, den Telegram in das Item mit dem Attribut ``telegram_text`` schreibt. +Dieser Wert beinhaltet den den User, die Chat-ID und die Message. Basierend auf diesem wird mit einer Logik ein Menu im Chat +dargestellt und die entsprechenden Aktionen ausgeführt. + +.. code:: python + + #!/usr/bin/env python3 + # telegram_message.py + + logger.info(f"Logik '{logic.id()}' ausgelöst durch: {trigger['by']} und {trigger['source']} mit Wert {trigger['value']}") + + telegram_plugin = sh.plugins.return_plugin('telegram') + + def bool2str(wert, typus, logic=logic): + logger.info(f"bool2str wert = {wert}, typus = {typus}") + if type(wert) is bool: + if typus == 1: + result = 'verschlossen' if wert is True else 'offen' + elif typus ==2: + result = 'an' if wert is True else 'aus' + elif typus ==3: + result = 'ja' if wert is True else 'nein' + else: + result = 'typus noch nicht definiert' + else: + result = 'Wert ist nicht vom Type bool' + return result + logic.bool2str = bool2str + + # Telegram Message einlesen und verarbeiten + message = sh.telegram.info.message() + message_user = message[:message.find(":")].lower() + message_chat_id = message[message.find(":")+2:len(message)] + message_text = message_chat_id[message_chat_id.find(":")+2:].lower() + message_chat_id = message_chat_id[:message_chat_id.find(":")] + + ## Menu definieren + if message_chat_id == 'xxxxxxx': + # Menu Ebene1 + custom_keyboard_ubersicht = {'keyboard':[['Rolladen','Tür&Tor'], ['Heizung','Schalten'], ['Wetter','Verkehr','Tanken']] , 'resize_keyboard': True, 'one_time_keyboard': False} + elif message_chat_id == 'yyyyyyy': + # Menu Ebene1 + custom_keyboard_ubersicht = {'keyboard':[['Wetter','Tür&Tor'], ['Heizung','Tanken']] , 'resize_keyboard': True, 'one_time_keyboard': False} + + # Menu Ebene2 + custom_keyboard_wetter = {'keyboard':[['zurück'], ['aktuell', 'historisch']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_schalten = {'keyboard':[['zurück'], ['LED Nische WZ', 'LED Nische EZ']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_heizung = {'keyboard':[['zurück'], ['Heizung Status'],['HK_2 Standby', 'HK_2 Normal'], ['EG/OG bewohnt', 'EG/OG unbewohnt'], ['Warmwasser Status'],['Warmwasser AN', 'Warmwasser AUS']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_verkehr = {'keyboard':[['zurück'], ['Arbeitsweg', 'Heimweg']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_rolladen = 
{'keyboard':[['zurück'], ['Rollladen Status'], ['EG Automatik An','OG Automatik An'], ['EG Automatik Aus','OG Automatik Aus']] , 'resize_keyboard': True, 'one_time_keyboard': False} + + ## Menu auswählen und senden + msg = '' + parse_mode = 'HTML' + reply_markup = {} + + if message_text == 'menu' or message_text == "zurück": + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_ubersicht + elif message_text == 'wetter': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_wetter + elif message_text == 'heizung': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_heizung + elif message_text == 'schalten': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_schalten + elif message_text == 'verkehr': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_verkehr + elif message_text == 'rolladen': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_rolladen + + ## Messages definieren und senden + # Wetter + if message_text == 'aktuell': + msg = 'Wetter:\naktuelle. Temp.: ' + str(sh.raumtemp.aussen.nord()) + ' °C \ + \ngefühlte Temp.: ' + str(sh.wetter.froggit.wetterstation.feelslikec()) + ' °C \ + \nrel. Luftfeuchte: ' + str(sh.raumtemp.aussen.nord.luftfeuchtigkeit.hum_ist()) + ' % \ + \nRegen letzte h: ' + str(sh.wetter.froggit.wetterstation.hourlyrainmm()) + ' l/m² \ + \nRegen heute: ' + str(sh.wetter.froggit.wetterstation.dailyrainmm()) + ' l/m² \ + \nLuftdruck: ' + str(sh.raumtemp.eg.diele.luftdruck()) + ' hPa \ + \nWind Mittel: {:3.2f}'.format(sh.wetter.froggit.wetterstation.windgustkmh_max10m()) + ' km/h \ + \nWind Spitze: {:3.2f}'.format(sh.wetter.froggit.wetterstation.maxdailygust()) + ' km/h ' + elif message_text == 'historisch': + msg = 'bislang nicht definiert' + + # Warmwasser + elif message_text == 'warmwasser status': + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + elif message_text == 'warmwasser aus': + sh.heizung.warmwasser.temperatur_soll(10) + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + elif message_text == 'warmwasser an': + sh.heizung.warmwasser.temperatur_soll(40) + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + + # Heizung + elif message_text == 'heizung status': + msg = 'HK_2:\nBetriebsart A1: ' + str(sh.heizung.heizkreis_a1m1.betriebsart.betriebsart.betriebsart_str()) +'\ + \nBetriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) +'\ + \nPumpe A1: ' + logic.bool2str(sh.heizung.heizkreis_a1m1.status.hk_pumpe_knx(), 2) +'\ + \nPumpe M2: ' + logic.bool2str(sh.heizung.heizkreis_m2.status.hk_pumpe_knx(), 2) +'\ + \nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) +'\ + \nUG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + elif message_text == 'hk_2 standby': + sh.heizung.heizkreis_m2.betriebsart.betriebsart(0) + msg = 'HK_2:\nneue Betriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) + elif message_text == 'hk_2 normal': + 
sh.heizung.heizkreis_m2.betriebsart.betriebsart(2) + msg = 'HK_2:\nneue Betriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) + elif message_text == 'eg/og bewohnt': + sh.raumtemp.anwesend_eg_og(1) + msg = 'HK_2:\nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + elif message_text == 'eg/og unbewohnt': + sh.raumtemp.anwesend_eg_og(0) + msg = 'HK_2:\nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + + # Schalten + elif message_text == 'led nische wz': + sh.licht.wohnzimmer.vorsatz_nische.onoff(not sh.licht.wohnzimmer.vorsatz_nische.onoff()) + msg = 'Nischenbeleuchtung:\nWohnzimmer: ' + logic.bool2str(sh.licht.wohnzimmer.vorsatz_nische.onoff(), 2) + + elif message_text == 'led nische ez': + sh.licht.wohnzimmer.tv_wand_nische.onoff(not sh.licht.wohnzimmer.tv_wand_nische.onoff()) + msg = 'Nischenbeleuchtung:\nEsszimmer: ' + logic.bool2str(sh.licht.wohnzimmer.tv_wand_nische.onoff(), 2) + + # Verkehr + elif message_text == 'arbeitsweg': + sh.verkehrsinfo.calculate_way_work(1) + time.sleep(0.5) + msg = 'Arbeitsweg:\n ' + str(sh.verkehrsinfo.travel_summary()) + elif message_text == 'heimweg': + sh.verkehrsinfo.calculate_way_home(1) + time.sleep(0.5) + msg = 'Heimweg:\n ' + str(sh.verkehrsinfo.travel_summary()) + + # Tür&Tor + elif message_text == 'tür&tor': + msg = 'Tür&Tor:\nKellertür: ' + logic.bool2str(sh.fenster_tuer_kontakte.kellertuer.verschlossen(), 1) +'\ + \nGaragentür: ' + logic.bool2str(sh.fenster_tuer_kontakte.seitentuer_garage.verschlossen(), 1) +'\ + \nGaragentor links: ' + str(sh.fenster_tuer_kontakte.garagentor_links.text()) +'\ + \nGaragentor rechts: ' + str(sh.fenster_tuer_kontakte.garagentor_rechts.text()) + + # Rolladen + elif message_text == 'rollladen status': + msg = 'Rolladen:\nEG Beschattungsautomatik: ' + logic.bool2str(sh.rollladen.eg.beschattungsautomatik(), 2) +'\ + \nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) +'\ + \nOG Beschattungsautomatik: ' + logic.bool2str(sh.rollladen.og.beschattungsautomatik(), 2) +'\ + \nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + elif message_text == 'eg automatik an': + sh.rollladen.eg.alle.automatik(1) + msg = 'Rolladen:\nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) + elif message_text == 'eg automatik aus': + sh.rollladen.eg.alle.automatik(0) + msg = 'Rolladen:\nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) + elif message_text == 'og automatik an': + sh.rollladen.og.alle.automatik(1) + msg = 'Rolladen:\nOG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + elif message_text == 'og automatik aus': + sh.rollladen.og.alle.automatik(0) + msg = 'Rolladen:\nOG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + + # Message senden + if msg != '': + telegram_plugin.msg_broadcast(msg, message_chat_id, reply_markup, parse_mode) \ No newline at end of file diff --git a/telegram/_pv_1_7_1/__init__.py b/telegram/_pv_1_7_1/__init__.py new file mode 100644 index 000000000..ab1848ebd --- /dev/null +++ b/telegram/_pv_1_7_1/__init__.py @@ -0,0 +1,994 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2017 Markus Garscha http://knx-user-forum.de/ +# 2018-2023 Ivan De Filippis +# 2018-2021 Bernd Meiners Bernd.Meiners@mail.de 
+######################################################################### +# +# This file is part of SmartHomeNG. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +import datetime +import logging +import re +import requests +import traceback +from io import BytesIO + +from lib.logic import Logics +from lib.model.smartplugin import SmartPlugin + +from .webif import WebInterface + +try: + import telegram + import telegram.ext + from telegram.error import TelegramError + from telegram.ext import Updater + from telegram.ext import CommandHandler + from telegram.ext import MessageHandler, Filters + REQUIRED_PACKAGE_IMPORTED = True +except Exception: + REQUIRED_PACKAGE_IMPORTED = False + +ITEM_ATTR_MESSAGE = 'telegram_message' # Send message on item change +ITEM_ATTR_CONDITION = 'telegram_condition' # when to send the message, if not given send any time, + # if on_change_only then just if the item's current value differs from the previous value +ITEM_ATTR_INFO = 'telegram_info' # read items with specific item-values +ITEM_ATTR_TEXT = 'telegram_text' # write message-text into the item +ITEM_ATTR_MATCHREGEX = 'telegram_value_match_regex' # check a value against a condition before sending a message +ITEM_ATTR_CHAT_IDS = 'telegram_chat_ids' +ITEM_ATTR_MSG_ID = 'telegram_message_chat_id' # chat_id the message should be sent to +ITEM_ATTR_CONTROL = 'telegram_control' # control(=change) item-values (bool/num) + +MESSAGE_TAG_ID = '[ID]' +MESSAGE_TAG_NAME = '[NAME]' +MESSAGE_TAG_VALUE = '[VALUE]' +MESSAGE_TAG_CALLER = '[CALLER]' +MESSAGE_TAG_SOURCE = '[SOURCE]' +MESSAGE_TAG_DEST = '[DEST]' + + +class Telegram(SmartPlugin): + + PLUGIN_VERSION = "1.7.1" + + _items = [] # all items using attribute ``telegram_message`` + _items_info = {} # dict used whith the info-command: key = attribute_value, val= item_list telegram_info + _items_text_message = [] # items in which the text message is written ITEM_ATTR_TEXT + _items_control = {} # dict used whith the control-command: + _chat_ids_item = {} # an item with a dict of chat_id and write access + _waitAnswer = None # wait a specific answer Yes/No - or num (change_item) + + + def __init__(self, sh): + """ + Initializes the Telegram plugin + The params are documented in ``plugin.yaml`` and values will be obtained through get_parameter_value(parameter_name) + """ + + self.logger.info('Init telegram plugin') + + # Call init code of parent class (SmartPlugin or MqttPlugin) + super().__init__() + if not self._init_complete: + return + + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"init {__name__}") + self._init_complete = False + + # Exit if the required package(s) could not be imported + if not REQUIRED_PACKAGE_IMPORTED: + self.logger.error(f"{self.get_fullname()}: Unable to import Python package 'python-telegram-bot'") + return + + self.alive = False + self._name = self.get_parameter_value('name') + self._token = 
self.get_parameter_value('token') + + self._welcome_msg = self.get_parameter_value('welcome_msg') + self._bye_msg = self.get_parameter_value('bye_msg') + self._no_access_msg = self.get_parameter_value('no_access_msg') + self._no_write_access_msg = self.get_parameter_value('no_write_access_msg') + self._long_polling_timeout = self.get_parameter_value('long_polling_timeout') + self._pretty_thread_names = self.get_parameter_value('pretty_thread_names') + + # the Updater class continuously fetches new updates from telegram and passes them on to the Dispatcher class. + try: + self._updater = Updater(token=self._token, use_context=True) + self._bot = self._updater.bot + self.logger.info(f"Telegram bot is listening: {self._bot.getMe()}") + except TelegramError as e: + # catch Unauthorized errors due to an invalid token + self.logger.error(f"Unable to start up Telegram conversation. Maybe an invalid token? {e}") + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("adding command handlers to dispatcher") + + # Dispatcher that handles the updates and dispatches them to the handlers. + dispatcher = self._updater.dispatcher + dispatcher.add_error_handler(self.eHandler) + dispatcher.add_handler(CommandHandler('time', self.cHandler_time)) + dispatcher.add_handler(CommandHandler('help', self.cHandler_help)) + dispatcher.add_handler(CommandHandler('hide', self.cHandler_hide)) + dispatcher.add_handler(CommandHandler('list', self.cHandler_list)) + dispatcher.add_handler(CommandHandler('info', self.cHandler_info)) + dispatcher.add_handler(CommandHandler('start', self.cHandler_start)) + dispatcher.add_handler(CommandHandler('lo', self.cHandler_lo)) + dispatcher.add_handler(CommandHandler('tr', self.cHandler_tr, pass_args=True)) + dispatcher.add_handler(CommandHandler('control', self.cHandler_control)) + + # Filters.text includes also commands, starting with ``/`` so it is needed to exclude them. 
+ # This came with lib version 12.4 + dispatcher.add_handler(MessageHandler(Filters.text & (~Filters.command), self.mHandler)) + self.init_webinterface() + + if not self.init_webinterface(WebInterface): + self.logger.error("Unable to start Webinterface") + self._init_complete = False + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("Init complete") + + self._init_complete = True + + def __call__(self, msg, chat_id=None): + """ + Provide a way to use the plugin to easily send a message + """ + if self.alive: + if chat_id is None: + self.msg_broadcast(msg) + else: + self.msg_broadcast(msg, chat_id) + + def run(self): + """ + This is called when the plugins thread is about to run + """ + self.alive = True + self.logics = Logics.get_instance() # Returns the instance of the Logics class, to be used to access the logics-api + q = self._updater.start_polling(timeout=self._long_polling_timeout) # (poll_interval=0.0, timeout=10, network_delay=None, clean=False, bootstrap_retries=0, read_latency=2.0, allowed_updates=None) + if self._pretty_thread_names: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("Changing Telegrams thread names to pretty thread names") + try: + for t in self._updater._Updater__threads: + if 'dispatcher' in t.name: + t.name = 'Telegram Dispatcher' + if 'updater' in t.name: + t.name = 'Telegram Updater' + + for t in self._updater.dispatcher._Dispatcher__async_threads: + *_, num = t.name.split('_') + t.name = f'Telegram Worker {num}' if num.isnumeric() else num + + # from telegram.jobqueue.py @ line 301 thread is named + # name=f"Bot:{self._dispatcher.bot.id}:job_queue" + if hasattr(self._updater.job_queue, '_JobQueue__thread'): + t = self._updater.job_queue._JobQueue__thread + if t.name.startswith('Bot'): + _, id, _ = t.name.split(':') + self._updater.job_queue._JobQueue__thread.name = f"Telegram JobQueue for id {id}" + else: + # model in telegram.ext.jobqueue.py might be changed now + pass + except Exception as e: + self.logger.warning(f"Error '{e}' occurred. Could not assign pretty names to Telegrams threads, maybe object model of python-telegram-bot module has changed? Please inform the author of plugin!") + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"started polling the updater, Queue is {q}") + if self._welcome_msg: + self.msg_broadcast(self._welcome_msg) + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"sent welcome message {self._welcome_msg}") + + def stop(self): + """ + This is called when the plugins thread is about to stop + """ + self.alive = False + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("stop telegram plugin") + try: + if self._bye_msg: + self.msg_broadcast(self._bye_msg) + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("sent bye message") + except Exception: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("could not send bye message") + self._updater.stop() + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("stop telegram plugin finished") + + def parse_item(self, item): + """ + Default plugin parse_item method. Is called when the plugin is initialized. + :param item: The item to process. 
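+ :return: ``self.update_item`` if the item carries one of this plugin's attributes
+ (the method is then registered as update callback for the item), otherwise ``None``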
+ """ + if self.has_iattr(item.conf, ITEM_ATTR_CHAT_IDS): + if self._chat_ids_item: + self.logger.warning(f"Item: {item.id()} declares chat_id for telegram plugin which are already defined, aborting!") + else: + self._chat_ids_item = item + + if self.has_iattr(item.conf, ITEM_ATTR_MESSAGE): + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"parse item: {item}") + self._items.append(item) + return self.update_item + + """ + For valid commands also see https://core.telegram.org/bots#commands + In general they are allowed to have 32 characters, use latin letters, numbers or an underscore + """ + if self.has_iattr(item.conf, ITEM_ATTR_INFO): + key = self.get_iattr_value(item.conf, ITEM_ATTR_INFO) + if self.is_valid_command(key): + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"parse item: {item} with command: {key}") + if key in self._items_info: + self._items_info[key].append(item) + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Append a new item '{item}' to command '{key}'") + else: + self._items_info[key] = [item] # dem dict neue Liste hinzufuegen + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Register new command '{key}', add item '{item}' and register a handler") + # add a handler for each info-attribute + self._updater.dispatcher.add_handler(CommandHandler(key, self.cHandler_info_attr)) + return self.update_item + else: + self.logger.error(f"Command '{key}' chosen for item '{item}' is invalid for telegram botfather") + + if self.has_iattr(item.conf, ITEM_ATTR_TEXT): + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"parse item: {item.id()}") + value = self.get_iattr_value(item.conf, ITEM_ATTR_TEXT) + if value in ['true', 'True', '1']: + self._items_text_message.append(item) + return self.update_item + + if self.has_iattr(item.conf, ITEM_ATTR_CONTROL): + attr = self.get_iattr_value(item.conf, ITEM_ATTR_CONTROL) + + key = item.id() # default + changeType = 'toggle' + question = '' + timeout = 20 + min = None + max = None + + par_list = attr.split(',') # Parameter from attr example: 'name:test, changeType:toggle, question:wirklich umnschalten?' 
+ for par in par_list: + k,v = par.split(':') + if 'name' in k: + key = v + if 'type' in k: + changeType = v + if 'question' in k: + question = v + if 'timeout' in k: + timeout = v + if 'min' in k: + min = v + if 'max' in k: + max = v + + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"parse control-item: {item} with command: {key}") + + dicCtl = {'name': key, 'type': changeType, 'item': item, 'question': question, 'timeout': timeout, 'min': min, 'max': max} + + if key not in self._items_control: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Append a new control-item '{item}' to command '{key}'") + self._items_control[key] = dicCtl # add to dict + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Register new command '{key}', add item '{item}' and register a handler") + # add a handler for each control-attribute + self._updater.dispatcher.add_handler(CommandHandler(key, self.cHandler_control_attr)) + return self.update_item + + return None + + def is_valid_command(self, cmd): + if not isinstance(cmd, str): + return False + if len(cmd) > 32: + return False + rec = re.compile(r'[^A-Za-z0-9_]') + return not bool(rec.search(cmd)) + + # def parse_logic(self, logic): + # if 'xxx' in logic.conf: + # # self.function(logic['name']) + # pass + + def update_item(self, item, caller=None, source=None, dest=None): + """ + Called each time an item changed in SmartHomeNG + """ + if caller != self.get_fullname(): + self.logger.info(f"update item: {item.id()}") + + if self.has_iattr(item.conf, ITEM_ATTR_CHAT_IDS): + if self._chat_ids_item: + self.logger.info(f"Item: {item.id()} declares chat_id for telegram plugin which are already defined, will be overwritten!") + self._chat_ids_item = item + + if self.has_iattr(item.conf, ITEM_ATTR_MESSAGE): + msg_txt_tmpl = self.get_iattr_value(item.conf, ITEM_ATTR_MESSAGE) + + item_id = item.id() + if item.property.type == 'bool': + item_value = item() + item_value = '1' if item_value is True else '0' if item_value is False else None + else: + item_value = f"{item()}" + + if self.has_iattr(item.conf, ITEM_ATTR_MATCHREGEX): + val_match = self.get_iattr_value(item.conf, ITEM_ATTR_MATCHREGEX) + self.logger.info(f"val_match: {val_match}") + + # TO_TEST: ITEM_ATTR_MATCHREGEX + p = re.compile(val_match) + m = p.match(item_value) + if m: + self.logger.info(f"Match found: {m.group()}") + else: + self.logger.info(f"No match: {val_match} in: {item_value}") + return + + caller = "None" if caller is None else str(caller) + source = "None" if source is None else str(source) + dest = "None" if dest is None else str(dest) + + if self.has_iattr(item.conf, 'name'): + item_name = self.get_iattr_value(item.conf, 'name') + else: + item_name = 'NONAME' + + # replace Tags with id,value,caller,source,dest,... 
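+ # e.g. (illustrative values only, not taken from a real item) the template
+ # 'New value: [VALUE] by [CALLER]' becomes 'New value: 21.5 by KNX'
+ # for item_value '21.5' and caller 'KNX'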
+ msg_txt = msg_txt_tmpl.replace(MESSAGE_TAG_ID, item_id) + msg_txt = msg_txt.replace(MESSAGE_TAG_NAME, item_name) + msg_txt = msg_txt.replace(MESSAGE_TAG_VALUE, item_value) + msg_txt = msg_txt.replace(MESSAGE_TAG_CALLER, caller) + msg_txt = msg_txt.replace(MESSAGE_TAG_SOURCE, source) + msg_txt = msg_txt.replace(MESSAGE_TAG_DEST, dest) + + # checking, if message should be send to specific chat-id + if self.has_iattr(item.conf, ITEM_ATTR_MSG_ID): + msg_chat_id = self.get_iattr_value(item.conf, ITEM_ATTR_MSG_ID) + msg_chat_id_txt = str(msg_chat_id) + else: + msg_chat_id = None + msg_chat_id_txt = 'all' + + # restricing send by a condition set + if self.has_iattr(item.conf, ITEM_ATTR_CONDITION): + cond = self.get_iattr_value(item.conf, ITEM_ATTR_CONDITION).lower() + if cond == "on_change": + if item.property.value != item.property.last_value and item.property.last_update <= item.property.last_change: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"condition {cond} met: {item.property.value}!={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"condition {cond} not met: {item.property.value}=={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") + return + elif cond == "on_update": + # this is standard behaviour + pass + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"ignoring unknown condition {cond}") + + # sending the message + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"send Message: {msg_txt} to Chat_ID {msg_chat_id_txt}") + self.msg_broadcast(msg_txt, msg_chat_id) + + def _msg_broadcast(self, msg, chat_id=None): + self.logger.warning("deprecated, please use msg_broadcast instead") + self.msg_broadcast(msg, chat_id) + + def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): + """ + Send a message to the given chat_id + + :param msg: message to send + :param chat_id: a chat id or a list of chat ids to identificate the chat(s) + :param reply_markup: + :param parse_mode: + """ + for cid in self.get_chat_id_list(chat_id): + try: + self._bot.send_message(chat_id=cid, text=msg, reply_markup=reply_markup, parse_mode=parse_mode) + except TelegramError as e: + self.logger.error(f"could not broadcast to chat id [{cid}] due to error {e}") + except Exception as e: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") + + def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_prepare=True): + """ + Send an image to the given chat + + :param photofile_or_url: either a local file or a URL with a link to an image resource + :param local_prepare: Image will be prepared locally instead of passing a link to Telegram. Needed if an image e.g. of a local network webcam is to be sent. 
+ :param caption: caption of image to send + :param chat_id: a chat id or a list of chat ids to identificate the chat(s) + """ + for cid in self.get_chat_id_list(chat_id): + try: + if photofile_or_url.startswith("http"): + if local_prepare: + photo_raw = requests.get(photofile_or_url) + photo_data = BytesIO(photo_raw.content) + self._bot.send_photo(chat_id=cid, photo=photo_data, caption=caption) + else: + self._bot.send_photo(chat_id=cid, photo=photofile_or_url, caption=caption) + else: + self._bot.send_photo(chat_id=cid, photo=open(str(photofile_or_url), 'rb'), caption=caption) + except Exception as e: + self.logger.error(f"Error '{e}' could not send image {photofile_or_url} to chat id {cid}") + + def get_chat_id_list(self, att_chat_id): + chat_ids_to_send = [] # new list + if att_chat_id is None: # no attribute specified + if self._chat_ids_item: + chat_ids_to_send = [l for l in self._chat_ids_item()] # chat_ids from chat_ids item + else: + if isinstance(att_chat_id, list): # if attribute is a list + chat_ids_to_send = att_chat_id + else: # if attribute is a single chat_id + chat_ids_to_send.append(att_chat_id) # append to list + return chat_ids_to_send + + def has_access_right(self, user_id): + """ + if given chat id is not in list of trusted chat ids then reject with a message + """ + if self._chat_ids_item: + if user_id in self._chat_ids_item(): + return True + else: + self._bot.send_message(chat_id=user_id, text=self._no_access_msg) + + return False + + def has_write_access_right(self, user_id): + """ + if given chat id is not in list of trusted chat ids then reject with a message + """ + if self._chat_ids_item: + if user_id in self._chat_ids_item(): + return self._chat_ids_item()[user_id] + else: + self._bot.send_message(chat_id=user_id, text=self._no_write_access_msg) + + return False + + """ + Arguments to all CommandHandler callback functions are update and context + + update is a telegram.Update Object described at https://python-telegram-bot.readthedocs.io/en/latest/telegram.update.html + When expressed as a dict, the structure of update Object is similar to the following: + ```python + 'update_id': 081512345 + 'message': + 'message_id': 16719 + 'date': 1601107823 + 'chat': + 'id': 471112345 + 'type': 'private' + 'first_name': 'John' + 'last_name': 'Doe' + 'text': '/help' + 'entities': + - 'type': 'bot_command' + - 'offset': 0 + - 'length': 5 + 'caption_entities': [] + 'photo': [] + 'new_chat_members': [] + 'new_chat_photo': [] + 'delete_chat_photo': False + 'group_chat_created': False + 'supergroup_chat_created': False + 'channel_chat_created': False + 'from': # this is essentially from_user, not from since from is reserved in Python + 'id': 471112345 + 'first_name': 'John' + 'is_bot': False + 'last_name': 'Doe' + 'language_code': 'de' + ``` + context is a CallbackContext described at https://python-telegram-bot.readthedocs.io/en/latest/telegram.ext.callbackcontext.html + + it contains the following objects: + args + bot context.bot is the target for send_message() function + bot_data + chat_data + dispatcher + error + from_error + from_job + from_update + job + job_queue + match + matches + update + update_queue + user_data + """ + + def eHandler(self, update, context): + """ + Just logs an error in case of a problem + """ + try: + self.logger.warning(f'Update {update} caused error {context.error}') + except Exception: + pass + + def mHandler(self, update, context): + """ + write the content (text) of the message in an SH-item + """ + if 
self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"write the content (text) of the message in an SH-item for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") + if self.has_write_access_right(update.message.chat.id): + + try: + if self._waitAnswer is None: # keine Antwort erwartet (control-Item/question) + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"update.message.from_user.name={update.message.from_user.name}") + text = update.message.from_user.name + ": " + text += str(update.message.chat.id) + ": " # add the message.chat.id + text += update.message.text # add the message.text + for item in self._items_text_message: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"write item: {item.id()} value: {text}") + item(text, caller=self.get_fullname()) # write text to SH-item + else: # Antwort von control-Item/question wird erwartet + text = update.message.text + dicCtl = self._waitAnswer # _waitAnswer enthält dict mit weiteren Parametern + valid = True # für Prüfung des Wertebereiches bei num + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"update.message.from_user.name={update.message.from_user.name} answer={text} name={dicCtl['name']}") + if text == 'On': + if dicCtl['type'] == 'onoff': + item = dicCtl['item'] + msg = f"{dicCtl['name']} \n change to:On(True)" + self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + item(True) + self._waitAnswer = None + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) + elif text == 'Off': + if dicCtl['type'] == 'onoff': + item = dicCtl['item'] + msg = f"{dicCtl['name']} \n change to:Off(False)" + self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + item(False) + self._waitAnswer = None + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) + elif text == 'Yes': + if self.scheduler_get('telegram_change_item_timeout'): + self.scheduler_remove('telegram_change_item_timeout') + dicCtlCopy = dicCtl.copy() + dicCtlCopy['question'] = '' + self.change_item(update, context, dicCtlCopy['name'], dicCtlCopy) + self._waitAnswer = None + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) + elif dicCtl['type'] == 'num': + if type(text) == int or float: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: answer is num ") + item = dicCtl['item'] + newValue = text + if dicCtl['min'] is not None: + if float(newValue) < float(dicCtl['min']): + valid = False + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: value:{newValue} to low:{dicCtl['min']}") + if dicCtl['max'] is not None: + if float(newValue) > float(dicCtl['max']): + valid = False + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: value:{newValue} to high:{dicCtl['max']}") + if valid: + msg = f"{dicCtl['name']} \n change from:{item()} to:{newValue}" + self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + item(newValue) + if self.scheduler_get('telegram_change_item_timeout'): + self.scheduler_remove('telegram_change_item_timeout') + self._waitAnswer = None + else: + msg = f"{dicCtl['name']} \n out off range" + 
self._bot.sendMessage(chat_id=update.message.chat.id, text=msg) + else: + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) + self._waitAnswer = None + except Exception as e: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Exception '{e}' occurred, traceback '{traceback.format_exc()}' Please inform plugin maintainer!") + + def cHandler_time(self, update, context): + """ + /time: return server time + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/time: return server time for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") + if self.has_access_right(update.message.chat.id): + context.bot.send_message(chat_id=update.message.chat.id, text=str(datetime.datetime.now())) + + def cHandler_help(self, update, context): + """ + /help: show available commands as keyboard + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/help: show available commands as keyboard for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") + if self.has_access_right(update.message.chat.id): + context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("choose"), reply_markup={"keyboard": [["/hide","/start"], ["/time","/list"], ["/lo","/info"], ["/control", "/tr "]]}) + + def cHandler_hide(self, update, context): + """ + /hide: hide keyboard + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/hide: hide keyboard for bot={context.bot} and chat_id={update.message.chat.id}") + if self.has_access_right(update.message.chat.id): + hide_keyboard = {'hide_keyboard': True} + context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("I'll hide the keyboard"), reply_markup=hide_keyboard) + + def cHandler_list(self, update, context): + """ + /list: show registered items and value + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/list: show registered items and value for chat_id={update.message.chat.id}") + if self.has_access_right(update.message.chat.id): + self.list_items(update.message.chat.id) + + def cHandler_info(self, update, context): + """ + /info: show item-menu with registered items with specific attribute + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/info: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") + if self.has_access_right(update.message.chat.id): + if len(self._items_info) > 0: + context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Infos from the items:"), reply_markup={"keyboard": self.create_info_reply_markup()}) + else: + context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_info!"), reply_markup={"keyboard": self.create_info_reply_markup()}) + + def cHandler_start(self, update, context): + """ + /start: show a welcome together with asking to add chat id to trusted chat ids + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/start: show a welcome together with asking to add chat id to trusted chat ids for chat_id={update.message.chat.id}") + text = "" + if self._chat_ids_item: + ids = self._chat_ids_item() + text = self.translate(f"Your chat id is: {update.message.chat.id}") + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f'update.message.chat.id={update.message.chat.id} with 
type={type(update.message.chat.id)}') + self.logger.debug(f'ids dict={ids}') + if update.message.chat.id in ids: + if ids[update.message.chat.id]: + text += ", you have write access" + else: + text += ", you have read access" + else: + text = text+self.translate(", please add it to the list of trusted chat ids to get access") + else: + self.logger.warning('No chat_ids defined') + + context.bot.send_message(chat_id=update.message.chat.id, text=text) + + def cHandler_info_attr(self, update, context): + """ + /command show registered items and value with specific attribute/key + where ``command`` is the value from an item with ``telegram_info`` attribute + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("Enter cHandler_info_attr") + if self.has_access_right(update.message.chat.id): + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Gathering items to fulfill command {update.message.text}") + c_key = update.message.text.replace("/", "", 1) + if c_key in self._items_info: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"info-command: {c_key}") + self.list_items_info(update.message.chat.id, c_key) + else: + self._bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown command %s") % c_key) + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Chat with id {update.message.chat.id} has no right to use command {update.message.text}") + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("Leave cHandler_info_attr") + + def cHandler_lo(self, update, context): + """ + /lo: show all logics with next scheduled execution time + """ + if self.has_access_right(update.message.chat.id): + tmp_msg = "Logics:\n" + for logic in sorted(self.logics.return_defined_logics()): # list with the names of all logics that are currently loaded + data = [] + info = self.logics.get_logic_info(logic) + # self.logger.debug(f"logic_info: {info}") + if len(info) == 0 or not info['enabled']: + data.append("disabled") + if 'next_exec' in info: + data.append(f"scheduled for {info['next_exec']}") + tmp_msg += f"{logic}" + if len(data): + tmp_msg += f" ({', '.join(data)})" + tmp_msg += "\n" + self.logger.info(f"send Message: {tmp_msg}") + self._bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) + + def cHandler_tr(self, update, context): + """ + Trigger a logic with command ``/tr xx`` where xx is the name of the logic to trigger + """ + if self.has_access_right(update.message.chat.id): + logicname = context.args[0] + try: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"trigger_logic: {context.args}") + self.logics.trigger_logic(logicname, by=self.get_shortname()) # Trigger a logic + except Exception as e: + tmp_msg = f"could not trigger logic {logicname} due to error {e}" + self.logger.warning(tmp_msg) + self._bot.sendMessage(chat_id=update.message.chat.id, text=tmp_msg) + + def cHandler_control(self, update, context): + """ + /control: Change values of items with specific attribute + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"/control: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") + if self.has_write_access_right(update.message.chat.id): + if len(self._items_control) > 0: + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) + self.list_items_control(update.message.chat.id) + else: + 
context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_control!"), reply_markup={"keyboard": self.create_control_reply_markup()}) + + def cHandler_control_attr(self, update, context): + """ + /xx change value from registered items + """ + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("Enter cHandler_control_attr") + if self.has_write_access_right(update.message.chat.id): + c_key = update.message.text.replace("/", "", 1) + if c_key in self._items_control: + dicCtl = self._items_control[c_key] #{'type':type,'item':item} + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-command: name:{c_key} dictCtl:{dicCtl}") + self.change_item(update=update, context=context, name=c_key, dicCtl=dicCtl) + else: + self._bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown control-command %s") % (c_key)) + + # helper functions + def list_items(self, chat_id): + """ + Send a message with all items that are marked with an attribute ``telegram_message`` + """ + if self.has_access_right(chat_id): + text = "" + for item in self._items: + if item.type(): + text += f"{item.id()} = {item()}\n" + else: + text += f"{item.id()}\n" + if not text: + text = "no items found with the attribute:" + ITEM_ATTR_MESSAGE + self._bot.sendMessage(chat_id=chat_id, text=text) + + def list_items_info(self, chat_id, key): + """ + Show registered items and value with specific attribute/key + """ + if self.has_access_right(chat_id): + text = "" + for item in self._items_info[key]: + if item.type(): + text += f"{item.id()} = {item()}\n" + else: + text += f"{item.id()}\n" + if not text: + text = self.translate("no items found with the attribute %s") % ITEM_ATTR_INFO + self._bot.sendMessage(chat_id=chat_id, text=text) + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Chat with id {chat_id} has no right to list items with key {key}") + + def create_info_reply_markup(self): + """ + Creates a keyboard with all items having a ``telegram_info`` attribute + """ + # reply_markup={"keyboard":[["/roll","/hide"], ["/time","/list"], ["/lo","/info"]]}) + button_list = [] + for key, value in self._items_info.items(): + button_list.append("/"+key) + # self.logger.debug(f"button_list: {button_list}") + header = ["/help"] + # self.logger.debug(f"header: {header}") + keyboard = self.build_menu(button_list, n_cols=3, header_buttons=header) + # self.logger.debug(f"keyboard: {keyboard}") + return keyboard + + def create_control_reply_markup(self): + """ + Creates a keyboard with all items having a ``telegram_control`` attribute + """ + button_list = [] + for key, value in sorted(self._items_control.items()): + button_list.append("/"+key) + + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"button_list: {button_list}") + header = ["/help"] + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"header: {header}") + keyboard = self.build_menu(button_list, n_cols=3, header_buttons=header) + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"keyboard: {keyboard}") + return keyboard + + def build_menu(self, buttons, n_cols, header_buttons=None, footer_buttons=None): + """ + create a bot-menu + """ + menu = [buttons[i:i + n_cols] for i in range(0, len(buttons), n_cols)] + if header_buttons: + menu.insert(0, header_buttons) + if footer_buttons: + menu.append(footer_buttons) + return menu + + def list_items_control(self, chat_id): + """ + Show registered items and value with 
specific attribute ITEM_ATTR_CONTROL + """ + if self.has_access_right(chat_id): + text = "" + for key, value in sorted(self._items_control.items()): # {'type':type,'item':item} + item = value['item'] + if item.type(): + text += f"{key} = {item()}\n" + else: + text += f"{key}\n" + if not text: + text = self.translate("no items found with the attribute %s") % ITEM_ATTR_CONTROL + self._bot.sendMessage(chat_id=chat_id, text=text) + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Chat with id {chat_id} has no right to list items with attribute {ITEM_ATTR_CONTROL}") + + def change_item(self, update, context, name, dicCtl): + """ + util to change a item-value + name:bla, type:toggle/on/off/onoff/trigger/num question:'wirklich einschalten?' + """ + chat_id = update.message.chat.id + item = dicCtl['item'] + changeType = dicCtl['type'] + question = dicCtl['question'] + timeout = dicCtl['timeout'] + text = "" + if changeType == 'toggle': + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: type:toggle") + if question != '': + nd = (datetime.datetime.now()+ datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) + self._waitAnswer = dicCtl + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: add scheduler for answer-timout") + self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) + text = question + self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + else: + value = item() + if item.type() == "bool": + newValue = not value + text += f"{name} \n change from:{value} to:{newValue}\n" + else: + newValue = value + text += f"{name}: {value}\n" + self._bot.sendMessage(chat_id=chat_id, text=text) + item(newValue) + text = f"{name}: {item()}\n" + self._bot.sendMessage(chat_id=chat_id, text=text) + if changeType == 'on': + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: type:on") + if question != '': + nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) + self._waitAnswer = dicCtl + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: add scheduler for answer-timout") + self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) + text = question + self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + else: + if item.type() == "bool": + item(True) + text = f"{name}: {item()}\n" + self._bot.sendMessage(chat_id=chat_id, text=text) + if changeType == 'off': + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: type:off") + if question != '': + nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) + self._waitAnswer = dicCtl + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: add scheduler for answer-timout") + self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) + text = question + self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['Yes', 'No']]}) + else: + if item.type() == "bool": + item(False) + text = f"{name}: {item()}\n" + self._bot.sendMessage(chat_id=chat_id, text=text) + if changeType 
== 'onoff': + nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) + self._waitAnswer = dicCtl + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: add scheduler for answer-timout") + self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) + if question == '': + text = self.translate("choose") + else: + text = question + self._bot.send_message(chat_id=update.message.chat.id, text=text, reply_markup={"keyboard": [['On', 'Off']]}) + if changeType == 'num': + text = self.translate("insert a value") + nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) + self._waitAnswer = dicCtl + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"control-item: add scheduler for answer-timout") + self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) + self._bot.sendMessage(chat_id=chat_id, text=text) + if not text: + text = self.translate("no items found with the attribute %s") % ITEM_ATTR_CONTROL + self._bot.sendMessage(chat_id=chat_id, text=text) + + def telegram_change_item_timeout(self, **kwargs): + update = None + context = None + if 'update' in kwargs: + update = kwargs['update'] + if 'context' in kwargs: + context = kwargs['context'] + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Answer control_item timeout update:{update} context:{context}") + if self._waitAnswer is not None: + self._waitAnswer = None + self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) diff --git a/telegram/_pv_1_7_1/locale.yaml b/telegram/_pv_1_7_1/locale.yaml new file mode 100644 index 000000000..63f423cd9 --- /dev/null +++ b/telegram/_pv_1_7_1/locale.yaml @@ -0,0 +1,15 @@ +plugin_translations: + # Translations for the plugin specially for the web interface + 'choose': {'de': 'Auswählen', 'en': '=', 'fr': 'Choisissez parmi'} + 'I will hide the keyboard': {'de': 'Ich blende die Bedientasten aus', 'en': '=', 'fr': 'Je cache le clavier'} + 'Info from the items:': {'de': 'Infos von den Items:', 'en': '=', 'fr': 'Info sur les Items:'} + 'unknown command %s': {'de': 'Unbekanntes Kommando %s:', 'en': '=', 'fr': 'commande inconnue %s:'} + 'no items found with the attribute %s': {'de': 'Keine Items mit Attribut %s gefunden', 'en': '=', 'fr': 'Ne pas trouvée une Item avec %s'} + 'insert a value': {'de': 'Wert eingeben', 'en': '=', 'fr': 'insérer une valeur'} + + # Alternative format for translations of longer texts: +# 'Hier kommt der Inhalt des Webinterfaces hin.': +# de: '=' +# en: 'Here goes the content of the web interface.' +# fr: 'Le contenu de l'interface web vient ici.' + diff --git a/telegram/_pv_1_7_1/logging.yaml.telegram b/telegram/_pv_1_7_1/logging.yaml.telegram new file mode 100644 index 000000000..50db78a53 --- /dev/null +++ b/telegram/_pv_1_7_1/logging.yaml.telegram @@ -0,0 +1,4 @@ +loggers: + requests.packages.urllib3.connectionpool: + # e.g. 
hide Resetting dropped connection + level: WARNING diff --git a/telegram/_pv_1_7_1/plugin.yaml b/telegram/_pv_1_7_1/plugin.yaml new file mode 100644 index 000000000..a85382e8f --- /dev/null +++ b/telegram/_pv_1_7_1/plugin.yaml @@ -0,0 +1,185 @@ +# Metadata for the Smart-Plugin +plugin: + # Global plugin attributes + type: web # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Anbindung des Telegram Messenger Service' + en: 'Connects to the telegram messenger service' + maintainer: gamade, ivan73, bmxp + state: ready + tester: NONE + keywords: telegram chat messenger photo + documentation: http://smarthomeng.de/user/plugins/telegram/user_doc.html + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1548691-support-thread-für-das-telegram-plugin + + version: 1.7.1 # Plugin version + sh_minversion: 1.8 # minimum shNG version to use this plugin + # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) + py_minversion: 3.6 # minimum Python version to use for this plugin + # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) + multi_instance: True # plugin supports multi instance + restartable: True + classname: Telegram # class containing the plugin + +parameters: + # Definition of parameters to be configured in etc/plugin.yaml + token: + type: str + description: + de: 'Geheimer Schlüssel zur Authentifizierung bei Telegram' + en: 'Secret key to authenticate to telegram network' + + name: + type: str + default: 'SmartHomeNG Telegram Gateway' + description: + de: 'Name um diese Instanz zu identifizieren' + en: 'used to identify this Telegram Instance' + + long_polling_timeout: + type: num + # 2 minutes should be long enough to avoid problems with too many connections to telegram server + default: 120 + description: + de: 'Zeit nachdem eine neue Verbindung zum Telegram Server aufgebaut um Updates zu holen' + en: 'Time after a new link to telegram server will be established to get updates' + + welcome_msg: + type: str + default: 'SmarthomeNG Telegram Plugin is up and running' + description: + de: 'Willkommensnachricht, wird an alle vertrauenswürdigen Clients beim Start des Plugins gesendet. Ist der Wert leer, wird keine Willkommensnachricht gesendet.' + en: 'Welcome message, will be sent to all trusted chat clients at plugin start. An empty value indicates that no welcome message is sent.' + + bye_msg: + type: str + default: 'SmartHomeNG Telegram Plugin stops' + description: + de: 'Endenachricht, wird an alle vertrauenswürdigen Clients beim Stop des Plugins gesendet. Ist der Wert leer, wird keine Endenachricht gesendet.' + en: 'Bye message, will be sent to all trusted chat clients at plugin stop. An empty value indicates that no by message is sent.' 
+ + no_access_msg: + type: str + default: 'This bot does not know your chat id and thus does not trust it, you are not allowed to use this bot' + description: + de: 'Mitteilung die der Telegram Benutzer erhält, weil der chat id nicht vertraut wird' + en: 'Message the telegram user get when chat id can not be trusted' + + no_write_access_msg: + type: str + default: 'This bot knows your chat id but you are not allowed to use this bot to write to items' + description: + de: 'Mitteilung die der Telegram Benutzer erhält, weil der chat id keine Schreibrechte auf Items hat' + en: 'Message the telegram user get when chat id has no write access to items' + + pretty_thread_names: + type: bool + default: true + description: + de: 'Telegram Threads zur leichteren Unterscheidung in der Thread Liste umbenennen' + en: 'Rename Telegram threads for easier distinction in thread list' + +item_attributes: + # Definition of item attributes defined by this plugin + telegram_message: + type: str + description: + de: 'Mitteilung senden wenn sich der Itemwert ändert' + en: 'Send message on item change' + + telegram_condition: + type: str + default: 'on_update' + valid_list: + - 'on_update' + - 'on_change' + valid_list_description: + de: ['Senden nach Wertaktualisierung', 'Nur senden, wenn sich der Wert des Items gegenüber dem vorherigen Wert geändert hat'] + en: ['Send after item value update', 'Send message only if item changed its value compared to the last value'] + description: + de: 'Bedingung unter der die Mitteilung gesendet wird' + en: 'Condition to send the message' + + telegram_info: + type: str + description: + de: 'Item lesen per Telegram Keyboard. Der Wert des Attributes bestimmt das Telegram Keyboard Kommando' + en: 'Read items with telegram keyboard. The value of the attribute defines the telegram keyboard command' + + telegram_text: + type: str + description: + de: 'Mitteilungstext als Itemwert setzen; Das Format ist "user: Chat-ID: msg"' + en: 'write message-text into the item; Format is "user: Chat-ID: msg"' + + telegram_value_match_regex: + type: str + description: + de: 'Itemwert gegen einen regulären Ausdruck prüfen vor dem Versenden einer Mitteilung' + en: 'check a value against a condition before sending a message' + + telegram_chat_ids: + type: foo + description: + de: 'Ein Item dessen Datentyp dict ist und Paare von ``Chat_id`` und Schreib/Lese- oder Lesezugriff' + en: 'An item of type dict with trusted ``chat_id`` and read/write or readonly access' + + telegram_message_chat_id: + type: num + description: + de: 'Chat_ID, zu der diese Message gesendet werden soll' + en: 'Chat_ID the message should be sent to' + + telegram_control: + type: str + description: + de: 'Item schreiben per Telegram Keyboard. Der Wert des Attributes (mit mehreren Paramtern) bestimmt das Telegram Keyboard Kommando' + en: 'Write items with telegram keyboard. 
The value of the attribute (with parameters) defines the telegram keyboard command' + +logic_parameters: NONE + # Definition of logic parameters defined by this plugin + +plugin_functions: + msg_broadcast: + type: str + description: + de: 'Sendet eine Nachricht an einen Empfänger' + en: 'Sends a message to a recipient' + parameters: + msg: + type: str + description: + de: 'Nachricht die verschickt werden soll' + en: 'Message to be sent' + + chat_id: + type: int + description: + de: 'ID des Chats oder Liste mit Chats denen die Nachricht gesendet werden soll' + en: 'Chat id or list of chat ids to receive the message' + + photo_broadcast: + type: str + description: + de: 'Sendet ein Bild an einen Empfänger' + en: 'Sends an image to a recipient' + parameters: + photofile_or_url: + type: str + description: + de: 'Dateiname oder URL eines Bildes' + en: 'Filename or URL of an image' + + caption: + type: str + description: + de: 'Titel des Bildes' + en: 'Title of image' + + chat_id: + type: int + description: + de: 'ID des Chats oder Liste mit Chats denen das Bild gesendet werden soll' + en: 'Chat id or list of chat ids to receive the image' + +item_structs: NONE diff --git a/telegram/_pv_1_7_1/requirements.txt b/telegram/_pv_1_7_1/requirements.txt new file mode 100644 index 000000000..76dc58511 --- /dev/null +++ b/telegram/_pv_1_7_1/requirements.txt @@ -0,0 +1,2 @@ +python-telegram-bot>=12.8.0, <14.0 +requests \ No newline at end of file diff --git a/telegram/_pv_1_7_1/user_doc.rst b/telegram/_pv_1_7_1/user_doc.rst new file mode 100644 index 000000000..d98fbdbd5 --- /dev/null +++ b/telegram/_pv_1_7_1/user_doc.rst @@ -0,0 +1,577 @@ +.. index:: Plugins; telegram +.. index:: telegram + +======== +telegram +======== + +Das Plugin dient zum Senden und Empfangen von Nachrichten über den +`Telegram Nachrichten Dienst `_ + +Abhängigkeiten +============== + +Es wird die Bibliothek ``python-telegram-bot`` benötigt. +Diese ist in der ``requirements.txt`` enthalten. +Bevor das Plugin genutzt werden kann, muß die Bibliothek installiert werden: + +* Entweder mit ``sudo pip install -r requirements.txt`` + +oder + +* unter Benutzung von ``pip install -r requirements.txt`` innerhalb + des Verzeichnisses ``/usr/local/smarthome/plugins/telegram``. + +Konfiguration von Telegram +========================== + +Zuerst muß ein eigener Bot bei Telegram erstellt werden: + +* An ``Botfather`` das Kommando ``/newbot`` senden. +* Dann muß ein **Bot Name** vergeben werden der noch nicht existiert. +* Weitere Bot Details können eingestellt werden, wenn das Kommando + ``/mybots`` an den BotFather gesendet wird. + +Der BotFather erstellt für den neuen Bot ein sogenanntes **token** also einen einzigartigen Schlüssel. + +Konfiguration des Plugins +========================= + +Die Konfiguration des Plugins ist auch unter :doc:`/plugins_doc/config/telegram` beschrieben bzw. in der **plugin.yaml** nachzulesen. + + +Der erstelle **token** muß in der ``plugin.yaml`` von SmartHomeNG eingetragen werden. Das kann im Admin-IF geschehen oder durch direkten Eintrag in die ``plugin.yaml``. + +.. code::yaml + + telegram: + plugin_name: telegram + name: Mein Haus + token: 123456789:BBCCfd78dsf98sd9ds-_HJKShh4z5z4zh22 + +* name: Eine Beschreibung des Bots +* token: Der oben beschriebene einzigartige Schlüssel mit dem der Bot bei Telegram identifiziert wird. + +Item Konfiguration +================== + +Jeder Chat, der auf den Bot zugreifen soll, muß SmartHomeNG bekannt gemacht werden. 
+Das geschieht über ein Item das das Attribut ``telegram_chat_ids`` mit dem Parameter True hat und als Wert ein Dictionary hat. +Im Dictionary sind Paare von Chat-ID und Berechtigung gespeichert. + +.. code::yaml + + Chat_Ids: + type: dict + telegram_chat_ids: True + # cache bietet sich an um Änderungen an den trusted_chat_ids während der + # Laufzeit von SmartHomeNG zu speichern und nach Neustart wieder zu laden + # es wird dann der letzte Wert geladen + cache: 'True' + # Beispiel value: '{ 3234123342: 1, 9234123341: 0 }' + # Ein Dictionary mit chat id und 1 für Lese und Schreibzugriff oder 0 für einen nur Lese-Zugriff + # Nachfolgend ein Chat dem Lese- und Schreibrechte gewährt werden + value: '{ 3234123342: 1 }' + +Um die Chat Id zu bekommen, muß der Bot (und das Plugin) zunächst laufen. Dazu wird SmartHomeNG (neu) gestartet. + +Im Telegram Client wird der Bot als Chatpartner aufgerufen und das Kommando ``/start`` an den Bot gesendet. + +Der Bot reagiert mit einer Meldung, das die Chat-ID noch nicht bekannt ist und diese zunächst eingetragen werden muß. Mit der nun bekannten Chat-ID wird +über das AdminIF das Items Dictionary des entsprechenden Items aus dem obigen Beispiel mit den eigenen Chat-IDs erweitert. + +Ein erneutes Kommando im Telegram Client an den Bot mit ``/start`` sollte nun die Meldung ergeben, das der Chat bekannt ist und weiterhin, welche +Zugriffsrechte der Chat auf den Bot hat. + + +telegram_chat_ids +----------------- + +Es muß ein Item mit dem Typ Dictionary mit dem Attribut ``telegram_chat_ids`` und dem Parameterwert ``True`` angelegt werden. +In ihm werden Chat-IDs und Zugriff auf den Bot gespeichert. Siehe obiges Beispiel. + + +telegram_message +----------------- +Items mit dem Attribut ``telegram_message`` lösen eine Nachricht aus, wenn sich der Itemwert ändert. Es ist möglich Platzhalter +in der Nachricht zu verwenden. + +Verfügbare Platzhalter: + +[ID] [NAME] [VALUE] [CALLER] [SOURCE] [DEST] + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + name: Türklingel (entprellt) + type: bool + knx_dpt: 1 + telegram_message: 'Es klingelt an der Tür' + +Beispiel mit Platzhaltern +''''''''''''''''''''''''' + +.. code:: yaml + + state_name: + name: Name des aktuellen Zustands + type: str + visu_acl: r + cache: 'on' + telegram_message: 'New AutoBlind state: [VALUE]' + + +telegram_condition +------------------ + +Da es Situationen gibt die für Items ein ``enforce_updates: True`` benötigen, würde bei ``telegram_message`` bei jeder Aktualisierung des Items eine Nachricht verschickt werden. +Um das zu verhindern, kann einem Item das Attribut ``telegram_condition: on_change`` zugewiesen werden. + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + type: bool + knx_dpt: 1 + enforce_updates: True + telegram_message: 'Es klingelt an der Tür' + telegram_condition: on_change + telegram_value_match_regex: (true|True|1) + +Dadurch wird auf eine mehrfache Zuweisung des Items mit dem Wert ``True`` nur einmal mit einer Nachricht reagiert. Um eine weitere Nachricht zu generieren +muss das Item zunächst wieder den Wert ``False`` annehmen. Das Attribut ``telegram_value_match_regex`` filtert den Wert so das es bei der Änderung des Itemwertes +auf ``False`` zu keiner Meldung *Es klingelt an der Tür* kommt. + + +telegram_value_match_regex +-------------------------- + +Ist zusätzlich zum Attribut ``telegram_message`` auch das Attribut ``telegram_value_match_regex`` gesetzt, wird der Itemwert geprüft, bevor eine +Nachricht gesendet wird. 
Geprüft wird gegen/mit den Regex, der als Parameterwert angegeben ist. + +Beispiel +'''''''' + +.. code:: yaml + + TestNum: + type: num + cache: True + telegram_message: 'TestNum: [VALUE]' + telegram_value_match_regex: '[0-1][0-9]' # nur Nachrichten senden wenn Zahlen von 0 - 19 + TestBool: + type: bool + cache: True + telegram_message: "TestBool: [VALUE]" + telegram_value_match_regex: 1 # nur Nachricht senden wenn 1 (True) + + +telegram_message_chat_id +------------------------ +Ist zusätzlich zum Attribut ``telegram_message`` auch das Attribut ``telegram_message_chat_id`` gesetzt, wird die Nachricht nur an die dort angegebene Chat-ID (hier 3234123342) gesendet. +Ist das Attribut nicht gesetzt, erfolgt der Versand der Nachricht an alle Chat-IDs, die dem Plugin bekannt sind. + +Einfaches Beispiel +'''''''''''''''''' + +.. code:: yaml + + Tuerklingel: + type: bool + knx_dpt: 1 + enforce_updates: True + telegram_message: 'Es klingelt an der Tür' + telegram_message_chat_id: 3234123342 + telegram_value_match_regex: (true|True|1) + + +telegram_info +------------- + +Für alle Items mit diesem Attribut wird eine Liste mit Kommandos für den Bot erstellt. Der Listeneintrag entspricht dabei dem Attributwert. +Wird das Kommando ``/info`` an den Bot gesendet, so erstellt der Bot ein Tastaturmenü, dass jedes Attribut mindestens einmal als Kommando enthält. +Bei Auswahl eines dieser Kommandos im Telegram Client wird dann für jedes Item, dass das Attribut ``telegram_info`` und als Attributwert den Kommandonamen enthält +der Wert des Items ausgegeben. + +Beispiel +'''''''' + +.. code:: yaml + + Aussentemperatur: + name: Aussentemperatur in °C + type: num + knx_dpt: 9 + telegram_info: wetter + + Wind_kmh: + name: Windgeschwindigkeit in kmh + type: num + knx_dpt: 9 + telegram_info: wetter + + Raumtemperatur: + name: Raumtemperatur Wohnzimmer in °C + type: num + knx_dpt: 9 + telegram_info: rtr_ist + +Das Kommando ``/info`` veranlasst den Bot zu antworten mit + +.. code:: + + [/wetter] [/rtr_ist] + +Wählt man am Telegram Client daraufhin ``[/wetter]`` aus, so werden + +.. code:: + + Aussentemperatur = -10,6 + Wind_kmh = 12.6 + +ausgegeben. Bei der Auswahl des Kommandos ``[/rtr_ist]`` antwortet der Bot mit + +.. code:: + + Raumtemperatur = 22.6 + + +telegram_text +------------- + +Items mit dem Attribut ``telegram_text`` und dem Attributwert ``True`` bekommen eine Mitteilung, die von einem Telegram Client an den Bot gesendet wird, als Wert zugewiesen. + +Beispiel +'''''''' + +.. code:: yaml + + telegram_message: + name: Textnachricht von Telegram + type: str + telegram_text: true + +Nach der Eingabe von ``Hello world!`` am Telegram wird das Item ``telegram_message`` +auf ``: Chat-ID: Hello world!`` gesetzt. +Ein John Doe ergäbe also ``John Doe: xxxxxxx: Hello world!`` + +Mit einer Logik kann basierend darauf ein Menu und entsprechende Abfragen an shNG gestellt werden. +Siehe dazu ein Beispiel weiter unten. + +telegram_control +------------- + +Für alle Items mit diesem Attribut wird eine Liste mit Kommandos für den Bot erstellt. Der Listeneintrag muss mit ``name`` spezifiziert werden. +Wird das Kommando ``/control`` an den Bot gesendet, so erstellt der Bot ein Tastaturmenü, dass jedes Attribut als Kommando enthält. +Dabei werden auch alle aktuellen Werte der Items ausgegeben. +Bei Auswahl eines dieser Kommandos im Telegram Client kann dann ein Item vom Type bool geschalten werden (on/off) oder beim Type 'num' kein eine Zahl zum SH-Item gesendet werden. 
+ +``name`` Item wird mit diesem Namen im Bot als Kommando dargestellt +``type`` Möglichkeiten: on, off, onoff, toggle, num + on * nur Einschalten ist möglich + off * nur Ausschalten ist möglich + onoff * das Ein- und Ausschalten muss mit einen weiteren Kommando vom Tastaturmenu ausgewählt werden + [On] [Off] (nach einem Timeout ohne Antwort wird der Befehl abgebrochen) + toggle * der Wert des Items wird umgeschltet (0 zu 1; 1 zu 0) + num * es kann eine Zahl an SH gesendet werden und das entsprechende Item wird damit geschrieben. (nach einem Timeout ohne Antwort wird der Befehl abgebrochen) +``question``Sicherheitsabfrage vor dem Schalten des Items (verwendbar bei type:on/off/toggle - nach einem Timeout ohne Antwort wird der Befehl abgebrochen) + [Yes] [No] +``min`` Minimalwert (verwendbar bei type:num) +``max`` Maximalwert (verwendbar bei type:num) +``timeout`` Zeit nach welcher der Befehl mit Antwort(onoff/question/num) abgebrochen wird (default 20Sekunden) + + +Beispiel +'''''''' + +.. code:: yaml + + BeregnungZone1: + type: bool + cache: True + telegram_control: "name:BeregnungZ1, type:onoff" + BeregnungZone2: + type: bool + cache: True + telegram_control: "name:BeregnungZ2, type:toggle, question:Ventil wirklich umschalten?" + Gartentor: + type: bool + cache: True + telegram_control: "name:Gartentor, type:on, question:Gartentor wirklich öffnen?" + Dachfenster: + type: num + cache: True + telegram_control: "name:Dachfenster, type:num, min:0, max:100, timeout:30" + Kamera: + type: bool + cache: True + telegram_control: "name:Kamera, type:toggle" + eval: sh.plugins.return_plugin("telegram").photo_broadcast("http://192.168.0.78/snapshot/view0.jpg", datetime.datetime.now().strftime("%H:%M %d.%m.%Y")) + + +Das Kommando ``/control`` veranlasst den Bot zu antworten mit + +.. code:: + + [/BeregnungZ1] [/BeregnungZ2] [/Gartentor] + [/Dachfenster] [/Kamera] + + + +Funktionen +========== + +Das Plugin stellt derzeit zwei Funktionen zur Nutzung in Logiken bereit: + + +msg_broadcast +------------- + +Argumente beim Funktionsaufruf: + +**msg**: Die Nachricht, die verschickt werden soll + +**chat_id**: + - Eine Chat-ID oder eine Liste von Chat-IDs. + - Wird keine ID oder None angegeben, so wird an alle autorisierten Chats gesendet + +photo_broadcast +--------------- + +Argumente beim Funktionsaufruf: + +**path_or_URL**: + - entweder ein lokaler Pfad, der auf eine Bilddatei zeigt log_directory oder + - eine URL mit einem Link. Wenn der Link lokal ist, + +**caption**: + - Titel der Bilddatei, kann auch Dateiname sein oder Datum + - Vorgabewert: None + +**chat_id**: + - eine Chat Id oder eine Liste von Chat ids. Wird keine ID oder None angegeben, + so wird an alle autorisierten Chats gesendet + - Vorgabewert: None + +**local_prepare** + - Ist für das zu sendende Bild eine URL angegeben, ruft das Plugin die + Daten von der URL lokal ab und sendet die Daten dann an den Telegram Server. + Beispiel dafür ist eine URL einer lokalen Webcam. + Soll stattdessen eine im Internet frei zugängliche URL abgerufen werden, + so wird dieses Argument auf False gesetzt und es wird nur die URL + an Telegram geschickt und der lokale Rechner von den Daten entlastet. + Aktuell kann das Plugin nicht mit Benutzername und Passwort geschützten + URL umgehen. + - Vorgabewert: True + +Beispiele +--------- + +Die folgende Beispiellogik zeigt einige Nutzungsmöglichkeiten für die Funktionen: + +.. code:: python + + # Eine Nachricht `Hello world!` wird an alle vertrauten Chat Ids gesendet + msg = "Hello world!" 
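+ # Hinweis: 'sh.telegram' setzt voraus, dass das Plugin wie im Konfigurationsbeispiel oben unter dem Namen 'telegram' in etc/plugin.yaml eingetragen ist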
+ sh.telegram.msg_broadcast(msg) + + # Ein Bild von einem externen Server soll gesendet werden. + # Nur die URL wird an Telegram gesendet und keine Daten lokal aufbereitet + sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "A dog", None, False) + + # Bild auf lokalem Server mit aktueller Zeit an Telegram senden + my_webcam_url = "http:// .... bitte lokale URL hier einfügen zum Test ..." + sh.telegram.photo_broadcast(my_webcam_url, "My webcam at {:%Y-%m-%d %H:%M:%S}".format(sh.shtime.now())) + + # Bild senden aber den Inhalt lokal vorbereiten + sh.telegram.photo_broadcast("https://cdn.pixabay.com/photo/2018/10/09/16/20/dog-3735336_960_720.jpg", "The dog again (data locally prepared)") + + local_file = "/usr/local/smarthome/var/ ... bitte eine lokal gespeicherte Datei angeben ..." + sh.telegram.photo_broadcast(local_file, local_file) + + +Anwendungen +=========== + +Menugestützte Interaktion zwischen Telegram und shNG +---------------------------------------------------- + +Diese Anwendung nutzt den Wert, den Telegram in das Item mit dem Attribut ``telegram_text`` schreibt. +Dieser Wert beinhaltet den den User, die Chat-ID und die Message. Basierend auf diesem wird mit einer Logik ein Menu im Chat +dargestellt und die entsprechenden Aktionen ausgeführt. + +.. code:: python + + #!/usr/bin/env python3 + # telegram_message.py + + logger.info(f"Logik '{logic.id()}' ausgelöst durch: {trigger['by']} und {trigger['source']} mit Wert {trigger['value']}") + + telegram_plugin = sh.plugins.return_plugin('telegram') + + def bool2str(wert, typus, logic=logic): + logger.info(f"bool2str wert = {wert}, typus = {typus}") + if type(wert) is bool: + if typus == 1: + result = 'verschlossen' if wert is True else 'offen' + elif typus ==2: + result = 'an' if wert is True else 'aus' + elif typus ==3: + result = 'ja' if wert is True else 'nein' + else: + result = 'typus noch nicht definiert' + else: + result = 'Wert ist nicht vom Type bool' + return result + logic.bool2str = bool2str + + # Telegram Message einlesen und verarbeiten + message = sh.telegram.info.message() + message_user = message[:message.find(":")].lower() + message_chat_id = message[message.find(":")+2:len(message)] + message_text = message_chat_id[message_chat_id.find(":")+2:].lower() + message_chat_id = message_chat_id[:message_chat_id.find(":")] + + ## Menu definieren + if message_chat_id == 'xxxxxxx': + # Menu Ebene1 + custom_keyboard_ubersicht = {'keyboard':[['Rolladen','Tür&Tor'], ['Heizung','Schalten'], ['Wetter','Verkehr','Tanken']] , 'resize_keyboard': True, 'one_time_keyboard': False} + elif message_chat_id == 'yyyyyyy': + # Menu Ebene1 + custom_keyboard_ubersicht = {'keyboard':[['Wetter','Tür&Tor'], ['Heizung','Tanken']] , 'resize_keyboard': True, 'one_time_keyboard': False} + + # Menu Ebene2 + custom_keyboard_wetter = {'keyboard':[['zurück'], ['aktuell', 'historisch']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_schalten = {'keyboard':[['zurück'], ['LED Nische WZ', 'LED Nische EZ']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_heizung = {'keyboard':[['zurück'], ['Heizung Status'],['HK_2 Standby', 'HK_2 Normal'], ['EG/OG bewohnt', 'EG/OG unbewohnt'], ['Warmwasser Status'],['Warmwasser AN', 'Warmwasser AUS']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_verkehr = {'keyboard':[['zurück'], ['Arbeitsweg', 'Heimweg']] , 'resize_keyboard': True, 'one_time_keyboard': False} + custom_keyboard_rolladen = 
{'keyboard':[['zurück'], ['Rollladen Status'], ['EG Automatik An','OG Automatik An'], ['EG Automatik Aus','OG Automatik Aus']] , 'resize_keyboard': True, 'one_time_keyboard': False} + + ## Menu auswählen und senden + msg = '' + parse_mode = 'HTML' + reply_markup = {} + + if message_text == 'menu' or message_text == "zurück": + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_ubersicht + elif message_text == 'wetter': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_wetter + elif message_text == 'heizung': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_heizung + elif message_text == 'schalten': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_schalten + elif message_text == 'verkehr': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_verkehr + elif message_text == 'rolladen': + msg = 'Bitte auswählen:' + reply_markup = custom_keyboard_rolladen + + ## Messages definieren und senden + # Wetter + if message_text == 'aktuell': + msg = 'Wetter:\naktuelle. Temp.: ' + str(sh.raumtemp.aussen.nord()) + ' °C \ + \ngefühlte Temp.: ' + str(sh.wetter.froggit.wetterstation.feelslikec()) + ' °C \ + \nrel. Luftfeuchte: ' + str(sh.raumtemp.aussen.nord.luftfeuchtigkeit.hum_ist()) + ' % \ + \nRegen letzte h: ' + str(sh.wetter.froggit.wetterstation.hourlyrainmm()) + ' l/m² \ + \nRegen heute: ' + str(sh.wetter.froggit.wetterstation.dailyrainmm()) + ' l/m² \ + \nLuftdruck: ' + str(sh.raumtemp.eg.diele.luftdruck()) + ' hPa \ + \nWind Mittel: {:3.2f}'.format(sh.wetter.froggit.wetterstation.windgustkmh_max10m()) + ' km/h \ + \nWind Spitze: {:3.2f}'.format(sh.wetter.froggit.wetterstation.maxdailygust()) + ' km/h ' + elif message_text == 'historisch': + msg = 'bislang nicht definiert' + + # Warmwasser + elif message_text == 'warmwasser status': + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + elif message_text == 'warmwasser aus': + sh.heizung.warmwasser.temperatur_soll(10) + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + elif message_text == 'warmwasser an': + sh.heizung.warmwasser.temperatur_soll(40) + msg = 'Warmwasser:\nSoll_Temp: ' + str(sh.heizung.warmwasser.temperatur_soll()) + ' °C \ + \nIst_Temp: ' + str(sh.heizung.warmwasser.temperatur_ist()) + ' °C \ + \nPumpe: ' + logic.bool2str(sh.heizung.warmwasser.speicherladepumpe_knx(), 2) + + # Heizung + elif message_text == 'heizung status': + msg = 'HK_2:\nBetriebsart A1: ' + str(sh.heizung.heizkreis_a1m1.betriebsart.betriebsart.betriebsart_str()) +'\ + \nBetriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) +'\ + \nPumpe A1: ' + logic.bool2str(sh.heizung.heizkreis_a1m1.status.hk_pumpe_knx(), 2) +'\ + \nPumpe M2: ' + logic.bool2str(sh.heizung.heizkreis_m2.status.hk_pumpe_knx(), 2) +'\ + \nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) +'\ + \nUG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + elif message_text == 'hk_2 standby': + sh.heizung.heizkreis_m2.betriebsart.betriebsart(0) + msg = 'HK_2:\nneue Betriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) + elif message_text == 'hk_2 normal': + 
sh.heizung.heizkreis_m2.betriebsart.betriebsart(2) + msg = 'HK_2:\nneue Betriebsart M2: ' + str(sh.heizung.heizkreis_m2.betriebsart.betriebsart.betriebsart_str()) + elif message_text == 'eg/og bewohnt': + sh.raumtemp.anwesend_eg_og(1) + msg = 'HK_2:\nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + elif message_text == 'eg/og unbewohnt': + sh.raumtemp.anwesend_eg_og(0) + msg = 'HK_2:\nEG/OG bewohnt: ' + logic.bool2str(sh.raumtemp.anwesend_eg_og(), 3) + + # Schalten + elif message_text == 'led nische wz': + sh.licht.wohnzimmer.vorsatz_nische.onoff(not sh.licht.wohnzimmer.vorsatz_nische.onoff()) + msg = 'Nischenbeleuchtung:\nWohnzimmer: ' + logic.bool2str(sh.licht.wohnzimmer.vorsatz_nische.onoff(), 2) + + elif message_text == 'led nische ez': + sh.licht.wohnzimmer.tv_wand_nische.onoff(not sh.licht.wohnzimmer.tv_wand_nische.onoff()) + msg = 'Nischenbeleuchtung:\nEsszimmer: ' + logic.bool2str(sh.licht.wohnzimmer.tv_wand_nische.onoff(), 2) + + # Verkehr + elif message_text == 'arbeitsweg': + sh.verkehrsinfo.calculate_way_work(1) + time.sleep(0.5) + msg = 'Arbeitsweg:\n ' + str(sh.verkehrsinfo.travel_summary()) + elif message_text == 'heimweg': + sh.verkehrsinfo.calculate_way_home(1) + time.sleep(0.5) + msg = 'Heimweg:\n ' + str(sh.verkehrsinfo.travel_summary()) + + # Tür&Tor + elif message_text == 'tür&tor': + msg = 'Tür&Tor:\nKellertür: ' + logic.bool2str(sh.fenster_tuer_kontakte.kellertuer.verschlossen(), 1) +'\ + \nGaragentür: ' + logic.bool2str(sh.fenster_tuer_kontakte.seitentuer_garage.verschlossen(), 1) +'\ + \nGaragentor links: ' + str(sh.fenster_tuer_kontakte.garagentor_links.text()) +'\ + \nGaragentor rechts: ' + str(sh.fenster_tuer_kontakte.garagentor_rechts.text()) + + # Rolladen + elif message_text == 'rollladen status': + msg = 'Rolladen:\nEG Beschattungsautomatik: ' + logic.bool2str(sh.rollladen.eg.beschattungsautomatik(), 2) +'\ + \nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) +'\ + \nOG Beschattungsautomatik: ' + logic.bool2str(sh.rollladen.og.beschattungsautomatik(), 2) +'\ + \nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + elif message_text == 'eg automatik an': + sh.rollladen.eg.alle.automatik(1) + msg = 'Rolladen:\nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) + elif message_text == 'eg automatik aus': + sh.rollladen.eg.alle.automatik(0) + msg = 'Rolladen:\nEG Fahrautomatik: ' + logic.bool2str(sh.rollladen.eg.alle.automatik(), 2) + elif message_text == 'og automatik an': + sh.rollladen.og.alle.automatik(1) + msg = 'Rolladen:\nOG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + elif message_text == 'og automatik aus': + sh.rollladen.og.alle.automatik(0) + msg = 'Rolladen:\nOG Fahrautomatik: ' + logic.bool2str(sh.rollladen.og.alle.automatik(), 2) + + # Message senden + if msg != '': + telegram_plugin.msg_broadcast(msg, message_chat_id, reply_markup, parse_mode) \ No newline at end of file diff --git a/telegram/_pv_1_7_1/webif/__init__.py b/telegram/_pv_1_7_1/webif/__init__.py new file mode 100644 index 000000000..655758ab3 --- /dev/null +++ b/telegram/_pv_1_7_1/webif/__init__.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2021- Michael Wenzel wenzel_michael@web.de +######################################################################### +# This file is part of SmartHomeNG. 
+# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# Sample plugin for new plugins to run with SmartHomeNG version 1.5 and +# upwards. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +import datetime +import time +import os + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +import cherrypy +import csv +from jinja2 import Environment, FileSystemLoader + + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after beeing rendered + """ + tmpl = self.tplenv.get_template('index.html') + pagelength = self.plugin.get_parameter_value('webif_pagelength') + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) + return tmpl.render(p=self.plugin, + webif_pagelength=pagelength, + items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), + item_count=0) + + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. 
+ """ + if dataSet is None: + # get the new data + data = {} + + # data['item'] = {} + # for i in self.plugin.items: + # data['item'][i]['value'] = self.plugin.getitemvalue(i) + # + # return it as json the the web page + # try: + # return json.dumps(data) + # except Exception as e: + # self.logger.error("get_data_html exception: {}".format(e)) + return {} diff --git a/telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg b/telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg new file mode 100644 index 000000000..8e4e98494 --- /dev/null +++ b/telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/telegram/_pv_1_7_1/webif/static/img/readme.txt b/telegram/_pv_1_7_1/webif/static/img/readme.txt new file mode 100644 index 000000000..1a7c55eef --- /dev/null +++ b/telegram/_pv_1_7_1/webif/static/img/readme.txt @@ -0,0 +1,6 @@ +This directory is for storing images that are used by the web interface. + +If you want to have your own logo on the top of the web interface, store it here and name it plugin_logo.. + +Extension can be png, svg or jpg + diff --git a/telegram/_pv_1_7_1/webif/templates/index.html b/telegram/_pv_1_7_1/webif/templates/index.html new file mode 100644 index 000000000..d80a32c57 --- /dev/null +++ b/telegram/_pv_1_7_1/webif/templates/index.html @@ -0,0 +1,205 @@ +{% extends "base_plugin.html" %} +{% block pluginscripts %} + +{% endblock pluginscripts %} +{% set logo_frame = false %} +{% set item_count = items|length %} + +{% block headtable %} +
    From 712131891b6f2bfb0d183382d1b226650c89f204 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 1 May 2023 10:36:59 +0200 Subject: [PATCH 084/775] AppleTV Plugin: Fix module imports for web interface --- appletv/__init__.py | 4 +--- appletv/webif/__init__.py | 3 +++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/appletv/__init__.py b/appletv/__init__.py index 19e7801c4..abad567d6 100755 --- a/appletv/__init__.py +++ b/appletv/__init__.py @@ -34,9 +34,7 @@ import os import threading import base64 -import json -from random import randint -from time import sleep + import pyatv from pyatv.const import Protocol diff --git a/appletv/webif/__init__.py b/appletv/webif/__init__.py index dd44b0433..3210aa8cb 100755 --- a/appletv/webif/__init__.py +++ b/appletv/webif/__init__.py @@ -23,6 +23,9 @@ ######################################################################### import json +import pyatv +from random import randint +from time import sleep from lib.item import Items from lib.model.smartplugin import SmartPluginWebIf From 95d210abcf5f44df3445b99aa58f0a2f20e83070 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 11:22:13 +0200 Subject: [PATCH 085/775] smartvisu: Changed default value for generate_pages to False --- smartvisu/plugin.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/smartvisu/plugin.yaml b/smartvisu/plugin.yaml index c233ea076..ffb8f7660 100755 --- a/smartvisu/plugin.yaml +++ b/smartvisu/plugin.yaml @@ -32,7 +32,7 @@ parameters: generate_pages: type: bool - default: True + default: False description: de: 'Falls True, werden Seiten für smartVISU generiert' en: 'If True, pages for smartVISU are generated' From 5b752f71d2b3dac69f556d5767062cb836521c81 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 11:24:04 +0200 Subject: [PATCH 086/775] executor: extending base_plugin.html instead of extending base.html in webinterface --- executor/webif/templates/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index 20bc2ca95..d34350a03 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -1,4 +1,4 @@ -{% extends "base.html" %} +{% extends "base_plugin.html" %} {% set logo_frame = false %} From c2b0c5f5a2765915317f7b91d327d7a48046cc90 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 11:24:39 +0200 Subject: [PATCH 087/775] webservice: Corrected typo in user_doc.rst --- webservices/user_doc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webservices/user_doc.rst b/webservices/user_doc.rst index ae6cfb1ad..5bc884e2c 100755 --- a/webservices/user_doc.rst +++ b/webservices/user_doc.rst @@ -3,7 +3,7 @@ .. index:: Webservices .. index:: REST Interface -webservicew +webservices =========== Das Webservices Plugin stellt ein REST basiertes API für SmartHomeNG bereit. 
From 0955f4b5cc922c73b415b91829e708c3024768f1 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 11:37:13 +0200 Subject: [PATCH 088/775] piratewthr: Improved log messages in case of piratewther server errors; Added windDirectionString to struct; Minimum core version is now 1.9.5.2; Removed 'database_maxage: 731' from struct items to use default of database plugin; bumped version to 1.2.0 --- piratewthr/__init__.py | 177 +++++++++++++++++++++++++++-------------- piratewthr/plugin.yaml | 56 ++++++++----- 2 files changed, 153 insertions(+), 80 deletions(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index 62c6e6b7e..cd021c05d 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -37,12 +37,41 @@ class PirateWeather(SmartPlugin): - PLUGIN_VERSION = "1.1.2" + PLUGIN_VERSION = "1.2.0" # https://api.pirateweather.net/forecast/[apikey]/[latitude],[longitude] _base_url = 'https://api.pirateweather.net/forecast/' _base_forecast_url = _base_url + '%s/%s,%s' + _http_response = { + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Internal Server Error', + } + + + def get_http_response(self, code): + + description = self._http_response.get(code, None) + if description is None: + if code >= 100 and code < 200: + description = 'Informational' + elif code < 300: + description = 'Success' + elif code < 400: + description = 'Redirection' + elif code < 500: + description = 'Client Error' + elif code < 600: + description = 'Server Error' + else: + description = 'Unknown Response' + description += ' (' + str(code) + ')' + return description + + def __init__(self, sh, *args, **kwargs): """ Initalizes the plugin. @@ -170,11 +199,13 @@ def _update(self): return + def get_forecast(self): """ Requests the forecast information at pirateweather.net """ self.logger.info(f"get_forecast: url={self._build_url()}") + json_obj = None try: response = self._session.get(self._build_url()) except Exception as e: @@ -186,19 +217,28 @@ def get_forecast(self): self.logger.warning(f"get_forecast: Response '{response}' is no valid json format: {e}") return self.logger.info(f"get_forecast: json response={json_obj}") + + if response.status_code >= 500: + self.logger.warning(f"api.pirateweather.net: {self.get_http_response(response.status_code)} - Ignoring response.") + return + daily_data = OrderedDict() if not json_obj.get('daily', False): - self.logger.warning(f"get_forecast: Response '{response}' has no info for daily values. Ignoring response.") - return + self.logger.warning(f"api.pirateweather.net: {self.get_http_response(response.status_code)} - No info for daily values.") + #return + else: + # add icon_visu to daily + json_obj['daily'].update({'icon_visu': self.map_icon(json_obj['daily']['icon'])}) + if not json_obj.get('hourly', False): - self.logger.warning(f"get_forecast: Response '{response}' has no info for hourly values. 
Ignoring response.") - return + self.logger.warning(f"api.pirateweather.net: {self.get_http_response(response.status_code)} - No info for hourly values.") + #return + else: + # add icon_visu to hourly + json_obj['hourly'].update({'icon_visu': self.map_icon(json_obj['hourly']['icon'])}) - # add icon_visu, date and day to daily and currently - json_obj['daily'].update({'icon_visu': self.map_icon(json_obj['daily']['icon'])}) - json_obj['hourly'].update({'icon_visu': self.map_icon(json_obj['hourly']['icon'])}) if not json_obj.get('currently'): - self.logger.warning(f"get_forecast: Response {response} has no info for current values. Skipping update for currently values.") + self.logger.warning(f"api.pirateweather.net: {self.get_http_response(response.status_code)} - No info for current values. Skipping update for 'currently' values.") else: date_entry = datetime.datetime.fromtimestamp(json_obj['currently']['time']).strftime('%d.%m.%Y') day_entry = datetime.datetime.fromtimestamp(json_obj['currently']['time']).strftime('%A') @@ -207,56 +247,60 @@ def get_forecast(self): 'hour': hour_entry, 'icon_visu': self.map_icon(json_obj['currently']['icon'])}) - # add icon_visu, date and day to each day - for day in json_obj['daily'].get('data'): - date_entry = datetime.datetime.fromtimestamp(day['time']).strftime('%d.%m.%Y') - day_entry = datetime.datetime.fromtimestamp(day['time']).strftime('%A') - day.update({'date': date_entry, 'weekday': day_entry, 'icon_visu': self.map_icon(day['icon'])}) - daily_data.update({datetime.datetime.fromtimestamp(day['time']).date(): day}) - json_obj['daily'].update(daily_data) - json_obj['daily'].pop('data') - - # add icon_visu, date and day to each hour. Add the hours to the corresponding day as well as map to hour0, hour1, etc. - for number, hour in enumerate(json_obj['hourly'].get('data')): - date_entry = datetime.datetime.fromtimestamp(hour['time']).strftime('%d.%m.%Y') - day_entry = datetime.datetime.fromtimestamp(hour['time']).strftime('%A') - hour_entry = datetime.datetime.fromtimestamp(hour['time']).hour - date_key = datetime.datetime.fromtimestamp(hour['time']).date() - hour.update({'date': date_entry, 'weekday': day_entry, 'hour': hour_entry, 'icon_visu': self.map_icon(hour['icon'])}) - if json_obj['daily'].get(date_key) is None: - json_obj['daily'].update({date_key: {}}) - if json_obj['daily'][date_key].get('hours') is None: - json_obj['daily'][date_key].update({'hours': {}}) - json_obj['daily'][date_key]['hours'].update(OrderedDict({hour_entry: hour})) - json_obj['hourly'].update(OrderedDict({'hour{}'.format(number): hour})) - if json_obj['daily'][date_key].get('precipProbability_mean') is None: - json_obj['daily'][date_key].update({'precipProbability_mean': []}) - if json_obj['daily'][date_key].get('precipIntensity_mean') is None: - json_obj['daily'][date_key].update({'precipIntensity_mean': []}) - if json_obj['daily'][date_key].get('temperature_mean') is None: - json_obj['daily'][date_key].update({'temperature_mean': []}) - json_obj['daily'][date_key]['precipProbability_mean'].append(hour.get('precipProbability')) - json_obj['daily'][date_key]['precipIntensity_mean'].append(hour.get('precipIntensity')) - json_obj['daily'][date_key]['temperature_mean'].append(hour.get('temperature')) - json_obj['hourly'].pop('data') - - # add mean values to each day and replace datetime object by day0, day1, day2, etc. 
- i = 0 - # for entry in json_obj['daily']: - json_keys = list(json_obj['daily'].keys()) - for entry in json_keys: - if isinstance(entry, datetime.date): - try: - precip_probability = json_obj['daily'][entry]['precipProbability_mean'] - json_obj['daily'][entry]['precipProbability_mean'] = round(sum(precip_probability)/len(precip_probability), 2) - precip_intensity = json_obj['daily'][entry]['precipIntensity_mean'] - json_obj['daily'][entry]['precipIntensity_mean'] = round(sum(precip_intensity)/len(precip_intensity), 2) - temperature = json_obj['daily'][entry]['temperature_mean'] - json_obj['daily'][entry]['temperature_mean'] = round(sum(temperature)/len(temperature), 2) - except Exception: - pass - json_obj['daily']['day{}'.format(i)] = json_obj['daily'].pop(entry) - i += 1 + if json_obj.get('daily', False): + # add icon_visu, date and day to each day + for day in json_obj['daily'].get('data'): + date_entry = datetime.datetime.fromtimestamp(day['time']).strftime('%d.%m.%Y') + day_entry = datetime.datetime.fromtimestamp(day['time']).strftime('%A') + day.update({'date': date_entry, 'weekday': day_entry, 'icon_visu': self.map_icon(day['icon'])}) + daily_data.update({datetime.datetime.fromtimestamp(day['time']).date(): day}) + json_obj['daily'].update(daily_data) + json_obj['daily'].pop('data') + + if json_obj.get('hourly', False): + # add icon_visu, date and day to each hour. Add the hours to the corresponding day as well as map to hour0, hour1, etc. + for number, hour in enumerate(json_obj['hourly'].get('data')): + date_entry = datetime.datetime.fromtimestamp(hour['time']).strftime('%d.%m.%Y') + day_entry = datetime.datetime.fromtimestamp(hour['time']).strftime('%A') + hour_entry = datetime.datetime.fromtimestamp(hour['time']).hour + date_key = datetime.datetime.fromtimestamp(hour['time']).date() + hour.update({'date': date_entry, 'weekday': day_entry, 'hour': hour_entry, 'icon_visu': self.map_icon(hour['icon'])}) + if json_obj['daily'].get(date_key) is None: + json_obj['daily'].update({date_key: {}}) + if json_obj['daily'][date_key].get('hours') is None: + json_obj['daily'][date_key].update({'hours': {}}) + json_obj['daily'][date_key]['hours'].update(OrderedDict({hour_entry: hour})) + json_obj['hourly'].update(OrderedDict({'hour{}'.format(number): hour})) + if json_obj['daily'][date_key].get('precipProbability_mean') is None: + json_obj['daily'][date_key].update({'precipProbability_mean': []}) + if json_obj['daily'][date_key].get('precipIntensity_mean') is None: + json_obj['daily'][date_key].update({'precipIntensity_mean': []}) + if json_obj['daily'][date_key].get('temperature_mean') is None: + json_obj['daily'][date_key].update({'temperature_mean': []}) + json_obj['daily'][date_key]['precipProbability_mean'].append(hour.get('precipProbability')) + json_obj['daily'][date_key]['precipIntensity_mean'].append(hour.get('precipIntensity')) + json_obj['daily'][date_key]['temperature_mean'].append(hour.get('temperature')) + json_obj['hourly'].pop('data') + + if json_obj.get('daily', False): + # add mean values to each day and replace datetime object by day0, day1, day2, etc. 
+ i = 0 + # for entry in json_obj['daily']: + json_keys = list(json_obj['daily'].keys()) + for entry in json_keys: + if isinstance(entry, datetime.date): + try: + precip_probability = json_obj['daily'][entry]['precipProbability_mean'] + json_obj['daily'][entry]['precipProbability_mean'] = round(sum(precip_probability)/len(precip_probability), 2) + precip_intensity = json_obj['daily'][entry]['precipIntensity_mean'] + json_obj['daily'][entry]['precipIntensity_mean'] = round(sum(precip_intensity)/len(precip_intensity), 2) + temperature = json_obj['daily'][entry]['temperature_mean'] + json_obj['daily'][entry]['temperature_mean'] = round(sum(temperature)/len(temperature), 2) + except Exception: + pass + json_obj['daily']['day{}'.format(i)] = json_obj['daily'].pop(entry) + i += 1 + return json_obj def map_icon(self, icon): @@ -330,3 +374,18 @@ def _build_url(self, url_type='forecast'): self.logger.error('_build_url: Wrong url type specified: %s' %url_type) return url + + def get_wind_direction8(self, deg): + + direction_array = ['N', 'NO', 'O', 'SO', 'S', 'SW', 'W', 'NW', 'N'] + + index = int( (deg % 360 + 22.5) / 45) + return direction_array[index] + + + def get_wind_direction16(self, deg): + + direction_array = ['N', 'NNO', 'NO', 'ONO', 'O', 'OSO', 'SO', 'SSO', 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW', 'N'] + + index = int( (deg % 360 + 11.25) / 22.5) + return direction_array[index] diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index c020cb6d2..948384a3e 100755 --- a/piratewthr/plugin.yaml +++ b/piratewthr/plugin.yaml @@ -11,8 +11,8 @@ plugin: keywords: weather sun wind rain precipitation #documentation: '' support: 'https://knx-user-forum.de/forum/supportforen/smarthome-py/1852685' - version: 1.1.2 # Plugin version - sh_minversion: 1.9.3.4 # minimum shNG version to use this plugin + version: 1.2.0 # Plugin version + sh_minversion: 1.9.5.2 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance restartable: True @@ -57,10 +57,10 @@ parameters: cycle: type: int mandatory: False - default: 300 + default: 900 description: - de: '(optional) Zeit zwischen zwei Updateläufen. Default ist 300 Sekunden.' - en: '(optional) Time period between two update cycles. Default is 300 seconds.' + de: '(optional) Zeit zwischen zwei Updateläufen. Default ist 900 Sekunden.' + en: '(optional) Time period between two update cycles. Default is 900 seconds.' 
@@ -343,7 +343,6 @@ item_structs: type: num pw_matchstring@instance: currently/precipIntensity database: init - database_maxage: 731 # 2 Jahre precipIntensityError: type: num @@ -353,7 +352,6 @@ item_structs: type: num pw_matchstring@instance: currently/precipProbability database: init - database_maxage: 731 # 2 Jahre precipType: type: str @@ -363,85 +361,77 @@ item_structs: type: num pw_matchstring@instance: currently/temperature database: init - database_maxage: 731 # 2 Jahre apparenttemperature: type: num pw_matchstring@instance: currently/apparentTemperature database: init - database_maxage: 731 # 2 Jahre dewpoint: type: num pw_matchstring@instance: currently/dewPoint database: init - database_maxage: 731 # 2 Jahre humidity: type: num pw_matchstring@instance: currently/humidity database: init - database_maxage: 731 # 2 Jahre pressure: type: num pw_matchstring@instance: currently/pressure database: init - database_maxage: 731 # 2 Jahre windSpeed: type: num pw_matchstring@instance: currently/windSpeed database: init - database_maxage: 731 # 2 Jahre windGust: type: num pw_matchstring@instance: currently/windGust database: init - database_maxage: 731 # 2 Jahre windBearing: type: num pw_matchstring@instance: currently/windBearing database: init - database_maxage: 731 # 2 Jahre + + windDirectionString: + type: str + eval_trigger: ..windBearing + #eval: "'N' if sh...windBearing() < 22.5 else 'NO' if sh...windBearing() < 67.5 else 'O' if sh...windBearing() < 112.5 else 'SO' if sh...windBearing() < 157.5 else 'S' if sh...windBearing() < 202.5 else 'SW' if sh...windBearing() < 247.5 else 'W' if sh...windBearing() < 292.5 else 'NW' if sh...windBearing() < 337.5 else 'N'" + eval: sh.plugins.get('piratewthr').get_wind_direction16(sh...windBearing()) cloudCover: type: num pw_matchstring@instance: currently/cloudCover database: init - database_maxage: 731 # 2 Jahre uvIndex: type: num pw_matchstring@instance: currently/uvIndex database: init - database_maxage: 731 # 2 Jahre visibility: type: num pw_matchstring@instance: currently/visibility database: init - database_maxage: 731 # 2 Jahre ozone: type: num pw_matchstring@instance: currently/ozone database: init - database_maxage: 731 # 2 Jahre date: type: str pw_matchstring@instance: currently/date database: init - database_maxage: 731 # 2 Jahre day: type: num pw_matchstring@instance: currently/day database: init - database_maxage: 731 # 2 Jahre forecast_hourly: name: Hourly forcast of Weather report from pirateweather.net - Data is written do database @@ -6601,3 +6591,27 @@ plugin_functions: description: de: "Icon als String." en: "Icon as string." + + get_wind_direction8: + type: str + description: + de: "Bestimmung der Windrichtung als String (NO, ...) aus der Gradzahl (8 Richtungen)" + en: "Getting the wind direction as strint (NE, ...) from degrees (8 directions)" + parameters: + degrees: + type: num + description: + de: "Windrichtung in Grad" + en: "Wind direction in degrees" + + get_wind_direction16: + type: str + description: + de: "Bestimmung der Windrichtung als String (NO, ...) aus der Gradzahl (16 Richtungen)" + en: "Getting the wind direction as strint (NE, ...) 
from degrees (16 directions)" + parameters: + degrees: + type: num + description: + de: "Windrichtung in Grad" + en: "Wind direction in degrees" From 2242c8f118794c50083505b3130b34725c0c0740 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 12:46:39 +0200 Subject: [PATCH 089/775] piratewthr: locals.location to struct --- piratewthr/__init__.py | 29 +++++++++++++++++++++++++++++ piratewthr/plugin.yaml | 28 ++++++++++++++++++---------- 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index cd021c05d..9d5ed3e63 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -389,3 +389,32 @@ def get_wind_direction16(self, deg): index = int( (deg % 360 + 11.25) / 22.5) return direction_array[index] + + + def get_location_name(self, lat, lon): + if lat == 0 or lon == 0: + self.logger.debug(f"lat or lon are zero, not sending request: {lat=}, {lon=}") + return + + # api documentation: https://nominatim.org/release-docs/develop/api/Reverse/ + request_str = f"https://nominatim.openstreetmap.org/reverse?lat={lat}&lon={lon}&format=jsonv2" + + try: + response = requests.get(request_str) + except Exception as e: + self.logger.warning(f"get_location_name: Exception when sending GET request: {e}") + return + + try: + json_obj = response.json() + except Exception as e: + self.logger.warning(f"get_location_name: Response '{response}' is no valid json format: {e}") + return '' + + if response.status_code >= 500: + self.logger.warning(f"get_location_name: {self.get_location_name(response.status_code)}") + return '' + + #self.logger.notice(f"{json_obj['display_name']}") + #self.logger.notice(f"{json_obj['address']}") + return json_obj['address']['suburb'] diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index 948384a3e..e2eadd5a0 100755 --- a/piratewthr/plugin.yaml +++ b/piratewthr/plugin.yaml @@ -77,17 +77,26 @@ item_structs: weather: name: Complete weather report from pirateweather.net - Current weather and forecasts are written to database - latitude: - type: num - pw_matchstring@instance: latitude + locals: - longitude: - type: num - pw_matchstring@instance: longitude + lat: + type: num + pw_matchstring@instance: latitude + + lon: + type: num + pw_matchstring@instance: longitude + + timezone: + type: str + pw_matchstring@instance: timezone + + location: + type: str + eval_trigger: + - ..lon + eval: sh.plugins.get('piratewthr').get_location_name(sh...lat(), sh...lon()) - timezone: - type: str - pw_matchstring@instance: timezone struct: - piratewthr.current_weather @@ -400,7 +409,6 @@ item_structs: windDirectionString: type: str eval_trigger: ..windBearing - #eval: "'N' if sh...windBearing() < 22.5 else 'NO' if sh...windBearing() < 67.5 else 'O' if sh...windBearing() < 112.5 else 'SO' if sh...windBearing() < 157.5 else 'S' if sh...windBearing() < 202.5 else 'SW' if sh...windBearing() < 247.5 else 'W' if sh...windBearing() < 292.5 else 'NW' if sh...windBearing() < 337.5 else 'N'" eval: sh.plugins.get('piratewthr').get_wind_direction16(sh...windBearing()) cloudCover: From 66913ea6152c07466c439699d6284cb5621786ce Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 13:04:18 +0200 Subject: [PATCH 090/775] piratewthr: locals.location to struct --- piratewthr/__init__.py | 8 +++++++- piratewthr/plugin.yaml | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index 9d5ed3e63..2585b2fe9 100755 --- a/piratewthr/__init__.py +++ 
b/piratewthr/__init__.py @@ -391,7 +391,13 @@ def get_wind_direction16(self, deg): return direction_array[index] - def get_location_name(self, lat, lon): + def get_location_name(self, lat=None, lon=None): + + if lat is None: + lat = self._lat + if lon is None: + lon = self._lon + if lat == 0 or lon == 0: self.logger.debug(f"lat or lon are zero, not sending request: {lat=}, {lon=}") return diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index e2eadd5a0..8747ca846 100755 --- a/piratewthr/plugin.yaml +++ b/piratewthr/plugin.yaml @@ -6623,3 +6623,23 @@ plugin_functions: description: de: "Windrichtung in Grad" en: "Wind direction in degrees" + + get_location_name: + type: str + description: + de: "Bestimmung des Ortsnamens (Vorort) aus latitude und longitude" + en: "Getting location name (suburb) from latitude and longitude" + parameters: + lat: + type: num + default: None* + description: + de: "Latitude, falls nicht angegeben wird die Latitude des piratewthr Plugins benutzt" + en: "latitude, if omitted the configured latitude of the piratewthr plugin is used" + lon: + type: num + default: None* + description: + de: "Longitude, falls nicht angegeben wird die Longitude des piratewthr Plugins benutzt" + en: "longitude, if omitted the configured longitude of the piratewthr plugin is used" + From 4e3b277f412f8c81f6acffcd50e90a4b11f112ae Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 2 May 2023 13:19:27 +0200 Subject: [PATCH 091/775] piratewthr: Fix in plugin.yaml --- piratewthr/plugin.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index 8747ca846..b20fcdc13 100755 --- a/piratewthr/plugin.yaml +++ b/piratewthr/plugin.yaml @@ -6632,13 +6632,13 @@ plugin_functions: parameters: lat: type: num - default: None* + default: None description: de: "Latitude, falls nicht angegeben wird die Latitude des piratewthr Plugins benutzt" en: "latitude, if omitted the configured latitude of the piratewthr plugin is used" lon: type: num - default: None* + default: None description: de: "Longitude, falls nicht angegeben wird die Longitude des piratewthr Plugins benutzt" en: "longitude, if omitted the configured longitude of the piratewthr plugin is used" From 00a4d04c3cae2998681411c4b67ea82aa612d5e3 Mon Sep 17 00:00:00 2001 From: aschwith Date: Fri, 5 May 2023 22:25:32 +0200 Subject: [PATCH 092/775] avm: trying to fix yet another regression in avm RGB support introduced with release v1.9.5 --- avm/__init__.py | 201 +++- avm/item_attributes.py | 2 +- avm/plugin.yaml | 2056 ++++++++++++++++++++-------------------- 3 files changed, 1209 insertions(+), 1050 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index 96d2f5fdc..15df62067 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -106,7 +106,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. 
Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.3' + PLUGIN_VERSION = '2.0.4' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -1872,6 +1872,8 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): 'switch_toggle': (self.set_switch_state_toggle, self.get_switch_state), 'colortemperature': (self.set_color_temp, self.get_color_temp), 'hue': (self.set_color_discrete, self.get_hue), + 'unmapped_saturation':(self.set_unmapped_saturation, self.get_unmapped_saturation), + 'unmapped_hue': (self.set_unmapped_hue, self.get_unmapped_hue) } # get AIN @@ -2487,6 +2489,91 @@ def _get_colordefaults(self, ain): plain = self._aha_request("getcolordefaults", ain=ain) return ElementTree.fromstring(to_str(plain)) + def get_unmapped_hue(self, ain): + """ + get unmapped hue value represented in hsv domain as integer value between [0,359]. + """ + self.logger.warning("Debug: get_unmapped_hue called.") + try: + value = self.get_devices_as_dict()[ain].hue + self.logger.warning(f"Debug: get_unmapped_hue is {value}.") + return value + except AttributeError: + self.logger.warning("Debug: get_unmapped_hue attribute error exception") + pass + except Exception as e: + self.logger.warning(f"get_unmapped_hue: exception: {e}") + + + def set_unmapped_hue(self, ain, hue): + """ + set hue value (0-359) + """ + self.logger.warning(f"Debug: set_unmapped_hue called with hue {hue}") + + if (hue < 0) or hue > 359: + self.logger.error(f"set_unmapped_hue, hue value must be between 0 and 359") + return False + + try: + # saturation already scaled to 0-100: + saturation = self.get_devices_as_dict()[ain].saturation + self.get_devices_as_dict()[ain].hue = hue + except AttributeError: + self.logger.warning(f"set_unmapped_hue exception occurred") + pass + except Exception as e: + self.logger.warning(f"set_unmapped_hue: exception: {e}") + + else: + self.logger.warning(f"Debug: Success: set_unmapped_hue, hue {hue}, saturation is {saturation}") + # saturation variable is scaled to 0-100. Scale to 0-255 for AVM AHA interface + self.set_color(ain, [hue, int(saturation*2.55)], duration=0, mapped=False) + + + def get_unmapped_saturation(self, ain): + """ + get saturation as integer value between 0-100. 
+ """ + self.logger.warning("Debug: get_unmapped_saturation called.") + try: + value = self.get_devices_as_dict()[ain].saturation + self.logger.warning(f"Debug: get_unmapped_saturation is {value} (range 0-100).") + return value + except AttributeError: + self.logger.warning("Debug: get_unmapped_saturation attribute error xception") + pass + except Exception as e: + self.logger.warning(f"get_unmapped_saturation, exception: {e}") + + + def set_unmapped_saturation(self, ain, saturation): + """ + set saturation value + saturation defined in range (0-100) + """ + self.logger.warning(f"Debug: set_unampped_saturation is called with value {saturation} defined in range 0-100") + + if (saturation < 0) or saturation > 100: + self.logger.error(f"set_unmapped_saturation: value must be between 0 and 100") + return False + + try: + hue = self.get_devices_as_dict()[ain].hue + self.get_devices_as_dict()[ain].saturation = saturation + + except AttributeError: + self.logger.warning(f"set_unamapped_saturation attribute error exception occurred") + pass + except Exception as e: + self.logger.warning(f"set_unmapped_saturation, exception: {e}") + + else: + self.logger.warning(f"Debug: success: set_unmapped_saturation: value is {saturation} (0-100), hue {hue}") + # Plugin handles saturation value in the range of 0-100. AVM function expect saturation to be within 0-255. Therefore, scale value: + self.logger.warning(f"Debug: set_unmapped_saturation, after scaling: saturation is {int(saturation*2.55)}, hue {hue}") + self.set_color(ain, [hue, int(saturation*2.55)], duration=0, mapped=False) + def get_colors(self, ain): """ Get colors (HSV-space) supported by this lightbulb. @@ -2511,18 +2598,49 @@ def set_color(self, ain, hsv, duration=0, mapped=True): """ Set hue and saturation. hsv: HUE colorspace element obtained from get_colors() + hsv is an array including hue, saturation and level + hue must be within range 0-359 + saturation must be within range 0-255 duration: Speed of change in seconds, 0 = instant - """ + mapped = True uses the AVM setcolor function. It only + supports pre-defined colors that can be obtained + by the get_colors function. + mapped = False uses the AVM setunmappedcolor function, featured + by AVM firmwareversion since approximately Q2 2022. 
It + supports every combination if hue/saturation/level + """ + success = False params = { 'hue': int(hsv[0]), 'saturation': int(hsv[1]), "duration": int(duration) * 10 } + + # Range checks: + hue = int(hsv[0]) + if (hue < 0) or hue > 359: + self.logger.error(f"set_color, hue value must be between 0 and 359") + return False + saturation = int(hsv[1]) + if (saturation < 0) or saturation > 255: + self.logger.error(f"set_color, saturation value must be between 0 and 255") + return False + + # special mode for white color (hue=0, saturation=0): + self.logger.warning(f"Debug set_color called with mapped {mapped} and hs(v): {int(hsv[0])}, {int(hsv[1])}") + if (int(hsv[0]) == 0) and (int(hsv[1]) == 0): + self.logger.debug(f"set_color, warm white color selected") + success = self.set_color_temp(ain, temperature=2700, duration=1) + return success + if mapped: - self._aha_request("setcolor", ain=ain, param=params) + success = self._aha_request("setcolor", ain=ain, param=params) else: # undocumented API method for free color selection - self._aha_request("setunmappedcolor", ain=ain, param=params) + success = self._aha_request("setunmappedcolor", ain=ain, param=params) + + self.logger.warning(f"Debug set color in mapped {mapped} mode: success: {success}") + return success def set_color_discrete(self, ain, hue, duration=0): """ @@ -3336,14 +3454,15 @@ class FritzhomeDeviceDimmable(FritzhomeDeviceBase): def _update_from_node(self, node): super()._update_from_node(node) - if not self.connected: + if self.connected is False: self.level = 0 self.levelpercentage = 0 return - if self.has_level(): + if self.has_level: self._update_level_from_node(node) + @property def has_level(self): """Check if the device has dimmer function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.LEVEL) @@ -3355,10 +3474,11 @@ def _update_level_from_node(self, node): self.level = get_node_value_as_int(levelcontrol_element, "level") self.levelpercentage = get_node_value_as_int(levelcontrol_element, "levelpercentage") + # Set Level to zero for consistency, if light is off: state_element = node.find("simpleonoff") if state_element is not None: simpleonoff = get_node_value_as_int_as_bool(state_element, "state") - if not simpleonoff: + if simpleonoff is False: self.level = 0 self.levelpercentage = 0 @@ -3375,15 +3495,18 @@ class FritzhomeDeviceColor(FritzhomeDeviceBase): unmapped_hue = None unmapped_saturation = None colortemperature = None + + logger = logging.getLogger(__name__) def _update_from_node(self, node): super()._update_from_node(node) - if not self.connected: + if self.connected is False: return - if self.has_color(): + if self.has_color: self._update_color_from_node(node) + @property def has_color(self): """Check if the device has LightBulb function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.COLOR) @@ -3391,7 +3514,7 @@ def has_color(self): def _update_color_from_node(self, node): colorcontrol_element = node.find("colorcontrol") - if colorcontrol_element: + if colorcontrol_element is not None: try: self.color_mode = int(colorcontrol_element.attrib.get("current_mode")) @@ -3403,34 +3526,70 @@ def _update_color_from_node(self, node): except ValueError: pass - self.fullcolorsupport = bool(colorcontrol_element.attrib.get("fullcolorsupport")) - self.mapped = bool(colorcontrol_element.attrib.get("mapped")) - self.hue = get_node_value_as_int(colorcontrol_element, "hue") - self.saturation = get_node_value_as_int(colorcontrol_element, "saturation") - self.unmapped_hue = 
get_node_value_as_int(colorcontrol_element, "unmapped_hue") - self.unmapped_saturation = get_node_value_as_int(colorcontrol_element, "unmapped_saturation") - self.colortemperature = get_node_value_as_int(colorcontrol_element, "temperature") + try: + self.fullcolorsupport = bool(colorcontrol_element.attrib.get("fullcolorsupport")) + except ValueError: + pass + + try: + self.mapped = bool(colorcontrol_element.attrib.get("mapped")) + except ValueError: + pass + + try: + self.hue = get_node_value_as_int(colorcontrol_element, "hue") + self.logger.dbglow(f"received hue value {self.hue}") + except ValueError: + self.hue = 0 + + try: + value = get_node_value_as_int(colorcontrol_element, "saturation") + self.saturation = int(value/2.55) + self.logger.dbglow(f"received unmapped saturation value {value}, scaled to {self.saturation}") + except ValueError: + self.saturation = 0 + + try: + self.unmapped_hue = get_node_value_as_int(colorcontrol_element, "unmapped_hue") + self.logger.dbglow(f"received unmapped hue value {self.unmapped_hue}") + except ValueError: + self.logger.warning(f"exception in unmapped_hue extraction") + self.unmapped_hue = 0 + + try: + value = get_node_value_as_int(colorcontrol_element, "unmapped_saturation") + self.unmapped_saturation = int(value/2.55) + self.logger.dbglow(f"received unmapped saturation value {value}, scaled to {self.unmapped_saturation}") + except ValueError: + self.unmapped_saturation = 0 + except Exception as e: + self.logger.error(f"Exception while receiving unmapped saturation: {e}") + + try: + self.colortemperature = get_node_value_as_int(colorcontrol_element, "temperature") + except ValueError: + self.colortemperature = 0 def get_colors(self): """Get the supported colors.""" - if self.has_color(): + if self.has_color: return self._fritz.get_colors(self.ain) else: return {} def set_color(self, hsv, duration=0): """Set HSV color.""" - if self.has_color(): + if self.has_color: self._fritz.set_color(self.ain, hsv, duration, True) def set_unmapped_color(self, hsv, duration=0): """Set unmapped HSV color (Free color selection).""" - if self.has_color(): + if self.has_color: self._fritz.set_color(self.ain, hsv, duration, False) def get_color_temps(self): """Get the supported color temperatures energy.""" - if self.has_color(): + if self.has_color: return self._fritz.get_color_temps(self.ain) else: return [] diff --git a/avm/item_attributes.py b/avm/item_attributes.py index db2cdb477..8aebaac94 100644 --- a/avm/item_attributes.py +++ b/avm/item_attributes.py @@ -26,7 +26,7 @@ AHA_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'set_target_temperature', 'target_temperature', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'set_window_open', 'window_open', 'windowopenactiveendtime', 'set_hkr_boost', 'hkr_boost', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'set_simpleonoff', 'simpleonoff', 'set_level', 'level', 'set_levelpercentage', 'levelpercentage', 'set_hue', 'hue', 'set_saturation', 'saturation', 'set_colortemperature', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_state', 'switch_mode', 'switch_toggle', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] AHA_RO_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 
'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'windowopenactiveendtime', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_mode', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] -AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'switch_state'] +AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'switch_state', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation'] TR064_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] AVM_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] CALL_MONITOR_ATTRIBUTES = ['call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing'] diff --git a/avm/plugin.yaml b/avm/plugin.yaml index c0078e7d6..1d6e7b1aa 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -1,1028 +1,1028 @@ -# Metadata for the plugin -plugin: - # Global plugin 
attributes - type: interface # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, etc.' - en: 'Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets.' - maintainer: sisamiwe - tester: psilo, onkelandy, aschwith, bmx - state: develop # change to ready when done with development -# keywords: iot xyz - documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html - support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - - version: 2.0.3 # Plugin version (must match the version specified in __init__.py) - sh_minversion: 1.8 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# py_minversion: 3.6 # minimum Python version to use for this plugin -# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) - multi_instance: true # plugin supports multi instance - restartable: unknown - classname: AVM # class containing the plugin - -parameters: - # Definition of parameters to be configured in etc/plugin.yaml - username: - type: str - default: '' - description: - de: '(optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen und Kennwort` notwendig' - en: '(optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Firtz!OS 7 the Fritz!Box should be configured for login with username and password' - password: - type: str - default: '' - hide: true - description: - de: '(optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen.' - en: '(optional) Password for login. Is normally always needed and recommended due to security reasons' - host: - type: str - mandatory: True - description: - de: '(optional) Hostname oder IP-Adresse des FritzDevice.' - en: '(optional) Hostname or ip address of the FritzDevice.' - port: - type: int - default: 49443 - description: - de: '(optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http' - en: '(optional) Port of the FritzDevice, typically 49443 for https or 49000 for http' - cycle: - type: int - default: 300 - description: - de: '(optional) Zeit zwischen zwei Runs. Default ist 300 Sekunden.' - en: '(optional) Time period between two update cycles. Default is 300 seconds.' - ssl: - type: bool - default: true - description: - de: '(optional) Mit True wird das FritzDevice via https, mit False via http angesprochen.' - en: '(optional) True will add "https", False "http" to the URLs in the plugin.' - verify: - type: bool - default: false - description: - de: '(optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False.' - en: '(optional) Turns certificate verification on or off. Typically False' - call_monitor: - type: bool - default: false - description: - de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via #96*5* aktiviert sein.' - en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. 
The call monitor has to be activated before by a connected telephone via calling #96*5*' - call_monitor_incoming_filter: - type: str - default: '' - description: - de: '(optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet.' - en: '(optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react.' - avm_home_automation: - type: bool - default: false - description: - de: '(optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface.' - en: '(optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface' - log_entry_count: - type: int - default: 200 - description: - de: '(optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle' - en: '(optional) Amount of Log-Messages, witch will be displayed. 0 = all' - tr064_item_blacklist: - type: bool - default: False - description: - de: '(optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt.' - en: '(optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle' - -item_attributes: - # Definition of item attributes defined by this plugin - avm_data_type: - type: str - mandatory: True - description: - de: 'AVM Datentyp des jeweiligen Items.' - en: 'AVM Data Type of the respective item.' - valid_list: - # Fritzdevice Attribute - - 'uptime' # r/o num Laufzeit des Fritzdevice in Sekunden - - 'serial_number' # r/o str Serialnummer des Fritzdevice - - 'software_version' # r/o str Software Version - - 'hardware_version' # r/o str Hardware Version - # Myfritz Attribute - - 'myfritz_status' # r/o bool MyFritz Status - # Call Monitor Attribute - - 'monitor_trigger' # r/o bool Monitortrigger - - 'is_call_incoming' # r/o bool Eingehender Anruf erkannt - - 'call_duration_incoming' # r/o num Dauer des eingehenden Anrufs - - 'last_caller_incoming' # r/o str Letzter Anrufer - - 'last_number_incoming' # r/o str Nummer des letzten eingehenden Anrufes - - 'last_called_number_incoming' # r/o str Angerufene Nummer des letzten eingehenden Anrufs - - 'last_call_date_incoming' # r/o str Zeitpunkt des letzten eingehenden Anrufs - - 'call_event_incoming' # r/o str Status des letzten eingehenden Anrufs - - 'is_call_outgoing' # r/o bool Ausgehender Anruf erkannt - - 'call_duration_outgoing' # r/o num Dauer des ausgehenden Anrufs - - 'last_caller_outgoing' # r/o str Letzter angerufener Kontakt - - 'last_number_outgoing' # r/o str Letzte angerufene Nummer - - 'last_called_number_outgoing' # r/o str Letzter verwendete Telefonnummer für ausgehenden Anruf - - 'last_call_date_outgoing' # r/o str Zeitpunkt des letzten ausgehenden Anrufs - - 'call_event_outgoing' # r/o str Status des letzten ausgehenden Anrufs - - 'call_direction' # r/o str Richtung des letzten Anrufes - - 'call_event' # r/o str Status des letzten Anrufes - # TAM Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_index' - - 'tam' # r/w bool TAM an/aus - - 'tam_name' # r/o str Name des TAM - - 'tam_old_message_number' # r/o num Anzahl der alten Nachrichten - - 'tam_new_message_number' # r/o num Anzahl der neuen Nachrichten - - 
'tam_total_message_number' # r/o num Gesamtanzahl der Nachrichten - # WAN Attribute - - 'wan_connection_status' # r/o str WAN Verbindungsstatus - - 'wan_connection_error' # r/o str WAN Verbindungsfehler - - 'wan_is_connected' # r/o bool WAN Verbindung aktiv - - 'wan_uptime' # r/o str WAN Verbindungszeit - - 'wan_ip' # r/o str WAN IP Adresse - - 'wan_upstream' # r/o num WAN Upstream Datenmenge - - 'wan_downstream' # r/o num WAN Downstream Datenmenge - - 'wan_total_packets_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Pakete - - 'wan_total_packets_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Pakete - - 'wan_current_packets_sent' # r/o num WAN Verbindung-Anzahl aktuell versendeter Pakete - - 'wan_current_packets_received' # r/o num WAN Verbindung-Anzahl aktuell empfangener Pakete - - 'wan_total_bytes_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Bytes - - 'wan_total_bytes_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Bytes - - 'wan_current_bytes_sent' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Senden - - 'wan_current_bytes_received' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen - - 'wan_link' # r/o bool WAN Link - # WLAN Config Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_wlan_index' - - 'wlanconfig' # r/w bool WLAN An/Aus - - 'wlanconfig_ssid' # r/o str WLAN SSID - - 'wlan_guest_time_remaining' # r/o num Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN - - 'wlan_associates' # r/o num Anzahl der verbundenen Geräte im jeweiligen WLAN - - 'wps_active' # r/w bool Schaltet WPS für das entsprechende WlAN an / aus - - 'wps_status' # r/o str WPS Status des entsprechenden WlAN - - 'wps_mode' # r/o str WPS Modus des entsprechenden WlAN - # WLAN Attribute - - 'wlan_total_associates' # r/o num Anzahl der verbundenen Geräte im WLAN - # Host Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_mac' - - 'network_device' # r/o bool Verbindungsstatus // Defines Network device via MAC-Adresse - - 'device_ip' # r/o str Geräte-IP (Muss Child von 'network_device' sein) - - 'device_connection_type' # r/o str Verbindungstyp (Muss Child von 'network_device' sein) - - 'device_hostname' # r/o str Gerätename (Muss Child von 'network_device' sein) - - 'connection_status' # r/o bool Verbindungsstatus (Muss Child von 'network_device' sein) - # Hosts Attribute - - 'hosts_count' #r/o num Anzahl der Hosts - - 'hosts_info' #r/o dict Informationen über die Hosts - - 'mesh_topology' #r/o dict Topologie des Mesh - # Smarthome Attribute (Deprecated avm data types. 
Please use alternative AHA interface type) - - 'aha_device' # r/w bool Steckdose schalten; siehe "switch_state" - - 'hkr_device' # r/o str Status des HKR (OPEN; CLOSED; TEMP) - - 'set_temperature' # r/o num siehe "target_temperature" - - 'temperature' # r/o num siehe "current_temperature" - - 'set_temperature_reduced' # r/o num siehe "temperature_reduced" - - 'set_temperature_comfort' # r/o num siehe "temperature_comfort" - - 'firmware_version' # r/o str siehe "fw_version" - # Deflections Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_deflection_index' - - 'number_of_deflections' # r/o num Anzahl der eingestellten Rufumleitungen - - 'deflection_details' # r/o dict Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item - - 'deflections_details' # r/o dict Details zu allen Rufumleitung (als dict) - - 'deflection_enable' # r/w bool Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_type' # r/o str Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_number' # r/o str Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_to_number' # r/o str Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_mode' # r/o str Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_outgoing' # r/o str Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_phonebook_id' # r/o str Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - # AHA Interface attributes Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_ain' - - 'device_id' # r/o str Geräte -ID - - 'manufacturer' # r/o str Hersteller - - 'product_name' # r/o str Produktname - - 'fw_version' # r/o str Firmware Version - - 'connected' # r/o bool Verbindungsstatus - - 'device_name' # r/o str Gerätename - - 'tx_busy' # r/o bool Verbindung aktiv - - 'device_functions' # r/o list Im Gerät vorhandene Funktionen - - - 'set_target_temperature' # w/o num Soll-Temperatur Setzen - - 'target_temperature' # r/w num Soll-Temperatur (Status und Setzen) - - 'current_temperature' # r/o num Ist-Temperatur - - 'temperature_reduced' # r/o num Eingestellte reduzierte Temperatur - - 'temperature_comfort' # r/o num Eingestellte Komfort-Temperatur - - 'temperature_offset' # r/o num Eingestellter Temperatur-Offset - - 'set_window_open' # w/o bool "Window Open" Funktionen Setzen - - 'window_open' # r/w bool "Window Open" Funktion (Status und Setzen) - - 'windowopenactiveendtime' # r/o num Zeitliches Ende der "Window Open" Funktion - - 'set_hkr_boost' # w/o bool "Boost" Funktion Setzen - - 'hkr_boost' # r/w bool "Boost" Funktion (Status aund Setzen) - - 'boost_active' # r/o bool Status der "Boost" Funktion deprecated - - 'boostactiveendtime' # r/o num Zeitliches Ende der "Boost" Funktion - - 'summer_active' # r/o bool Status der "Sommer" Funktion - - 'holiday_active' # r/o bool Status der "Holiday" Funktion - - 'battery_low' # r/o bool "Battery low" Status - - 'battery_level' # r/o num Batterie-Status in % - - 'lock' # r/o bool Tastensperre über UI/API aktiv - - 
'device_lock' # r/o bool Tastensperre direkt am Gerät ein - - 'errorcode' # r/o num Fehlercodes die der HKR liefert - - - 'set_simpleonoff' # w/o bool Gerät/Aktor/Lampe an-/ausschalten - - 'simpleonoff' # w/r bool Gerät/Aktor/Lampe (Status und Setzen) - - - 'set_level' # w/o num Level/Niveau von 0 bis 255 Setzen - - 'level' # w/r num Level/Niveau von 0 bis 255 (Setzen & Status) - - 'set_levelpercentage' # w/o num Level/Niveau von 0% bis 100% Setzen - - 'levelpercentage' # w/r num Level/Niveau von 0% bis 100% (Setzen & Status) - - - - 'set_hue' # w/o num Hue Setzen - - 'hue' # w/r num Hue (Status und Setzen) - - 'set_saturation' # w/o num Saturation Setzen - - 'saturation' # w/r num Saturation (Status und Setzen) - - 'set_colortemperature' # w/o num Farbtemperatur Setzen - - 'colortemperature' # w/r num Farbtemperatur (Status und Setzen) - - 'unmapped_hue' # w/r num Hue (Status und Setzen) - - 'unmapped_saturation' # w/r num Saturation (Status und Setzen) - - 'color_mode' # r/o num Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - 'supported_color_mode' # r/o num Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - 'fullcolorsupport' # r/o bool Lampe unterstützt setunmappedcolor - - 'mapped' # r/o bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt - - - 'switch_state' # r/w bool Schaltzustand Steckdose (Status und Setzen) - - 'switch_mode' # r/o str Zeitschaltung oder manuell schalten - - 'switch_toggle' # w/o bool Schaltzustand umschalten (toggle) - - - 'power' # r/o num Leistung in W (Aktualisierung alle 2 min) - - 'energy' # r/o num absoluter Verbrauch seit Inbetriebnahme in Wh - - 'voltage' # r/o num Spannung in V (Aktualisierung alle 2 min) - - - 'humidity' # r/o num Relative Luftfeuchtigkeit in % (FD440) - - - 'alert_state' # r/o bool letzter übermittelter Alarmzustand - - - 'blind_mode' # r/o str automatische Zeitschaltung oder manuell fahren - - 'endpositionsset' # r/o bool ist die Endlage für das Rollo konfiguriert - - avm_incoming_allowed: - type: str - mandatory: False - description: - de: '(optional) Definition der erlaubten eingehenden Rufnummer in Items vom avm_data_type `monitor_trigger`.' - en: '(optional) Definition of the allowed incoming number. Only in items of avm_data_type `monitor_trigger`.' - - avm_target_number: - type: str - mandatory: False - description: - de: '(optional) Definition der erlaubten angerufenen Rufnummer in Items vom avm_data_type `monitor_trigger`.' - en: '(optional) Definition of the allowed called number. Only in items of avm_data_type `monitor_trigger`.' - - avm_wlan_index: - type: int - description: - de: '(optional) Definition des Wlans ueber index: (1: 2.4Ghz, 2: 5Ghz, 3: Gaeste).' - en: '(optional) Definition of WiFi via index: (1: 2.4GHz, 2: 5GHz, 3: Guest)' - valid_min: 1 - valid_max: 3 - - avm_mac: - type: mac - mandatory: False - description: - de: '(optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory!' - en: '(optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items!' - - avm_ain: - type: str - mandatory: False - description: - de: "(optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory!" - en: "(optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items!" 
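As an illustration of the `avm_ain` / `avm_data_type` attributes described here, a minimal AHA item could be defined roughly as sketched below. Item names and the AIN are placeholders only; in multi-instance setups the attributes would additionally carry an `@instance` suffix as shown in the item_structs further down.

living_room_hkr:
    current_temperature:
        type: num
        visu_acl: ro
        avm_data_type: current_temperature
        avm_ain: '09995 0123456'    # placeholder AIN as shown in the FRITZ!Box UI
    target_temperature:
        type: num
        visu_acl: rw
        avm_data_type: target_temperature
        avm_ain: '09995 0123456'    # same placeholder AIN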
- - avm_tam_index: - type: int - mandatory: False - description: - de: '(optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt.' - en: '(optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines.' - valid_min: 1 - valid_max: 5 - - avm_deflection_index: - type: int - mandatory: False - description: - de: '(optional) Index für die Rufumleitung, normalerweise für die erste eine "1".' - en: '(optional) Index deflection, normally a "1" for the first one.' - valid_min: 1 - valid_max: 32 - - avm_read_after_write: - type: int - description: - de: '(optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird.' - en: '(optional) Configures delay in seconds to issue a read command after write command' - - avm_data_cycle: - type: int - mandatory: False - description: - de: 'Poll-Zyklus des AVM Datentypes des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen' - en: 'Poll cycle of AVM Data Type of the respective item. 0-Just read at init; 10+ - cyclic reading' - -item_structs: - info: - uptime: - type: num - visu_acl: ro - avm_data_type@instance: uptime - avm_data_cycle@instance: 600 - serial_number: - type: str - visu_acl: ro - avm_data_type@instance: serial_number - avm_data_cycle@instance: 0 - firmware: - type: str - visu_acl: ro - avm_data_type@instance: software_version - avm_data_cycle@instance: 3600 - hardware_version: - type: str - visu_acl: ro - avm_data_type@instance: hardware_version - avm_data_cycle@instance: 0 - myfritz: - type: bool - avm_data_type@instance: myfritz_status - - monitor: - trigger: - type: bool - avm_data_type@instance: monitor_trigger - avm_incoming_allowed@instance: xxxxxxxx - avm_target_number@instance: xxxxxxxx - enforce_updates: yes - incoming: - is_call_incoming: - type: bool - avm_data_type@instance: is_call_incoming - duration: - type: num - avm_data_type@instance: call_duration_incoming - last_caller: - type: str - avm_data_type@instance: last_caller_incoming - last_calling_number: - type: str - avm_data_type@instance: last_number_incoming - last_called_number: - type: str - avm_data_type@instance: last_called_number_incoming - last_call_date: - type: str - avm_data_type@instance: last_call_date_incoming - event: - type: str - avm_data_type@instance: call_event_incoming - outgoing: - is_call_outgoing: - type: bool - avm_data_type@instance: is_call_outgoing - duration: - type: num - avm_data_type@instance: call_duration_outgoing - last_caller: - type: str - avm_data_type@instance: last_caller_outgoing - last_calling_number: - type: str - avm_data_type@instance: last_number_outgoing - last_called_number: - type: str - avm_data_type@instance: last_called_number_outgoing - last_call_date: - type: str - avm_data_type@instance: last_call_date_outgoing - event: - type: str - avm_data_type@instance: call_event_outgoing - newest: - direction: - type: str - avm_data_type@instance: call_direction - cache: yes - event: - type: str - avm_data_type@instance: call_event - cache: yes - - tam: - type: bool - visu_acl: rw - avm_data_type@instance: tam - avm_tam_index@instance: 1 - avm_read_after_write@instance: 5 - - name: - type: str - visu_acl: ro - avm_data_type@instance: tam_name - message_number_old: - type: num - visu_acl: ro - avm_data_type@instance: tam_old_message_number - message_number_new: - type: num - visu_acl: ro - avm_data_type@instance: 
tam_new_message_number - message_number_total: - type: num - visu_acl: ro - avm_data_type@instance: tam_total_message_number - - deflection: - type: bool - visu_acl: rw - avm_data_type@instance: deflection_enable - avm_deflection_index@instance: 1 - avm_read_after_write@instance: 5 - - deflection_type: - type: str - visu_acl: ro - avm_data_type@instance: deflection_type - deflection_number: - type: str - visu_acl: ro - avm_data_type@instance: deflection_number - deflection_to_number: - type: str - visu_acl: ro - avm_data_type@instance: deflection_to_number - deflection_mode: - type: str - visu_acl: ro - avm_data_type@instance: deflection_mode - deflection_outgoing: - type: str - visu_acl: ro - avm_data_type@instance: deflection_outgoing - deflection_phonebook_id: - type: num - visu_acl: ro - avm_data_type@instance: deflection_phonebook_id - - wan: - connection_status: - type: str - visu_acl: ro - avm_data_type@instance: wan_connection_status - connection_error: - type: str - visu_acl: ro - avm_data_type@instance: wan_connection_error - is_connected: - type: bool - visu_acl: ro - avm_data_type@instance: wan_is_connected - uptime: - type: num - visu_acl: ro - avm_data_type@instance: wan_uptime - upstream: - type: num - visu_acl: ro - avm_data_type@instance: wan_upstream - downstream: - type: num - visu_acl: ro - avm_data_type@instance: wan_downstream - total_packets_sent: - type: num - visu_acl: ro - avm_data_type@instance: wan_total_packets_sent - total_packets_received: - type: num - visu_acl: ro - avm_data_type@instance: wan_total_packets_received - total_bytes_sent: - type: num - visu_acl: ro - avm_data_type@instance: wan_total_bytes_sent - total_bytes_received: - type: num - visu_acl: ro - avm_data_type@instance: wan_total_bytes_received - current_bytes_sent: - type: num - visu_acl: ro - avm_data_type@instance: wan_current_bytes_sent - current_bytes_receive: - type: num - visu_acl: ro - avm_data_type@instance: wan_current_bytes_received - link: - type: bool - visu_acl: ro - avm_data_type@instance: wan_link - reconnect: - type: bool - visu_acl: rw - enforce_updates: yes - - wlan: - wlan_1: - type: bool - visu_acl: rw - avm_data_type@instance: wlanconfig # 2,4ghz - avm_wlan_index@instance: 1 - avm_read_after_write@instance: 5 - wlan_1_ssid: - type: str - visu_acl: ro - avm_data_type@instance: wlanconfig_ssid - avm_wlan_index@instance: 1 - wlan_1_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 1 - wlan_2: - type: bool - visu_acl: rw - avm_data_type@instance: wlanconfig # 5 GHz - avm_wlan_index@instance: 2 - avm_read_after_write@instance: 5 - wlan_2_ssid: - type: str - visu_acl: ro - avm_data_type@instance: wlanconfig_ssid - avm_wlan_index@instance: 2 - wlan_2_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 2 - wlan_gast: - type: bool - visu_acl: rw - avm_data_type@instance: wlanconfig # Guest - avm_wlan_index@instance: 3 - avm_read_after_write@instance: 5 - wlan_gast_ssid: - type: str - visu_acl: ro - avm_data_type@instance: wlanconfig_ssid - avm_wlan_index@instance: 3 - wlan_gast_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 3 - wlan_gast_tr: - type: num - visu_acl: rw - avm_data_type@instance: wlan_guest_time_remaining - avm_wlan_index@instance: 3 - - device: - avm_data_type@instance: network_device - type: bool - visu_acl: ro - ip: - type: str - avm_data_type@instance: device_ip - visu_acl: ro - 
connection_type: - type: str - avm_data_type@instance: device_connection_type - visu_acl: ro - hostname: - type: str - avm_data_type@instance: device_hostname - visu_acl: ro - - aha_general: - name: - type: str - avm_data_type@instance: device_name - avm_data_cycle@instance: 0 - identifier: - type: str - avm_data_type@instance: device_id - avm_data_cycle@instance: 0 - productname: - type: str - avm_data_type@instance: product_name - avm_data_cycle@instance: 0 - manufacturer: - type: str - avm_data_type@instance: manufacturer - avm_data_cycle@instance: 0 - firmware_version: - type: str - avm_data_type@instance: fw_version - avm_data_cycle@instance: 0 - present: - type: bool - avm_data_type@instance: connected - avm_data_cycle@instance: 0 - functions: - type: list - avm_data_type@instance: device_functions - avm_data_cycle@instance: 0 - - aha_thermostat: - target_temperature: - avm_data_type@instance: target_temperature - avm_read_after_write@instance: 5 - type: num - comfort_temperature: - avm_data_type@instance: temperature_comfort - type: num - eco_temperature: - avm_data_type@instance: temperature_reduced - type: num - battery_low: - avm_data_type@instance: battery_low - type: bool - battery_level: - avm_data_type@instance: battery_level - type: num - window_open: - avm_data_type@instance: window_open - type: bool - windowopenactiveendtime: - avm_data_type@instance: windowopenactiveendtime - type: num - summer_active: - avm_data_type@instance: summer_active - type: bool - holiday_active: - avm_data_type@instance: holiday_active - type: bool - errorcode: - avm_data_type@instance: errorcode - type: num - hkr_boost: - avm_data_type@instance: hkr_boost - type: bool - boostactiveendtime: - avm_data_type@instance: boostactiveendtime - type: num - lock: - avm_data_type@instance: lock - type: bool - device_lock: - avm_data_type@instance: device_lock - type: bool - - aha_temperature_sensor: - current_temperature: - avm_data_type@instance: current_temperature - type: num - temperature_offset: - avm_data_type@instance: temperature_offset - type: num - - aha_humidity_sensor: - humidity: - avm_data_type@instance: humidity - type: num - - aha_alert: - state: - avm_data_type@instance: alert - type: bool - - aha_switch: - switch_state: - avm_data_type@instance: switch_state - type: bool - switch_mode: - avm_data_type@instance: switch_mode - type: str - switch_toggle: - avm_data_type@instance: switch_toggle - type: bool - enforce_updates: yes - - aha_powermeter: - power: - avm_data_type@instance: power - type: num - energy: - avm_data_type@instance: energy - type: num - voltage: - avm_data_type@instance: voltage - type: num - - aha_level: - level: - avm_data_type@instance: level - type: num - level_percentage: - avm_data_type@instance: levelpercentage - type: num - - aha_blind: - blind_mode: - avm_data_type@instance: blind_mode - type: str - endpositionsset: - avm_data_type@instance: endpositionsset - type: bool - - aha_on_off: - on_off: - avm_data_type@instance: simpleonoff - type: bool - - aha_button: - battery_low: - avm_data_type@instance: battery_low - type: bool - battery_level: - avm_data_type@instance: battery_level - type: num - - aha_color: - color_mode: - avm_data_type@instance: color_mode - type: num - supported_color_mode: - avm_data_type@instance: supported_color_mode - type: num - fullcolorsupport: - avm_data_type@instance: fullcolorsupport - type: bool - mapped: - avm_data_type@instance: mapped - type: bool - hue: - avm_data_type@instance: hue - type: num - saturation: - 
avm_data_type@instance: saturation - type: num - unmapped_hue: - avm_data_type@instance: unmapped_hue - type: bool - unmapped_saturation: - avm_data_type@instance: unmapped_saturation - type: bool - colortemperature: - avm_data_type@instance: colortemperature - type: num - -#item_attribute_prefixes: - # Definition of item attributes that only have a common prefix (enter 'item_attribute_prefixes: NONE' or ommit this section, if section should be empty) - # NOTE: This section should only be used, if really necessary (e.g. for the stateengine plugin) - -plugin_functions: - - cancel_call: - type: void - description: - de: "Beendet einen aktiven Anruf." - en: "Cancels an active call." - parameters: - # This function has no parameters - - get_call_origin: - type: str - description: - de: "Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist." - en: "Gets the phone name, currently set as 'call origin'." - parameters: - # This function has no parameters - - get_calllist: - type: list(dict(str)) - description: - de: "Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige optional))." - en: "Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date', 'Duration' (some optional))." - parameters: - filter_incoming: - type: str - default: '' - description: - de: "Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören." - en: "Filter to filter calls to a specific destination phone number." - phonebook_id: - type: int - default: 0 - description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." - - get_contact_name_by_phone_number: - type: str - description: - de: "Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert." - en: "Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned." - parameters: - phone_number: - type: str - description: - de: "Vollständige Telefonnummer" - en: "Complete phone number" - phonebook_id: - type: int - default: 0 - description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." - - get_device_log_from_lua: - type: list(list(str)) - description: - de: "Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log." - en: "Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log." - parameters: - # This function has no parameters - - get_device_log_from_tr064: - type: list(str) - description: - de: "Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle." - en: "Gets the log entries on the device via the TR-064 interface." - parameters: - # This function has no parameters - - get_host_details: - type: dict(str) - description: - de: "Ermittelt die Informationen zu einem Host an einem angegebenen Index." - en: "Gets the information of a hosts at a specific index." - parameters: - index: - type: int - description: - de: "Index" - en: "Index" - - get_hosts: - type: list(dict(str)) - description: - de: "Ermittelt ein Array mit den Namen aller verbundener Hosts." 
- en: "Gets the name of all connected hosts as an array." - parameters: - only_active: - type: bool - description: - de: "True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen." - en: "True, if only active hosts shall be returned." - - get_phone_name: - type: str - description: - de: "Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden." - en: "Get the phone name at a specific index. The returend value can be used as phone_name for set_call_origin." - parameters: - index: - type: int - description: - de: "Index" - en: "Index" - - get_phone_numbers_by_name: - type: dict(dict(str)) - description: - de: "Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern." - en: "Searches the phonebook for a contact by a given name and returns the corresponding phone numbers." - parameters: - name: - type: str - description: - de: "Anteiliger oder vollständiger Name des Kontakts." - en: "Partial or full name of the contact." - phonebook_id: - type: int - default: 0 - description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." - - is_host_active: - type: bool - description: - de: "Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden." - en: "Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection." - parameters: - mac_address: - type: mac - description: - de: "MAC Adresse" - en: "MAC address" - - reboot: - type: void - description: - de: "Startet das Gerät neu." - en: "Reboots the device." - - reconnect: - type: void - description: - de: "Verbindet das Gerät neu mit dem WAN (Wide Area Network)." - en: "Reconnects the device to the WAN (Wide Area Network)." - - set_call_origin: - type: void - description: - de: "Setzt den 'call origin', bspw. vor dem Aufruf von 'start_call'." - en: "Sets the 'call origin', e.g. before running 'start_call'." - parameters: - phone_name: - type: mac - description: - de: "Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät." - en: "Full phone identifier, could be e.g. two asterisk followed by '610' for an internal device." - - start_call: - type: void - description: - de: "Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern)." - en: "Starts a call for a given phone number (internal or external)." - parameters: - phone_number: - type: str - description: - de: "Vollständige Telefonnummer, die angerufen werden soll." - en: "Full phone number to call" - - wol: - type: void - description: - de: "Sendet einen WOL (WakeOnLAN) Befehl an eine MAC Adresse." - en: "Sends a WOL (WakeOnLAN) command to a MAC address." - parameters: - mac_address: - type: mac - description: - de: "MAC Adresse" - en: "MAC address" - - get_number_of_deflections: - type: bool - description: - de: "Liefert die Anzahl der Rufumleitungen zurück." - en: "Returns Number of set deflections." - parameters: - # This function has no parameters - - get_deflection: - type: bool - description: - de: "Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0)" - en: "Returns details of deflection with given deflection_id (default id = 0)" - parameters: - deflection_id: - type: int - description: - de: "Identifikator der abzufragenden Rufumleitung." 
- en: "Identifier of deflection." - - get_deflections: - type: bool - description: - de: "Liefert die Details aller Rufumleitungen zurück." - en: "Returns details of all deflections." - parameters: - # This function has no parameters - - set_deflection_enable: - type: bool - description: - de: "Schaltet die Rufumleitung mit angegebener ID an oder aus." - en: "Enables or disables deflection with given ID." - parameters: - deflection_id: - type: int - description: - de: "Identifikator der abzufragenden Rufumleitung." - en: "identifier of deflection." - enable: - type: bool - description: - de: "An / Aus" - en: "Enable / Disable" - - get_mesh_topology: - type: dict - description: - de: "Liefert die Mesh-Topologie als Dictionary" - en: "Lists mesh topology as dict" - parameters: - # This function has no parameters - - get_hosts_dict: - type: dict - description: - de: "Liefert Informationen aller Hosts als Dictionary" - en: "Lists information of all hosts as dict" - parameters: - # This function has no parameters - -logic_parameters: NONE - # Definition of logic parameters defined by this plugin (enter 'logic_parameters: NONE', if section should be empty) +# Metadata for the plugin +plugin: + # Global plugin attributes + type: interface # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, etc.' + en: 'Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets.' + maintainer: sisamiwe + tester: psilo, onkelandy, aschwith, bmx + state: develop # change to ready when done with development +# keywords: iot xyz + documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin + + version: 2.0.4 # Plugin version (must match the version specified in __init__.py) + sh_minversion: 1.8 # minimum shNG version to use this plugin +# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) +# py_minversion: 3.6 # minimum Python version to use for this plugin +# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) + multi_instance: true # plugin supports multi instance + restartable: unknown + classname: AVM # class containing the plugin + +parameters: + # Definition of parameters to be configured in etc/plugin.yaml + username: + type: str + default: '' + description: + de: '(optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen und Kennwort` notwendig' + en: '(optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Firtz!OS 7 the Fritz!Box should be configured for login with username and password' + password: + type: str + default: '' + hide: true + description: + de: '(optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen.' + en: '(optional) Password for login. Is normally always needed and recommended due to security reasons' + host: + type: str + mandatory: True + description: + de: '(optional) Hostname oder IP-Adresse des FritzDevice.' + en: '(optional) Hostname or ip address of the FritzDevice.' 
+ port: + type: int + default: 49443 + description: + de: '(optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http' + en: '(optional) Port of the FritzDevice, typically 49443 for https or 49000 for http' + cycle: + type: int + default: 300 + description: + de: '(optional) Zeit zwischen zwei Runs. Default ist 300 Sekunden.' + en: '(optional) Time period between two update cycles. Default is 300 seconds.' + ssl: + type: bool + default: true + description: + de: '(optional) Mit True wird das FritzDevice via https, mit False via http angesprochen.' + en: '(optional) True will add "https", False "http" to the URLs in the plugin.' + verify: + type: bool + default: false + description: + de: '(optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False.' + en: '(optional) Turns certificate verification on or off. Typically False' + call_monitor: + type: bool + default: false + description: + de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via #96*5* aktiviert sein.' + en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be activated before by a connected telephone via calling #96*5*' + call_monitor_incoming_filter: + type: str + default: '' + description: + de: '(optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet.' + en: '(optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react.' + avm_home_automation: + type: bool + default: false + description: + de: '(optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface.' + en: '(optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface' + log_entry_count: + type: int + default: 200 + description: + de: '(optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle' + en: '(optional) Amount of Log-Messages, witch will be displayed. 0 = all' + tr064_item_blacklist: + type: bool + default: False + description: + de: '(optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt.' + en: '(optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle' + +item_attributes: + # Definition of item attributes defined by this plugin + avm_data_type: + type: str + mandatory: True + description: + de: 'AVM Datentyp des jeweiligen Items.' + en: 'AVM Data Type of the respective item.' 
+ valid_list: + # Fritzdevice Attribute + - 'uptime' # r/o num Laufzeit des Fritzdevice in Sekunden + - 'serial_number' # r/o str Serialnummer des Fritzdevice + - 'software_version' # r/o str Software Version + - 'hardware_version' # r/o str Hardware Version + # Myfritz Attribute + - 'myfritz_status' # r/o bool MyFritz Status + # Call Monitor Attribute + - 'monitor_trigger' # r/o bool Monitortrigger + - 'is_call_incoming' # r/o bool Eingehender Anruf erkannt + - 'call_duration_incoming' # r/o num Dauer des eingehenden Anrufs + - 'last_caller_incoming' # r/o str Letzter Anrufer + - 'last_number_incoming' # r/o str Nummer des letzten eingehenden Anrufes + - 'last_called_number_incoming' # r/o str Angerufene Nummer des letzten eingehenden Anrufs + - 'last_call_date_incoming' # r/o str Zeitpunkt des letzten eingehenden Anrufs + - 'call_event_incoming' # r/o str Status des letzten eingehenden Anrufs + - 'is_call_outgoing' # r/o bool Ausgehender Anruf erkannt + - 'call_duration_outgoing' # r/o num Dauer des ausgehenden Anrufs + - 'last_caller_outgoing' # r/o str Letzter angerufener Kontakt + - 'last_number_outgoing' # r/o str Letzte angerufene Nummer + - 'last_called_number_outgoing' # r/o str Letzter verwendete Telefonnummer für ausgehenden Anruf + - 'last_call_date_outgoing' # r/o str Zeitpunkt des letzten ausgehenden Anrufs + - 'call_event_outgoing' # r/o str Status des letzten ausgehenden Anrufs + - 'call_direction' # r/o str Richtung des letzten Anrufes + - 'call_event' # r/o str Status des letzten Anrufes + # TAM Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_index' + - 'tam' # r/w bool TAM an/aus + - 'tam_name' # r/o str Name des TAM + - 'tam_old_message_number' # r/o num Anzahl der alten Nachrichten + - 'tam_new_message_number' # r/o num Anzahl der neuen Nachrichten + - 'tam_total_message_number' # r/o num Gesamtanzahl der Nachrichten + # WAN Attribute + - 'wan_connection_status' # r/o str WAN Verbindungsstatus + - 'wan_connection_error' # r/o str WAN Verbindungsfehler + - 'wan_is_connected' # r/o bool WAN Verbindung aktiv + - 'wan_uptime' # r/o str WAN Verbindungszeit + - 'wan_ip' # r/o str WAN IP Adresse + - 'wan_upstream' # r/o num WAN Upstream Datenmenge + - 'wan_downstream' # r/o num WAN Downstream Datenmenge + - 'wan_total_packets_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Pakete + - 'wan_total_packets_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Pakete + - 'wan_current_packets_sent' # r/o num WAN Verbindung-Anzahl aktuell versendeter Pakete + - 'wan_current_packets_received' # r/o num WAN Verbindung-Anzahl aktuell empfangener Pakete + - 'wan_total_bytes_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Bytes + - 'wan_total_bytes_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Bytes + - 'wan_current_bytes_sent' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Senden + - 'wan_current_bytes_received' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen + - 'wan_link' # r/o bool WAN Link + # WLAN Config Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_wlan_index' + - 'wlanconfig' # r/w bool WLAN An/Aus + - 'wlanconfig_ssid' # r/o str WLAN SSID + - 'wlan_guest_time_remaining' # r/o num Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN + - 'wlan_associates' # r/o num Anzahl der verbundenen Geräte im jeweiligen WLAN + - 'wps_active' # r/w bool Schaltet WPS für das entsprechende WlAN an / aus + - 'wps_status' # r/o str WPS Status des entsprechenden 
WlAN + - 'wps_mode' # r/o str WPS Modus des entsprechenden WlAN + # WLAN Attribute + - 'wlan_total_associates' # r/o num Anzahl der verbundenen Geräte im WLAN + # Host Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_mac' + - 'network_device' # r/o bool Verbindungsstatus // Defines Network device via MAC-Adresse + - 'device_ip' # r/o str Geräte-IP (Muss Child von 'network_device' sein) + - 'device_connection_type' # r/o str Verbindungstyp (Muss Child von 'network_device' sein) + - 'device_hostname' # r/o str Gerätename (Muss Child von 'network_device' sein) + - 'connection_status' # r/o bool Verbindungsstatus (Muss Child von 'network_device' sein) + # Hosts Attribute + - 'hosts_count' #r/o num Anzahl der Hosts + - 'hosts_info' #r/o dict Informationen über die Hosts + - 'mesh_topology' #r/o dict Topologie des Mesh + # Smarthome Attribute (Deprecated avm data types. Please use alternative AHA interface type) + - 'aha_device' # r/w bool Steckdose schalten; siehe "switch_state" + - 'hkr_device' # r/o str Status des HKR (OPEN; CLOSED; TEMP) + - 'set_temperature' # r/o num siehe "target_temperature" + - 'temperature' # r/o num siehe "current_temperature" + - 'set_temperature_reduced' # r/o num siehe "temperature_reduced" + - 'set_temperature_comfort' # r/o num siehe "temperature_comfort" + - 'firmware_version' # r/o str siehe "fw_version" + # Deflections Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_deflection_index' + - 'number_of_deflections' # r/o num Anzahl der eingestellten Rufumleitungen + - 'deflection_details' # r/o dict Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item + - 'deflections_details' # r/o dict Details zu allen Rufumleitung (als dict) + - 'deflection_enable' # r/w bool Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_type' # r/o str Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_number' # r/o str Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_to_number' # r/o str Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_mode' # r/o str Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_outgoing' # r/o str Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - 'deflection_phonebook_id' # r/o str Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + # AHA Interface attributes Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_ain' + - 'device_id' # r/o str Geräte -ID + - 'manufacturer' # r/o str Hersteller + - 'product_name' # r/o str Produktname + - 'fw_version' # r/o str Firmware Version + - 'connected' # r/o bool Verbindungsstatus + - 'device_name' # r/o str Gerätename + - 'tx_busy' # r/o bool Verbindung aktiv + - 'device_functions' # r/o list Im Gerät vorhandene Funktionen + + - 'set_target_temperature' # w/o num Soll-Temperatur Setzen + - 'target_temperature' # r/w num Soll-Temperatur (Status und Setzen) + - 'current_temperature' # r/o num Ist-Temperatur + - 'temperature_reduced' # r/o num Eingestellte reduzierte Temperatur 
+ - 'temperature_comfort' # r/o num Eingestellte Komfort-Temperatur + - 'temperature_offset' # r/o num Eingestellter Temperatur-Offset + - 'set_window_open' # w/o bool "Window Open" Funktionen Setzen + - 'window_open' # r/w bool "Window Open" Funktion (Status und Setzen) + - 'windowopenactiveendtime' # r/o num Zeitliches Ende der "Window Open" Funktion + - 'set_hkr_boost' # w/o bool "Boost" Funktion Setzen + - 'hkr_boost' # r/w bool "Boost" Funktion (Status aund Setzen) + - 'boost_active' # r/o bool Status der "Boost" Funktion deprecated + - 'boostactiveendtime' # r/o num Zeitliches Ende der "Boost" Funktion + - 'summer_active' # r/o bool Status der "Sommer" Funktion + - 'holiday_active' # r/o bool Status der "Holiday" Funktion + - 'battery_low' # r/o bool "Battery low" Status + - 'battery_level' # r/o num Batterie-Status in % + - 'lock' # r/o bool Tastensperre über UI/API aktiv + - 'device_lock' # r/o bool Tastensperre direkt am Gerät ein + - 'errorcode' # r/o num Fehlercodes die der HKR liefert + + - 'set_simpleonoff' # w/o bool Gerät/Aktor/Lampe an-/ausschalten + - 'simpleonoff' # w/r bool Gerät/Aktor/Lampe (Status und Setzen) + + - 'set_level' # w/o num Level/Niveau von 0 bis 255 Setzen + - 'level' # w/r num Level/Niveau von 0 bis 255 (Setzen & Status) + - 'set_levelpercentage' # w/o num Level/Niveau von 0% bis 100% Setzen + - 'levelpercentage' # w/r num Level/Niveau von 0% bis 100% (Setzen & Status) + - + - 'set_hue' # w/o num Hue Setzen + - 'hue' # w/r num Hue (Status und Setzen) + - 'set_saturation' # w/o num Saturation Setzen + - 'saturation' # w/r num Saturation (Status und Setzen) + - 'set_colortemperature' # w/o num Farbtemperatur Setzen + - 'colortemperature' # w/r num Farbtemperatur (Status und Setzen) + - 'unmapped_hue' # w/r num Hue (Status und Setzen) + - 'unmapped_saturation' # w/r num Saturation (Status und Setzen) + - 'color_mode' # r/o num Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - 'supported_color_mode' # r/o num Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - 'fullcolorsupport' # r/o bool Lampe unterstützt setunmappedcolor + - 'mapped' # r/o bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt + + - 'switch_state' # r/w bool Schaltzustand Steckdose (Status und Setzen) + - 'switch_mode' # r/o str Zeitschaltung oder manuell schalten + - 'switch_toggle' # w/o bool Schaltzustand umschalten (toggle) + + - 'power' # r/o num Leistung in W (Aktualisierung alle 2 min) + - 'energy' # r/o num absoluter Verbrauch seit Inbetriebnahme in Wh + - 'voltage' # r/o num Spannung in V (Aktualisierung alle 2 min) + + - 'humidity' # r/o num Relative Luftfeuchtigkeit in % (FD440) + + - 'alert_state' # r/o bool letzter übermittelter Alarmzustand + + - 'blind_mode' # r/o str automatische Zeitschaltung oder manuell fahren + - 'endpositionsset' # r/o bool ist die Endlage für das Rollo konfiguriert + + avm_incoming_allowed: + type: str + mandatory: False + description: + de: '(optional) Definition der erlaubten eingehenden Rufnummer in Items vom avm_data_type `monitor_trigger`.' + en: '(optional) Definition of the allowed incoming number. Only in items of avm_data_type `monitor_trigger`.' + + avm_target_number: + type: str + mandatory: False + description: + de: '(optional) Definition der erlaubten angerufenen Rufnummer in Items vom avm_data_type `monitor_trigger`.' + en: '(optional) Definition of the allowed called number. Only in items of avm_data_type `monitor_trigger`.' 
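A minimal sketch of a `monitor_trigger` item using the two attributes defined directly above; both phone numbers are placeholders.

doorbell:
    trigger:
        type: bool
        enforce_updates: yes
        avm_data_type: monitor_trigger
        avm_incoming_allowed: '015123456789'   # placeholder: calling number the trigger reacts to
        avm_target_number: '0301234567'        # placeholder: own (called) number the trigger reacts to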
+ + avm_wlan_index: + type: int + description: + de: '(optional) Definition des Wlans ueber index: (1: 2.4Ghz, 2: 5Ghz, 3: Gaeste).' + en: '(optional) Definition of WiFi via index: (1: 2.4GHz, 2: 5GHz, 3: Guest)' + valid_min: 1 + valid_max: 3 + + avm_mac: + type: mac + mandatory: False + description: + de: '(optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory!' + en: '(optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items!' + + avm_ain: + type: str + mandatory: False + description: + de: "(optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory!" + en: "(optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items!" + + avm_tam_index: + type: int + mandatory: False + description: + de: '(optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt.' + en: '(optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines.' + valid_min: 1 + valid_max: 5 + + avm_deflection_index: + type: int + mandatory: False + description: + de: '(optional) Index für die Rufumleitung, normalerweise für die erste eine "1".' + en: '(optional) Index deflection, normally a "1" for the first one.' + valid_min: 1 + valid_max: 32 + + avm_read_after_write: + type: int + description: + de: '(optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird.' + en: '(optional) Configures delay in seconds to issue a read command after write command' + + avm_data_cycle: + type: int + mandatory: False + description: + de: 'Poll-Zyklus des AVM Datentypes des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen' + en: 'Poll cycle of AVM Data Type of the respective item. 
0-Just read at init; 10+ - cyclic reading' + +item_structs: + info: + uptime: + type: num + visu_acl: ro + avm_data_type@instance: uptime + avm_data_cycle@instance: 600 + serial_number: + type: str + visu_acl: ro + avm_data_type@instance: serial_number + avm_data_cycle@instance: 0 + firmware: + type: str + visu_acl: ro + avm_data_type@instance: software_version + avm_data_cycle@instance: 3600 + hardware_version: + type: str + visu_acl: ro + avm_data_type@instance: hardware_version + avm_data_cycle@instance: 0 + myfritz: + type: bool + avm_data_type@instance: myfritz_status + + monitor: + trigger: + type: bool + avm_data_type@instance: monitor_trigger + avm_incoming_allowed@instance: xxxxxxxx + avm_target_number@instance: xxxxxxxx + enforce_updates: yes + incoming: + is_call_incoming: + type: bool + avm_data_type@instance: is_call_incoming + duration: + type: num + avm_data_type@instance: call_duration_incoming + last_caller: + type: str + avm_data_type@instance: last_caller_incoming + last_calling_number: + type: str + avm_data_type@instance: last_number_incoming + last_called_number: + type: str + avm_data_type@instance: last_called_number_incoming + last_call_date: + type: str + avm_data_type@instance: last_call_date_incoming + event: + type: str + avm_data_type@instance: call_event_incoming + outgoing: + is_call_outgoing: + type: bool + avm_data_type@instance: is_call_outgoing + duration: + type: num + avm_data_type@instance: call_duration_outgoing + last_caller: + type: str + avm_data_type@instance: last_caller_outgoing + last_calling_number: + type: str + avm_data_type@instance: last_number_outgoing + last_called_number: + type: str + avm_data_type@instance: last_called_number_outgoing + last_call_date: + type: str + avm_data_type@instance: last_call_date_outgoing + event: + type: str + avm_data_type@instance: call_event_outgoing + newest: + direction: + type: str + avm_data_type@instance: call_direction + cache: yes + event: + type: str + avm_data_type@instance: call_event + cache: yes + + tam: + type: bool + visu_acl: rw + avm_data_type@instance: tam + avm_tam_index@instance: 1 + avm_read_after_write@instance: 5 + + name: + type: str + visu_acl: ro + avm_data_type@instance: tam_name + message_number_old: + type: num + visu_acl: ro + avm_data_type@instance: tam_old_message_number + message_number_new: + type: num + visu_acl: ro + avm_data_type@instance: tam_new_message_number + message_number_total: + type: num + visu_acl: ro + avm_data_type@instance: tam_total_message_number + + deflection: + type: bool + visu_acl: rw + avm_data_type@instance: deflection_enable + avm_deflection_index@instance: 1 + avm_read_after_write@instance: 5 + + deflection_type: + type: str + visu_acl: ro + avm_data_type@instance: deflection_type + deflection_number: + type: str + visu_acl: ro + avm_data_type@instance: deflection_number + deflection_to_number: + type: str + visu_acl: ro + avm_data_type@instance: deflection_to_number + deflection_mode: + type: str + visu_acl: ro + avm_data_type@instance: deflection_mode + deflection_outgoing: + type: str + visu_acl: ro + avm_data_type@instance: deflection_outgoing + deflection_phonebook_id: + type: num + visu_acl: ro + avm_data_type@instance: deflection_phonebook_id + + wan: + connection_status: + type: str + visu_acl: ro + avm_data_type@instance: wan_connection_status + connection_error: + type: str + visu_acl: ro + avm_data_type@instance: wan_connection_error + is_connected: + type: bool + visu_acl: ro + avm_data_type@instance: wan_is_connected + uptime: + 
type: num + visu_acl: ro + avm_data_type@instance: wan_uptime + upstream: + type: num + visu_acl: ro + avm_data_type@instance: wan_upstream + downstream: + type: num + visu_acl: ro + avm_data_type@instance: wan_downstream + total_packets_sent: + type: num + visu_acl: ro + avm_data_type@instance: wan_total_packets_sent + total_packets_received: + type: num + visu_acl: ro + avm_data_type@instance: wan_total_packets_received + total_bytes_sent: + type: num + visu_acl: ro + avm_data_type@instance: wan_total_bytes_sent + total_bytes_received: + type: num + visu_acl: ro + avm_data_type@instance: wan_total_bytes_received + current_bytes_sent: + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_sent + current_bytes_receive: + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_received + link: + type: bool + visu_acl: ro + avm_data_type@instance: wan_link + reconnect: + type: bool + visu_acl: rw + enforce_updates: yes + + wlan: + wlan_1: + type: bool + visu_acl: rw + avm_data_type@instance: wlanconfig # 2,4ghz + avm_wlan_index@instance: 1 + avm_read_after_write@instance: 5 + wlan_1_ssid: + type: str + visu_acl: ro + avm_data_type@instance: wlanconfig_ssid + avm_wlan_index@instance: 1 + wlan_1_associates: + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 1 + wlan_2: + type: bool + visu_acl: rw + avm_data_type@instance: wlanconfig # 5 GHz + avm_wlan_index@instance: 2 + avm_read_after_write@instance: 5 + wlan_2_ssid: + type: str + visu_acl: ro + avm_data_type@instance: wlanconfig_ssid + avm_wlan_index@instance: 2 + wlan_2_associates: + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 2 + wlan_gast: + type: bool + visu_acl: rw + avm_data_type@instance: wlanconfig # Guest + avm_wlan_index@instance: 3 + avm_read_after_write@instance: 5 + wlan_gast_ssid: + type: str + visu_acl: ro + avm_data_type@instance: wlanconfig_ssid + avm_wlan_index@instance: 3 + wlan_gast_associates: + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 3 + wlan_gast_tr: + type: num + visu_acl: rw + avm_data_type@instance: wlan_guest_time_remaining + avm_wlan_index@instance: 3 + + device: + avm_data_type@instance: network_device + type: bool + visu_acl: ro + ip: + type: str + avm_data_type@instance: device_ip + visu_acl: ro + connection_type: + type: str + avm_data_type@instance: device_connection_type + visu_acl: ro + hostname: + type: str + avm_data_type@instance: device_hostname + visu_acl: ro + + aha_general: + name: + type: str + avm_data_type@instance: device_name + avm_data_cycle@instance: 0 + identifier: + type: str + avm_data_type@instance: device_id + avm_data_cycle@instance: 0 + productname: + type: str + avm_data_type@instance: product_name + avm_data_cycle@instance: 0 + manufacturer: + type: str + avm_data_type@instance: manufacturer + avm_data_cycle@instance: 0 + firmware_version: + type: str + avm_data_type@instance: fw_version + avm_data_cycle@instance: 0 + present: + type: bool + avm_data_type@instance: connected + avm_data_cycle@instance: 0 + functions: + type: list + avm_data_type@instance: device_functions + avm_data_cycle@instance: 0 + + aha_thermostat: + target_temperature: + avm_data_type@instance: target_temperature + avm_read_after_write@instance: 5 + type: num + comfort_temperature: + avm_data_type@instance: temperature_comfort + type: num + eco_temperature: + avm_data_type@instance: temperature_reduced + type: num + battery_low: + 
avm_data_type@instance: battery_low + type: bool + battery_level: + avm_data_type@instance: battery_level + type: num + window_open: + avm_data_type@instance: window_open + type: bool + windowopenactiveendtime: + avm_data_type@instance: windowopenactiveendtime + type: num + summer_active: + avm_data_type@instance: summer_active + type: bool + holiday_active: + avm_data_type@instance: holiday_active + type: bool + errorcode: + avm_data_type@instance: errorcode + type: num + hkr_boost: + avm_data_type@instance: hkr_boost + type: bool + boostactiveendtime: + avm_data_type@instance: boostactiveendtime + type: num + lock: + avm_data_type@instance: lock + type: bool + device_lock: + avm_data_type@instance: device_lock + type: bool + + aha_temperature_sensor: + current_temperature: + avm_data_type@instance: current_temperature + type: num + temperature_offset: + avm_data_type@instance: temperature_offset + type: num + + aha_humidity_sensor: + humidity: + avm_data_type@instance: humidity + type: num + + aha_alert: + state: + avm_data_type@instance: alert + type: bool + + aha_switch: + switch_state: + avm_data_type@instance: switch_state + type: bool + switch_mode: + avm_data_type@instance: switch_mode + type: str + switch_toggle: + avm_data_type@instance: switch_toggle + type: bool + enforce_updates: yes + + aha_powermeter: + power: + avm_data_type@instance: power + type: num + energy: + avm_data_type@instance: energy + type: num + voltage: + avm_data_type@instance: voltage + type: num + + aha_level: + level: + avm_data_type@instance: level + type: num + level_percentage: + avm_data_type@instance: levelpercentage + type: num + + aha_blind: + blind_mode: + avm_data_type@instance: blind_mode + type: str + endpositionsset: + avm_data_type@instance: endpositionsset + type: bool + + aha_on_off: + on_off: + avm_data_type@instance: simpleonoff + type: bool + + aha_button: + battery_low: + avm_data_type@instance: battery_low + type: bool + battery_level: + avm_data_type@instance: battery_level + type: num + + aha_color: + color_mode: + avm_data_type@instance: color_mode + type: num + supported_color_mode: + avm_data_type@instance: supported_color_mode + type: num + fullcolorsupport: + avm_data_type@instance: fullcolorsupport + type: bool + mapped: + avm_data_type@instance: mapped + type: bool + hue: + avm_data_type@instance: hue + type: num + saturation: + avm_data_type@instance: saturation + type: num + unmapped_hue: + avm_data_type@instance: unmapped_hue + type: bool + unmapped_saturation: + avm_data_type@instance: unmapped_saturation + type: bool + colortemperature: + avm_data_type@instance: colortemperature + type: num + +#item_attribute_prefixes: + # Definition of item attributes that only have a common prefix (enter 'item_attribute_prefixes: NONE' or ommit this section, if section should be empty) + # NOTE: This section should only be used, if really necessary (e.g. for the stateengine plugin) + +plugin_functions: + + cancel_call: + type: void + description: + de: "Beendet einen aktiven Anruf." + en: "Cancels an active call." + parameters: + # This function has no parameters + + get_call_origin: + type: str + description: + de: "Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist." + en: "Gets the phone name, currently set as 'call origin'." 
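+    # Hypothetical usage sketch from a logic (assumption: the plugin is configured
+    # under the name 'fritzbox' in etc/plugin.yaml):
+    #   origin_phone = sh.fritzbox.get_call_origin()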
+ parameters: + # This function has no parameters + + get_calllist: + type: list(dict(str)) + description: + de: "Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige optional))." + en: "Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date', 'Duration' (some optional))." + parameters: + filter_incoming: + type: str + default: '' + description: + de: "Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören." + en: "Filter to filter calls to a specific destination phone number." + phonebook_id: + type: int + default: 0 + description: + de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." + en: "ID of the phone book, in which numbers should be looked up." + + get_contact_name_by_phone_number: + type: str + description: + de: "Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert." + en: "Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned." + parameters: + phone_number: + type: str + description: + de: "Vollständige Telefonnummer" + en: "Complete phone number" + phonebook_id: + type: int + default: 0 + description: + de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." + en: "ID of the phone book, in which numbers should be looked up." + + get_device_log_from_lua: + type: list(list(str)) + description: + de: "Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log." + en: "Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log." + parameters: + # This function has no parameters + + get_device_log_from_tr064: + type: list(str) + description: + de: "Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle." + en: "Gets the log entries on the device via the TR-064 interface." + parameters: + # This function has no parameters + + get_host_details: + type: dict(str) + description: + de: "Ermittelt die Informationen zu einem Host an einem angegebenen Index." + en: "Gets the information of a hosts at a specific index." + parameters: + index: + type: int + description: + de: "Index" + en: "Index" + + get_hosts: + type: list(dict(str)) + description: + de: "Ermittelt ein Array mit den Namen aller verbundener Hosts." + en: "Gets the name of all connected hosts as an array." + parameters: + only_active: + type: bool + description: + de: "True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen." + en: "True, if only active hosts shall be returned." + + get_phone_name: + type: str + description: + de: "Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden." + en: "Get the phone name at a specific index. The returend value can be used as phone_name for set_call_origin." + parameters: + index: + type: int + description: + de: "Index" + en: "Index" + + get_phone_numbers_by_name: + type: dict(dict(str)) + description: + de: "Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern." + en: "Searches the phonebook for a contact by a given name and returns the corresponding phone numbers." 
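+    # Hypothetical usage sketch from a logic (assumptions: plugin configured as 'fritzbox',
+    # a contact 'Mustermann' exists in phonebook 0, the default phonebook_id):
+    #   numbers = sh.fritzbox.get_phone_numbers_by_name('Mustermann')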
+ parameters: + name: + type: str + description: + de: "Anteiliger oder vollständiger Name des Kontakts." + en: "Partial or full name of the contact." + phonebook_id: + type: int + default: 0 + description: + de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." + en: "ID of the phone book, in which numbers should be looked up." + + is_host_active: + type: bool + description: + de: "Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden." + en: "Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection." + parameters: + mac_address: + type: mac + description: + de: "MAC Adresse" + en: "MAC address" + + reboot: + type: void + description: + de: "Startet das Gerät neu." + en: "Reboots the device." + + reconnect: + type: void + description: + de: "Verbindet das Gerät neu mit dem WAN (Wide Area Network)." + en: "Reconnects the device to the WAN (Wide Area Network)." + + set_call_origin: + type: void + description: + de: "Setzt den 'call origin', bspw. vor dem Aufruf von 'start_call'." + en: "Sets the 'call origin', e.g. before running 'start_call'." + parameters: + phone_name: + type: mac + description: + de: "Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät." + en: "Full phone identifier, could be e.g. two asterisk followed by '610' for an internal device." + + start_call: + type: void + description: + de: "Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern)." + en: "Starts a call for a given phone number (internal or external)." + parameters: + phone_number: + type: str + description: + de: "Vollständige Telefonnummer, die angerufen werden soll." + en: "Full phone number to call" + + wol: + type: void + description: + de: "Sendet einen WOL (WakeOnLAN) Befehl an eine MAC Adresse." + en: "Sends a WOL (WakeOnLAN) command to a MAC address." + parameters: + mac_address: + type: mac + description: + de: "MAC Adresse" + en: "MAC address" + + get_number_of_deflections: + type: bool + description: + de: "Liefert die Anzahl der Rufumleitungen zurück." + en: "Returns Number of set deflections." + parameters: + # This function has no parameters + + get_deflection: + type: bool + description: + de: "Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0)" + en: "Returns details of deflection with given deflection_id (default id = 0)" + parameters: + deflection_id: + type: int + description: + de: "Identifikator der abzufragenden Rufumleitung." + en: "Identifier of deflection." + + get_deflections: + type: bool + description: + de: "Liefert die Details aller Rufumleitungen zurück." + en: "Returns details of all deflections." + parameters: + # This function has no parameters + + set_deflection_enable: + type: bool + description: + de: "Schaltet die Rufumleitung mit angegebener ID an oder aus." + en: "Enables or disables deflection with given ID." + parameters: + deflection_id: + type: int + description: + de: "Identifikator der abzufragenden Rufumleitung." + en: "identifier of deflection." 
+ enable: + type: bool + description: + de: "An / Aus" + en: "Enable / Disable" + + get_mesh_topology: + type: dict + description: + de: "Liefert die Mesh-Topologie als Dictionary" + en: "Lists mesh topology as dict" + parameters: + # This function has no parameters + + get_hosts_dict: + type: dict + description: + de: "Liefert Informationen aller Hosts als Dictionary" + en: "Lists information of all hosts as dict" + parameters: + # This function has no parameters + +logic_parameters: NONE + # Definition of logic parameters defined by this plugin (enter 'logic_parameters: NONE', if section should be empty) From c4a9c1f5460ebdc3554ab76f596a12b01fbd89ca Mon Sep 17 00:00:00 2001 From: AndreK01 Date: Sun, 7 May 2023 15:14:48 +0200 Subject: [PATCH 093/775] update 4.0.1 - Single-Key-Id-login --- indego4shng/README.md | 31 +- indego4shng/__init__.py | 1132 ++++++++++++++---------- indego4shng/plugin.yaml | 2 +- indego4shng/requirements.txt | 2 + indego4shng/webif/templates/index.html | 2 +- 5 files changed, 666 insertions(+), 503 deletions(-) create mode 100755 indego4shng/requirements.txt mode change 100755 => 100644 indego4shng/webif/templates/index.html diff --git a/indego4shng/README.md b/indego4shng/README.md index 005e693c4..ac6bc8b95 100755 --- a/indego4shng/README.md +++ b/indego4shng/README.md @@ -6,19 +6,19 @@ 2. [Credits](#credits) 3. [Change Log](#changelog) **Neu** 4. [Konfiguration](#konfiguration) **Update** -5. [Web-Interface](#webinterface) **Update** +5. [Web-Interface](#webinterface) 6. [Logik-Trigger](#logiktrigger) 7. [öffentlich Funktionen (API)](#api) 8. [Gartenkarte "pimpen"](#gardenmap) 9. [Nutzung der Original Bosch-Mäher-Symbole](#boschpics) -10. [Die Bosch-Api 3.0 - behind the scenes](#boschapi) +10. [Die Bosch-Api 4.0.1 - behind the scenes](#boschapi) ## Generell Das Indego-Plugin wurde durch ein Reverse-Engineering der aktuellen (Version 3.0) App von Bosch entwickelt. Als Basis diente das ursprüngliche Plugin von Marcov. Es werden alle Funktionen der App für den Betrieb sowie einige zusätzliche bereitgestellt. Für die Ersteinrichtung wird weiterhin die Bosch-App benötigt. -Das Plugin erhält die Version der aktuellen Bosch-API. (3.0) +Das Plugin erhält die Version der aktuellen Bosch-API. (4.0.1) ## Credits @@ -33,6 +33,12 @@ Vielen Dank an Jan Odvarko für die Entwicklung des Color-Pickers (http://jscolo ## Change Log +#### 2023-05-06 V4.0.1 +- Login via Single-Key-ID eingebaut +- Endpoit der Bosch-API wurde geändert (siehe Konfiguration) + +#### 2023-03-08 V4.0.0 +- Login via Bosch-ID eingebaut #### 2023-02-05 V3.0.2 - Anpassungen für die geänderten Daten für das Wetter (es werden nun 7 Tage statt 5 übermittelt, die Sonnenstunden je Tag wurden entfern) @@ -120,6 +126,7 @@ zum "pimpen" der Gartenkarte verwenden * `indego_credentials : XXXXXXX`: sind die Zugangsdaten für den Bosch-Server im Format base64 encoded. 
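   Ein minimales Beispiel, wie der base64-kodierte Wert erzeugt werden kann (die Zugangsdaten
   werden als `benutzer:passwort` kodiert; Benutzername und Passwort sind hier frei erfundene Beispieldaten):

   ```
   import base64
   print(base64.b64encode('max@example.com:geheim'.encode()).decode())
   ```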
* `parent_item : indego`: name des übergeordneten items für alle Child-Items * `cycle : 30`: Intervall in Sekunden für das Abrufen des Mäher-Status (default = 30 Sekunden) +* `url: https://api.indego-cloud.iot.bosch-si.com/api/v1/` : Url des Bosch-Endpoints Die Zugangsdaten (indego_credentials) können nach dem Erststart des Plugins im Web-Interface erfasst und gespeichert werden @@ -136,7 +143,7 @@ Indego4shNG: indego_credentials: parent_item: indego cycle: '30' - url: https://api.indego.iot.bosch-si.com/api/v1/ + url: https://api.indego-cloud.iot.bosch-si.com/api/v1/ ``` @@ -361,18 +368,22 @@ Sobald die Dateien mit den Bildern vorhanden sind findet das Widget diese und ve Die entsprechenden Bilder für die "Großen"/"Kleinen" werden auf Grund des Mähertyps automatisch gewählt und dargestellt. -## Die Bosch-Api 3.0 - behind the scenes +## Die Bosch-Api 4.0.1 - behind the scenes Hier ist die Schnittstelle der Bosch-API kurz beschrieben und die Implementierung im Plugin dokumentiert. Der Header ist in den meisten Fällen mit der Session-ID zu füllen : ``` -headers = { - 'x-im-context-id' : SESSION-ID - } +headers = {'accept' : '*/*', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253', + 'content-type' : 'application/json' + } ``` -@Get - steht für einen get-request in Python. Die URL lautet : "https://api.indego.iot.bosch-si.com/api/v1/" gefolgt vom entsprechenden Zugriffspunkt +@Get - steht für einen get-request in Python. Die URL lautet : "https://api.indego-cloud.iot.bosch-si.com/api/v1/" gefolgt vom entsprechenden Zugriffspunkt ``` -url = "https://api.indego.iot.bosch-si.com/api/v1/" +"alms/{}/automaticUpdate".format(alm_sn) +url = "https://api.indego-cloud.iot.bosch-si.com/api/v1/" +"alms/{}/automaticUpdate".format(alm_sn) response = requests.get(url, headers=headers) ``` diff --git a/indego4shng/__init__.py b/indego4shng/__init__.py index 9888b1ebf..42d2e715e 100755 --- a/indego4shng/__init__.py +++ b/indego4shng/__init__.py @@ -44,9 +44,13 @@ from datetime import date import base64 +import urllib.parse +#sys.path.append('/home/smarthome/.p2/pool/plugins/org.python.pydev.core_6.5.0.201809011628/pysrc') +sys.path.append('/devtools/eclipse/plugins/org.python.pydev.core_8.0.0.202009061309/pysrc/') +import pydevd # If a package is needed, which might be not installed in the Python environment, @@ -64,7 +68,7 @@ class Indego4shNG(SmartPlugin): Main class of the Indego Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '4.0.0' + PLUGIN_VERSION = '4.0.1' def __init__(self, sh, *args, **kwargs): """ @@ -166,15 +170,16 @@ def run(self): self.password = self.credentials.split(":")[1] # taken from Init of the plugin if (self.user != '' and self.password != ''): - # self._auth() deprecated - self.logged_in = self._login2Bosch() + self.login_pending = True + self.logged_in, self._bearer, self._refresh_token, self.token_expires,self.alm_sn = self._login_single_key_id(self.user, self.password) + self.login_pending = False + self.context_id = self._bearer[:10]+ '.......' 
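+            # the former x-im-context-id session handling is gone in the 4.x API;
+            # context_id only keeps a truncated copy of the bearer token here, the full
+            # token is sent via the 'Authorization: Bearer ...' header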
# start the refresh timers self.scheduler_add('operating_data',self._get_operating_data,cycle = 300) self.scheduler_add('get_state', self._get_state, cycle = self.cycle) self.scheduler_add('alert', self.alert, cycle=300) self.scheduler_add('get_all_calendars', self._get_all_calendars, cycle=300) - #self.scheduler_add('check_login_state', self._check_login_state, cycle=130) self.scheduler_add('refresh_token', self._getrefreshToken, cycle=self.token_expires-100) self.scheduler_add('device_data', self._device_data, cycle=120) self.scheduler_add('get_weather', self._get_weather, cycle=600) @@ -199,7 +204,6 @@ def stop(self): self.scheduler_remove('get_weather') self.scheduler_remove('get_next_time') - self._delete_auth() # Log off self.logger.debug("Stop method called") self.alive = False @@ -237,6 +241,7 @@ def parse_item(self, item): return self.update_item if self.has_iattr(item.conf, 'indego_parse_2_attr'): + #pydevd.settrace("192.168.178.37", port=5678) _attr_name = item.conf['indego_attr_name'] newStruct = {} myStruct= json.loads(item()) @@ -271,6 +276,7 @@ def update_item(self, item, caller=None, source=None, dest=None): :param source: if given it represents the source :param dest: if given it represents the dest """ + #pydevd.settrace("192.168.178.37", port=5678) # Function when item is triggered by VISU if caller != self.get_shortname() and caller != 'Autotimer' and caller != 'Logic': @@ -323,6 +329,7 @@ def update_item(self, item, caller=None, source=None, dest=None): self.logger.warning("Error sending command for item '{}' from caller '{}', source '{}' and dest '{}'".format(item,caller,source,dest)) if self.has_iattr(item.conf, 'indego_function_4_all'): + #pydevd.settrace("192.168.178.37", port=5678) try: self.logger.debug("Item '{}' has attribute '{}' found with {}".format( item, 'indego_plugin_function', self.get_iattr_value(item.conf, 'indego_function_4_all'))) myFunction_Name = self.get_iattr_value(item.conf, 'indego_function_4_all') @@ -379,6 +386,7 @@ def _handle_wartung(self, item): self._set_automatic_updates() if item.property.name == self.parent_item+".wartung.messer_zaehler": + #pydevd.settrace("192.168.178.37", port=5678) if (item.property.value == True): if (self._reset_bladeCounter() == True): item(False) @@ -415,6 +423,7 @@ def _handle_parse_map(self, item): def _handle_calendar_list(self, item): if item.property.name == self.parent_item+'.calendar_list': + #pydevd.settrace("192.168.178.37", port=5678) myList = item() myCal = self._get_childitem('calendar') myNewCal = self._parse_list_2_cal(myList, myCal,'MOW') @@ -607,7 +616,7 @@ def _set_clear_message(self): myClearMsg = self._get_childitem('visu.alerts') for message in msg2clear: - myResult = self._delete_url(self.indego_url +'alerts/{}'.format(message), self.context_id, 10,auth=(self.user,self.password)) + myResult = self._delete_url(self.indego_url +'alerts/{}'.format(message), self.context_id, 10,None) self._del_message_in_dict(myClearMsg, message) self._set_childitem('visu.alerts', myClearMsg) @@ -625,11 +634,8 @@ def _check_login_state(self): if self.expiration_timestamp < actTimeStamp+575: self.logged_in = False self.login_pending = True - self._delete_auth() self.context_id = '' - self._auth() self.login_pending = False - self.logged_in = self._check_auth() self._set_childitem('online', self.logged_in) actDate = datetime.now() self.logger.info("refreshed Session-ID at : {}".format(actDate.strftime('Date: %a, %d %b %H:%M:%S %Z %Y'))) @@ -687,6 +693,7 @@ def _auto_pred_cal_update(self): def 
_auto_mow_cal_update(self): self.cal_update_count += 1 self.cal_update_running = True + #pydevd.settrace("192.168.178.37", port=5678) # set actual Calendar in Calendar-structure myCal = self._get_childitem('calendar') actCalendar = self._get_childitem('calendar_sel_cal') @@ -789,6 +796,7 @@ def _get_all_calendars(self): 'days' : schedule['schedule_days'] }] } + #pydevd.settrace("192.168.178.37", port=5678) my_pred_list = self._parse_cal_2_list(my_pred_cal, None) my_smMow_list = self._parse_cal_2_list(my_smMow_cal, None) @@ -808,6 +816,7 @@ def _log_communication(self, type, url, result): self._set_childitem('webif.communication_protocoll', myLog) def _fetch_url(self, url, username=None, password=None, timeout=10, body=None): + #pydevd.settrace("192.168.178.37", port=5678) try: myResult, response = self._post_url(url, self.context_id, body, timeout,auth=(username,password),nowait = True) except Exception as e: @@ -836,15 +845,16 @@ def _delete_url(self, url, contextid=None, timeout=40, auth=None,nowait = True): myCouner += 1 time.sleep(2) - headers = {'accept-encoding' : 'gzip', - 'authorization' : 'Bearer '+ self._bearer, - 'connection' : 'Keep-Alive', - 'host' : 'api.indego-cloud.iot.bosch-si.com', - 'user-agent' : 'Indego-Connect_4.0.0.12253' + headers = {'accept' : '*/*', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253', + 'content-type' : 'application/json' } response = False try: - response = requests.delete(url, headers=headers, auth=auth) + response = requests.delete(url, headers=headers) self._log_communication('delete', url, response.status_code) except Exception as e: self.logger.warning("Problem deleting {}: {}".format(url, e)) @@ -993,30 +1003,6 @@ def _check_state_4_protocoll(self): self.position_detection = False - def _delete_auth(self): - ''' - DELETE https://api.indego.iot.bosch-si.com/api/v1/authenticate - x-im-context-id: {contextId} - ''' - headers = {'Content-Type': 'application/json', - 'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', - 'x-im-context-id' : self.context_id - } - url = self.indego_url + 'authenticate' - try: - response = self._delete_url(url, self.context_id, 10,auth=None, nowait = True) - except Exception as e: - self.logger.warning("Problem logging off {0}: {1}".format(url, e)) - return False - if response == False: - return False - - if (response.status_code == 200 or response.status_code == 201): - self.logger.info("You logged off successfully") - return True - else: - self.logger.info("Log off was not successfull : {0}".format(response.status_code)) - return False def _store_calendar(self, myCal = None, myName = ""): ''' @@ -1038,54 +1024,7 @@ def _store_calendar(self, myCal = None, myName = ""): return response.status_code - - - def _check_auth(self): - ''' - GET https://api.indego.iot.bosch-si.com/api/v1/authenticate/check - Authorization: Basic bWF4Lm11c3RlckBhbnl3aGVyZS5jb206c3VwZXJzZWNyZXQ= - x-im-context-id: {contextId} - ''' - headers = {'Content-Type': 'application/json', - 'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', - 'x-im-context-id' : self.context_id - } - url = self.indego_url + 'authenticate/check' - - try: - response = self._get_url(url, self.context_id, 10, auth=(self.user,self.password)) - #response = requests.get(url,auth=(self.user,self.password), headers=headers) - - - except Exception as e: - self.logger.warning("Problem checking Authentication 
{0}: {1}".format(url, e)) - return False - if response != False: - self.logger.info("Your are still logged in to the Bosch-Web-API") - return True - else: - self.logger.info("Your are not logged in to the Bosch-Web-API") - return False - - - - def _auth(self): - url = self.indego_url + 'authenticate' - auth_response,expiration_timestamp = self._fetch_url(url, self.user, self.password, 10,{"device":"", "os_type":"Android", "os_version":"4.0", "dvc_manuf":"unknown", "dvc_type":"unknown", "accept_tc_id": "202012"}) - if auth_response == False: - self.logger.error('AUTHENTICATION INDEGO FAILED! Plugin not working now.') - else: - self.last_login_timestamp = datetime.timestamp(datetime.now()) - self.expiration_timestamp = expiration_timestamp - self.logger.debug("String Auth: " + str(auth_response)) - self.context_id = auth_response['contextId'] - self.logger.info("context ID received :{}".format(self.context_id)) - self.user_id = auth_response['userId'] - self.logger.info("User ID received :{}".format(self.user_id)) - self.alm_sn = auth_response['alm_sn'] - self.logger.info("Serial received : {}".format(self.alm_sn)) - self._log_communication('Auth ', 'Expiration time {}'.format(expiration_timestamp), str(auth_response)) def _getrefreshToken(self): myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/token' @@ -1107,426 +1046,622 @@ def _getrefreshToken(self): myJson = json.loads (response.content.decode()) self._refresh_token = myJson['refresh_token'] self._bearer = myJson['access_token'] + self.context_id = self._bearer[:10]+ '.......' self.token_expires = myJson['expires_in'] self.last_login_timestamp = datetime.timestamp(datetime.now()) self.expiration_timestamp = self.last_login_timestamp + self.token_expires - def _login2Bosch(self): - # Standardvalues - self.login_pending = True - code_challenge = 'iGz3HXMCebCh65NomBE5BbfSTBWE40xLew2JeSrDrF4' - code_verifier = '9aOBN3dvc634eBaj7F8iUnppHeqgUTwG7_3sxYMfpcjlIt7Uuv2n2tQlMLhsd0geWMNZPoryk_bGPmeZKjzbwA' - nonce = 'LtRKgCy_l1abdbKPuf5vhA' - myClientID = '65bb8c9d-1070-4fb4-aa95-853618acc876' # that the Client-ID for the Bosch-App - - myPerfPayload ={ - "navigation": { - "type": 0, - "redirectCount": 0 - }, - "timing": { - "connectStart": 1678187315976, - "navigationStart": 1678187315876, - "loadEventEnd": 1678187317001, - "domLoading": 1678187316710, - "secureConnectionStart": 1678187315994, - "fetchStart": 1678187315958, - "domContentLoadedEventStart": 1678187316973, - "responseStart": 1678187316262, - "responseEnd": 1678187316322, - "domInteractive": 1678187316973, - "domainLookupEnd": 1678187315958, - "redirectStart": 0, - "requestStart": 1678187316010, - "unloadEventEnd": 0, - "unloadEventStart": 0, - "domComplete": 1678187317001, - "domainLookupStart": 1678187315958, - "loadEventStart": 1678187317001, - "domContentLoadedEventEnd": 1678187316977, - "redirectEnd": 0, - "connectEnd": 1678187316002 - }, - "entries": [ - { - "name": "https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id=65bb8c9d-1070-4fb4-aa95-853618acc876&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256".format(code_challenge), - "entryType": "navigation", - "startTime": 0, - "duration": 1125.3999999999849, - 
"initiatorType": "navigation", - "nextHopProtocol": "http/1.1", - "workerStart": 0, - "redirectStart": 0, - "redirectEnd": 0, - "fetchStart": 82.29999999997517, - "domainLookupStart": 82.29999999997517, - "domainLookupEnd": 82.29999999997517, - "connectStart": 99.99999999999432, - "connectEnd": 126.29999999998631, - "secureConnectionStart": 117.4999999999784, - "requestStart": 133.7999999999795, - "responseStart": 385.5999999999824, - "responseEnd": 445.699999999988, - "transferSize": 66955, - "encodedBodySize": 64581, - "decodedBodySize": 155950, - "serverTiming": [], - "workerTiming": [], - "unloadEventStart": 0, - "unloadEventEnd": 0, - "domInteractive": 1097.29999999999, - "domContentLoadedEventStart": 1097.29999999999, - "domContentLoadedEventEnd": 1100.999999999999, - "domComplete": 1125.2999999999815, - "loadEventStart": 1125.3999999999849, - "loadEventEnd": 1125.3999999999849, - "type": "navigate", - "redirectCount": 0 - }, - { - "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", - "entryType": "resource", - "startTime": 1038.0999999999858, - "duration": 21.600000000006503, - "initiatorType": "xmlhttprequest", - "nextHopProtocol": "", - "workerStart": 0, - "redirectStart": 0, - "redirectEnd": 0, - "fetchStart": 1038.0999999999858, - "domainLookupStart": 0, - "domainLookupEnd": 0, - "connectStart": 0, - "connectEnd": 0, - "secureConnectionStart": 0, - "requestStart": 0, - "responseStart": 0, - "responseEnd": 1059.6999999999923, - "transferSize": 0, - "encodedBodySize": 0, - "decodedBodySize": 0, - "serverTiming": [], - "workerTiming": [] - }, - { - "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/bosch-header.png", - "entryType": "resource", - "startTime": 1312.7999999999815, - "duration": 7.900000000006457, - "initiatorType": "css", - "nextHopProtocol": "", - "workerStart": 0, - "redirectStart": 0, - "redirectEnd": 0, - "fetchStart": 1312.7999999999815, - "domainLookupStart": 0, - "domainLookupEnd": 0, - "connectStart": 0, - "connectEnd": 0, - "secureConnectionStart": 0, - "requestStart": 0, - "responseStart": 0, - "responseEnd": 1320.699999999988, - "transferSize": 0, - "encodedBodySize": 0, - "decodedBodySize": 0, - "serverTiming": [], - "workerTiming": [] - } - ], - "connection": { - "onchange": None, - "effectiveType": "4g", - "rtt": 150, - "downlink": 1.6, - "saveData": False, - "downlinkMax": None, - "type": "unknown", - "ontypechange": None - } - } - - myReqPayload = { - "pageViewId":'', - "pageId":"CombinedSigninAndSignup", - "trace":[ - { - "ac":"T005", - "acST":1678187316, - "acD":7 - }, - { - "ac":"T021 - URL:https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", - "acST":1678187316, - "acD":119 - }, - { - "ac":"T019", - "acST":1678187317, - "acD":44 - }, - { - "ac":"T004", - "acST":1678187317, - "acD":19 - }, - { - "ac":"T003", - "acST":1678187317, - "acD":5 - }, - { - "ac":"T035", - "acST":1678187317, - "acD":0 + + def _login_single_key_id(self,user, pwd): + try: + # Standardvalues + code_challenge = 'iGz3HXMCebCh65NomBE5BbfSTBWE40xLew2JeSrDrF4' + code_verifier = '9aOBN3dvc634eBaj7F8iUnppHeqgUTwG7_3sxYMfpcjlIt7Uuv2n2tQlMLhsd0geWMNZPoryk_bGPmeZKjzbwA' + nonce = 'LtRKgCy_l1abdbKPuf5vhA' + myClientID = '65bb8c9d-1070-4fb4-aa95-853618acc876' # das ist die echte Client-ID + step = 0 + + myPerfPayload ={ + "navigation": { + "type": 0, + "redirectCount": 0 }, - { - "ac":"T030Online", - "acST":1678187317, - "acD":0 + "timing": { + "connectStart": 1678187315976, + "navigationStart": 
1678187315876, + "loadEventEnd": 1678187317001, + "domLoading": 1678187316710, + "secureConnectionStart": 1678187315994, + "fetchStart": 1678187315958, + "domContentLoadedEventStart": 1678187316973, + "responseStart": 1678187316262, + "responseEnd": 1678187316322, + "domInteractive": 1678187316973, + "domainLookupEnd": 1678187315958, + "redirectStart": 0, + "requestStart": 1678187316010, + "unloadEventEnd": 0, + "unloadEventStart": 0, + "domComplete": 1678187317001, + "domainLookupStart": 1678187315958, + "loadEventStart": 1678187317001, + "domContentLoadedEventEnd": 1678187316977, + "redirectEnd": 0, + "connectEnd": 1678187316002 }, - { - "ac":"T002", - "acST":1678187328, - "acD":0 + "entries": [ + { + "name": "https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id=65bb8c9d-1070-4fb4-aa95-853618acc876&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce={}&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256".format(nonce,code_challenge), + "entryType": "navigation", + "startTime": 0, + "duration": 1125.3999999999849, + "initiatorType": "navigation", + "nextHopProtocol": "http/1.1", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 82.29999999997517, + "domainLookupStart": 82.29999999997517, + "domainLookupEnd": 82.29999999997517, + "connectStart": 99.99999999999432, + "connectEnd": 126.29999999998631, + "secureConnectionStart": 117.4999999999784, + "requestStart": 133.7999999999795, + "responseStart": 385.5999999999824, + "responseEnd": 445.699999999988, + "transferSize": 66955, + "encodedBodySize": 64581, + "decodedBodySize": 155950, + "serverTiming": [], + "workerTiming": [], + "unloadEventStart": 0, + "unloadEventEnd": 0, + "domInteractive": 1097.29999999999, + "domContentLoadedEventStart": 1097.29999999999, + "domContentLoadedEventEnd": 1100.999999999999, + "domComplete": 1125.2999999999815, + "loadEventStart": 1125.3999999999849, + "loadEventEnd": 1125.3999999999849, + "type": "navigate", + "redirectCount": 0 + }, + { + "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", + "entryType": "resource", + "startTime": 1038.0999999999858, + "duration": 21.600000000006503, + "initiatorType": "xmlhttprequest", + "nextHopProtocol": "", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 1038.0999999999858, + "domainLookupStart": 0, + "domainLookupEnd": 0, + "connectStart": 0, + "connectEnd": 0, + "secureConnectionStart": 0, + "requestStart": 0, + "responseStart": 0, + "responseEnd": 1059.6999999999923, + "transferSize": 0, + "encodedBodySize": 0, + "decodedBodySize": 0, + "serverTiming": [], + "workerTiming": [] + }, + { + "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/bosch-header.png", + "entryType": "resource", + "startTime": 1312.7999999999815, + "duration": 7.900000000006457, + "initiatorType": "css", + "nextHopProtocol": "", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 1312.7999999999815, + "domainLookupStart": 0, + "domainLookupEnd": 0, + "connectStart": 0, + "connectEnd": 0, + "secureConnectionStart": 0, + "requestStart": 0, + "responseStart": 0, + "responseEnd": 1320.699999999988, + "transferSize": 0, + "encodedBodySize": 0, + "decodedBodySize": 0, + "serverTiming": [], + "workerTiming": [] + } + ], + 
"connection": { + "onchange": None, + "effectiveType": "4g", + "rtt": 150, + "downlink": 1.6, + "saveData": False, + "downlinkMax": None, + "type": "unknown", + "ontypechange": None } - ] - } - # Create a session - mySession = requests.session() - - # Collect some Cookies - - url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id={}&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256'.format(myClientID,code_challenge) + } - myHeader = {'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', - 'accept-encoding' : 'gzip, deflate, br', - 'accept-language' : 'en-US', - 'connection' : 'keep-alive', - 'host' : 'prodindego.b2clogin.com', - 'user-agent' : 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' - } - mySession.headers = myHeader + myReqPayload = { + "pageViewId":'', + "pageId":"CombinedSigninAndSignup", + "trace":[ + { + "ac":"T005", + "acST":1678187316, + "acD":7 + }, + { + "ac":"T021 - URL:https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", + "acST":1678187316, + "acD":119 + }, + { + "ac":"T019", + "acST":1678187317, + "acD":44 + }, + { + "ac":"T004", + "acST":1678187317, + "acD":19 + }, + { + "ac":"T003", + "acST":1678187317, + "acD":5 + }, + { + "ac":"T035", + "acST":1678187317, + "acD":0 + }, + { + "ac":"T030Online", + "acST":1678187317, + "acD":0 + }, + { + "ac":"T002", + "acST":1678187328, + "acD":0 + } + ] + } + # Create a session + mySession = requests.session() - response = mySession.get(url, allow_redirects=True ) - self._log_communication('GET ', url, response.status_code) - - myText= response.content.decode() - myText1 = myText[myText.find('"csrf"')+8:myText.find('"csrf"')+300] - myCsrf = (myText1[:myText1.find(',')-1]) + # Collect some Cookies - myText1 = myText[myText.find('nonce'):myText.find('nonce')+40] - myNonce = myText1.split('"')[1] + url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id={}&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce={}&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256'.format(myClientID,nonce,code_challenge) + loginReferer = url - myText1 = myText[myText.find('pageViewId'):myText.find('pageViewId')+60] - myPageViewID = myText1.split('"')[2] + myHeader = {'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', + 'accept-encoding' : 'gzip, deflate, br', + 'accept-language' : 'en-US', + 'connection' : 'keep-alive', + 'host' : 'prodindego.b2clogin.com', + 'user-agent' : 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' + } + mySession.headers = myHeader - myReqPayload['pageViewId']=myPageViewID - - mySession.headers['x-csrf-token'] = myCsrf - mySession.headers['referer'] = url - mySession.headers['origin'] = 
'https://prodindego.b2clogin.com' - mySession.headers['host'] = 'prodindego.b2clogin.com' - mySession.headers['x-requested-with'] = 'XMLHttpRequest' - mySession.headers['content-length'] = str(len(json.dumps(myPerfPayload))) - mySession.headers['content-type'] = 'application/json; charset=UTF-8' - mySession.headers['accept-language'] = 'en-US,en;q=0.9' - - - myState = mySession.cookies['x-ms-cpim-trans'] - myCookie = json.loads(base64.b64decode(myState).decode()) - myNewState = '{"TID":"'+myCookie['C_ID']+'"}' - myNewState = base64.b64encode(myNewState.encode()).decode()[:-2] - #'{"TID":"8912c0e6-defb-4d58-858b-27d1cfbbe8f5"}' - #eyJUSUQiOiI4OTEyYzBlNi1kZWZiLTRkNTgtODU4Yi0yN2QxY2ZiYmU4ZjUifQ - - - myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/client/perftrace?tx=StateProperties={}&p=B2C_1A_signup_signin'.format(myNewState) - response=mySession.post(myUrl,data=json.dumps(myPerfPayload)) - self._log_communication('GET ', myUrl, response.status_code) - - - myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/api/CombinedSigninAndSignup/unified' - mySession.headers['accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' - mySession.headers['accept-encoding'] = 'gzip, deflate, br' - mySession.headers['upgrade-insecure-requests'] = '1' - mySession.headers['sec-fetch-mode'] = 'navigate' - mySession.headers['sec-fetch-dest'] = 'document' - mySession.headers['sec-fetch-user'] = '?1' - mySession.headers['sec-fetch-site'] = 'same-origin' - - del mySession.headers['content-length'] - del mySession.headers['content-type'] - del mySession.headers['x-requested-with'] - del mySession.headers['x-csrf-token'] - del mySession.headers['origin'] - - myParams = { - 'claimsexchange': 'BoschIDExchange', - 'csrf_token': myCsrf, - 'tx': 'StateProperties=' + myNewState, - 'p': 'B2C_1A_signup_signin', - 'diags': myReqPayload - } - # Get the redirect-URI - response = mySession.get(myUrl,allow_redirects=False,params=myParams) - self._log_communication('GET ', myUrl, response.status_code) - try: - if (response.status_code == 302): - myText = response.content.decode() - myText1 = myText[myText.find('href') + 6:] - myNewUrl = myText1.split('"')[0].replace('&','&') - else: + response = mySession.get(url, allow_redirects=True ) + self._log_communication('GET ', url, response.status_code) + myText= response.content.decode() + myText1 = myText[myText.find('"csrf"')+8:myText.find('"csrf"')+300] + myCsrf = (myText1[:myText1.find(',')-1]) + + myText1 = myText[myText.find('nonce'):myText.find('nonce')+40] + myNonce = myText1.split('"')[1] + + myText1 = myText[myText.find('pageViewId'):myText.find('pageViewId')+60] + myPageViewID = myText1.split('"')[2] + + myReqPayload['pageViewId']=myPageViewID + + mySession.headers['x-csrf-token'] = myCsrf + mySession.headers['referer'] = url + mySession.headers['origin'] = 'https://prodindego.b2clogin.com' + mySession.headers['host'] = 'prodindego.b2clogin.com' + mySession.headers['x-requested-with'] = 'XMLHttpRequest' + mySession.headers['content-length'] = str(len(json.dumps(myPerfPayload))) + mySession.headers['content-type'] = 'application/json; charset=UTF-8' + mySession.headers['accept-language'] = 'en-US,en;q=0.9' + + + myState = mySession.cookies['x-ms-cpim-trans'] + myCookie = json.loads(base64.b64decode(myState).decode()) + myNewState = '{"TID":"'+myCookie['C_ID']+'"}' + myNewState = 
base64.b64encode(myNewState.encode()).decode()[:-2] + #'{"TID":"8912c0e6-defb-4d58-858b-27d1cfbbe8f5"}' + #eyJUSUQiOiI4OTEyYzBlNi1kZWZiLTRkNTgtODU4Yi0yN2QxY2ZiYmU4ZjUifQ + + + myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/client/perftrace?tx=StateProperties={}&p=B2C_1A_signup_signin'.format(myNewState) + CollectingCookie = {} + for c in mySession.cookies: + CollectingCookie[c.name] = c.value + + + response=mySession.post(myUrl,data=json.dumps(myPerfPayload),cookies=CollectingCookie) + self._log_communication('POST ', myUrl, response.status_code) + + myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/api/CombinedSigninAndSignup/unified' + mySession.headers['accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' + mySession.headers['accept-encoding'] = 'gzip, deflate, br' + mySession.headers['upgrade-insecure-requests'] = '1' + mySession.headers['sec-fetch-mode'] = 'navigate' + mySession.headers['sec-fetch-dest'] = 'document' + mySession.headers['sec-fetch-user'] = '?1' + mySession.headers['sec-fetch-site'] = 'same-origin' + + + del mySession.headers['content-length'] + del mySession.headers['content-type'] + del mySession.headers['x-requested-with'] + del mySession.headers['x-csrf-token'] + del mySession.headers['origin'] + + myParams = { + 'claimsexchange': 'BoschIDExchange', + 'csrf_token': myCsrf, + 'tx': 'StateProperties=' + myNewState, + 'p': 'B2C_1A_signup_signin', + 'diags': myReqPayload + } + # Get the redirect-URI + + response = mySession.get(myUrl,allow_redirects=False,params=myParams) + self._log_communication('GET ', myUrl, response.status_code) + try: + if (response.status_code == 302): + myText = response.content.decode() + myText1 = myText[myText.find('href') + 6:] + myNewUrl = myText1.split('"')[0].replace('&','&') + else: + pass + except: pass - except: - pass - mySession.headers['sec-fetch-site'] = 'cross-site' - mySession.headers['host'] = 'identity.bosch.com' + mySession.headers['sec-fetch-site'] = 'cross-site' + mySession.headers['host'] = 'identity.bosch.com' - # Get the CIAMIDS - response = mySession.get(myNewUrl,allow_redirects=True) - self._log_communication('GET ', myNewUrl, response.status_code) - try: - if (response.history[0].status_code != 302): + # Get the CIAMIDS + response = mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET ', myNewUrl, response.status_code) + try: + if (response.history[0].status_code != 302): + pass + else: + myNewUrl = response.history[0].headers['location'] + except: pass - else: - myNewUrl = response.history[0].headers['location'] - except: - pass - - # Signin to Session - response = mySession.get(myNewUrl,allow_redirects=False) - self._log_communication('GET ', myNewUrl, response.status_code) - - # Authorize -IDS - myNewUrl = response.headers['location'] - mySession.headers['host'] = 'identity-myprofile.bosch.com' - mySession.headers['upgrade-insecure-requests']='1' - cookie_obj = 
requests.cookies.create_cookie(domain="identity-myprofile.bosch.com",name=".AspNetCore.Identity.Application",value="CfDJ8BbTLtgL3GFMvpWDXN913TQqlMWfpnzYNGGcX0qV3_e1mcxyuYndGzcNXVwoAHCyvY3Ad_1bkYnLsg-J56IdLUNQVMguFnS_KWkPbzib4u6SQtdCZfbiIPV_ZUh4xK-Pd-LgJ61Fi4ljxbb4CewKJRAaDyOhS7KPUu68EVdzte3mEYGm2z8PeSvViW6cGgQeIIOcJ1G3f7XG_s2synfm4o6MDA49a1WnkBIk1kXBodq-vKYXZNMLHOtNGVNE2aZ_k5b9E4mGQVeuncw6SupEku9dCXgO0tRRFK0qUX-41JVrgQdz5v4c_4NB--i1U1b7LUmoZrTtkv0a5KcGPTGz9cZqV5D_Ki4p5uoQxZCmDBPbyecSe6xF3m4yGpEC6hTfrOEJR4LdX6mnppjnXMSc1Y9Pr0Lui3FGeBGuK8GyT4QXJ-pnFrLyF8dh6g2ovkeRvI8MlS5DLSLy_d0s2nOgUxVQPxDsVCxtIMJhE14tSUnC9oRDB_6YUxOqMTEJ_dFacHt-s4iLD2ClBLtA6MsDQcF5pYe4ZOt9zLMuLcoO1NqD3Ca0r00Y0qdkGFGvckp5Xqf7QndkcZxKMPE3GtfH8o6uMsFd7hs1xstxBlT2pgrp0fjjk5R8ugOzJDv-BXarCbjXTzLJtAMVYO4dzorJ7xnXAZDK4IczfXIgxZliwOnTCBvwGIx5CHZfnkYlfhS1PbOE0bwR-sqvJXCS8Jmh6BjmSPHcoKxWxJbLa_wok5HsYmOJgQhVE49WgwuBV88sFvoxpnK_pp1IRR0jFfnV4stT905lkd8hNj5D8o3aZ35sHZDuNPYEXFNUPDORoFnfHkNAP33r126a00n-fLLjaBhFa7W5PnPDaD-M-luVP7nIL-c2tlVon_XRZRC5KMzO4FuOqCeCFwsh3jTtpJk5_iUS4EpHvHT5ldZtRVShC2uzZQ63N_LWl5KZwVlWXPCaLECCZwsGfaAJz0HKDlC-vgXuWL7odJKInmIsi4BJeM9xe280pPDwD6FNUhSOAM2GZgCAW2jilScn5hA2pS1HsLD9yLV0-80Rk9UR9RmRt7USsIOf_7qFMnijAV3MZq9wNKt7ZTBDCI40dxQ1WCYSUV0") - mySession.cookies.set_cookie(cookie_obj) - response = mySession.get(myNewUrl,allow_redirects=False) - self._log_communication('GET ', myNewUrl, response.status_code) - - # Get the login page with redirect URI - returnUrl = myNewUrl - myNewUrl='https://identity-myprofile.bosch.com/ids/login?ReturnUrl='+returnUrl - response=mySession.get(myNewUrl,allow_redirects=True) - self._log_communication('GET', myNewUrl, response.status_code) - myText = response.content.decode() - # find all the needed values - RequestVerificationToken = myText[myText.find('__RequestVerificationToken'):myText.find('__RequestVerificationToken')+300].split('"')[4] - postData = { - 'meta-information' : '', - 'uEmail' : self.user, - 'uPassword' : self.password, - 'ReturnUrl' : returnUrl[36:58]+'/callback'+returnUrl[58:], - '__RequestVerificationToken' : RequestVerificationToken - } - mySession.headers['content-type'] = 'application/x-www-form-urlencoded' - mySession.headers['sec-fetch-sites'] = 'same-origin' - mySession.headers['origin'] = '' - response=mySession.post(myNewUrl,data=postData,allow_redirects=True) - self._log_communication('POST ', myNewUrl, response.status_code) - - ######################################### - mySession.headers['pragma'] = 'no-cache' - mySession.headers['request-context'] = response.history[0].headers['request-context'] - mySession.headers['host'] = 'identity.bosch.com' - myNewUrl = response.history[1].headers['location'] - - # Collect next Cookie - response = mySession.get(myNewUrl,allow_redirects=False) - self._log_communication('GET ', myNewUrl, response.status_code) - - #Get Location for autorization - myNewUrl = 'https://identity.bosch.com/callback' - response=mySession.get(myNewUrl,allow_redirects=False) - self._log_communication('GET', myNewUrl, response.status_code) - myNewUrl = response.headers['location'] - - #Get Authorize-Informations - response = mySession.get(myNewUrl,allow_redirects=False) - self._log_communication('GET ', myNewUrl, response.status_code) - - # Get the post-Fields - myText= response.content.decode() - myCode = myText[myText.find('"code"')+14:myText.find('"code"')+300].split('"')[0] - mySessionState = myText[myText.find('"session_state"')+23:myText.find('"session_state"')+300].split('"')[0] - myState = 
myText[myText.find('"state"')+15:myText.find('"state"')+300].split('"')[0] - - request_body = {"code" : myCode, "state" : myState, "session_state=" : mySessionState } - - mySession.headers['host'] = 'prodindego.b2clogin.com' - mySession.headers['origin'] = 'https://identity.bosch.com' - mySession.headers['content-type'] = 'application/x-www-form-urlencoded' - mySession.headers['cache-control'] = 'max-age=0' - - del mySession.headers['pragma'] - del mySession.headers['request-context'] - - myNewUrl='https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/oauth2/authresp' - response = mySession.post(myNewUrl,data=request_body,allow_redirects=False) - self._log_communication('POST ', myNewUrl, response.status_code) - myNewUrl = response.headers['location'] - - myFinalCode = myNewUrl.split("code")[1].split("=")[1] - - # Get the new Login-Page - url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id={}&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256'.format(myClientID,code_challenge) - mySession.headers['host'] = 'prodindego.b2clogin.com' - del mySession.headers['content-type'] - del mySession.headers['origin'] - del mySession.headers['referer'] - response = mySession.get(url,allow_redirects=False) - self._log_communication('GET ', url, response.status_code) - - # Now Post for a token - mySession.close() - request_body = { - 'code' : myFinalCode, - 'grant_type' : 'authorization_code', - 'redirect_uri' : 'com.bosch.indegoconnect://login', - 'code_verifier' : code_verifier, - 'client_id' : myClientID + # Signin to Session + response = mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # Authorize -IDS + myNewUrl = response.headers['location'] + mySession.headers['host'] = 'identity-myprofile.bosch.com' + mySession.headers['upgrade-insecure-requests']='1' + + response = mySession.get(myNewUrl,allow_redirects=False) + myNewUrl=response.headers['location'] + + postConfirmUrl = myNewUrl[myNewUrl.find('postConfirmReturnUrl'):myNewUrl.find('postConfirmReturnUrl')+300].split('"') + # Get the login page with redirect URI + #returnUrl = myNewUrl + #myNewUrl='https://identity-myprofile.bosch.com/ids/login?ReturnUrl='+returnUrl + + step2_AuthorizeUrl = myNewUrl + response=mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET ', myNewUrl, response.status_code) + myText = response.content.decode() + # find all the needed values + RequestVerificationToken = myText[myText.find('__RequestVerificationToken'):myText.find('__RequestVerificationToken')+300].split('"')[4] + contentReturnUrl = myText[myText.find('ReturnUrl'):myText.find('ReturnUrl')+1600].split('"')[4] + ReturnUrl =myText[myText.find('ReturnUrl'):myText.find('ReturnUrl')+300].split('"')[4] + + postConfirmUrl = myText[myText.find('postConfirmReturnUrl'):myText.find('postConfirmReturnUrl')+700].split('"') + + myNewUrl='https://identity-myprofile.bosch.com/ids/api/v1/clients/'+myText[myText.find('ciamids_'):myText.find('ciamids_')+300].split('%2')[0] + response=mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET ', myNewUrl, response.status_code) + + myNewUrl = step2_AuthorizeUrl+'&skid=true' + 
mySession.headers['sec-fetch-site']='same-origin' + mySession.headers['accept']='text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' + del (mySession.headers['referer']) + # https://identity-myprofile.bosch.com + # /ids/login?ReturnUrl=%2Fids%2Fconnect%2Fauthorize%2Fcallback%3Fclient_id%3Dcentralids_65EC204B-85B2-4EC3-8BB7-F4B0F69D77D7%26redirect_uri%3Dhttps%253A%252F%252Fidentity.bosch.com%26response_type%3Dcode%26scope%3Dopenid%2520profile%2520email%26state%3DOpenIdConnect.AuthenticationProperties%253DZIkEldcO9j64ZsZ8lxkOF43KmLm9E-R7KiQ6vyWOHRY5coi-sQOCNbtzVfTmM30G2dQ8taj9dupmlMsdfl_aeQfBTLbXNPCoPMduVcoXcUVDx-G2Wo1BhyJmZZryQWMKBGVS5akW3441ocWSmzZ3sseK4ysrm14GxCYIjaXQLw-5-jqSp5xQ3fTbCIIuiEI0zql0bnoAQW2ElbUfFxCZGg2BPRJeIBGddPQOJ_TVR0fZ_Rb2Ex5CJorqDK-GzAq_eKEcqhLwSw3jLLJeyXqHiP8lVwo%26nonce%3D638175139721725147.NTYxMTVjOTEtZGQ2MC00NWFlLWFlMzgtZGRiNWVjZGNjZTNjMTk1ODMwMGQtNTY4OS00MDY5LThiYWItZDRjMTNkZmEzZTEy%26code_challenge%3DVZhREJ7Xv0gvQw6ehTBc55P9Lh3qWX7CiW7wTYYxqY0%26code_challenge_method%3DS256%26postConfirmReturnUrl%3Dhttps%253A%252F%252Fidentity.bosch.com%252Fconnect%252Fauthorize%253Fclient_id%253Dciamids_12E7F9D5-613D-444A-ACD3-838E4D974396%2526redirect_uri%253Dhttps%25253A%25252F%25252Fprodindego.b2clogin.com%25252Fprodindego.onmicrosoft.com%25252Foauth2%25252Fauthresp%2526response_type%253Dcode%2526scope%253Dopenid%252520profile%252520email%2526response_mode%253Dform_post%2526nonce%253DTRg%25252FDjkgw7qNuS2Rh3OslA%25253D%25253D%2526state%253DStateProperties%25253DeyJTSUQiOiJ4LW1zLWNwaW0tcmM6MjU2YzJjOWYtMzlkNC00Y2E2LWFlYTctMTYwZmE4ZTY1ZWRhIiwiVElEIjoiZTgxZjU1MWUtMmM4MC00YmNjLWI4ODgtYjU2NGJlMmEwYzllIiwiVE9JRCI6ImI4MTEzNjgxLWFlZjQtNDc0Yi05YmEyLTI1Mjk0Y2FhNDhmYyJ9%26x-client-SKU%3DID_NET461%26x-client-ver%3D6.7.1.0&skid=true + + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + myNewUrl = response.headers['location'] + + # Now go for the single-key-id + # Get Single-Key-Site + # 1. 
Step + # + # auth/connect/authorize?client_id=7ca4e64b-faf6-ce9e-937d-6639339b4dac&redirect_uri=https%3A%2F%2Fidentity-myprofile.bosch.com%2Fids%2Fsignin-oidc&response_type=code&scope=openid%20profile%20email&code_challenge=5U4qTGs6v14xAZWvC3lENuVSzuvWLJ0IodizL75YWzk&code_challenge_method=S256&nonce=638175138999064799.OTMzYTExZGMtOGVhZS00MTQ5LTk2MmYtMTA0Mzg4YmJmYmVhOWYyZjEwZDYtZDE3Ny00OTczLTk1MDEtN2FmMjVmOTZiZmJm&state=CfDJ8BbTLtgL3GFMvpWDXN913TRMZYhIvGAQNZTmV8MG88I2iNRhqMCEorJUpmP8ShwrAEBHAAVfh7FgjR4gVnk3eQVuq_P-BvSRzmMKejfb_qxh7fq_Nhp8ULWZ9lU1LZzNEj140CnHaTaLY7LwsP5rXBy-JrdnDiYpPJOYMVswdn6BDZI_EvLnHqd4JJZ0P5Itay4pC0wyfKv2plk3_EyoOMteqnUFvGvfKUeevbbUScXXLwfdNgWjej3nP3BkCW5HDu3PDAz2g4jsC8l5eDZIIcYxpm3jXOcNJC_8B_2JwY9QTjeHDyfV3JNhPUruDTOCHDI4MWuh79pV5Eo-mHrkumSHfQRuycpQS7H6H5UT59io4D9B2xJfPrl-7tBU_5toC1nah0nUkfyyxirPzI6vBTXuJPiHdXC1mjj3wDX2UbFJEFhuvHuOsAzdICLtxS1rySeRcKcFD8nFGnSvIuVodgJSR9PRpXvzmS3cgaS0zae5FYN5LsDO7tTtTPTLadfaqQd11LjNqT7EZTnvT1SpW0HGiBBwPxzr8vSZGQG-xelop7scHH8SYGL4SrUn-nxvHbBYGDIwPrAoMYsIYfzcjPLZ4_VSPFPxVZcAK-8_i_LqKbUhm1ECJlfgOn5hOZhQGTR5uqXZTBJTSIrbHcdc0XMSXZSIi56qBWXiEHwYLAOtiEbNUVjMckyUI-HnrBmfZMhpiLndgLXHhLDr7lNIsBjll_1QOhZxFukftPVwFXIrgKPXWvUz4zky9mwFk9mwQvD2Ip77WvZz5MhJrfCNaPlASLelSdJlVQVRY3-qBdg7CzxFyJs_HOZX37oXsqH02lwda5uAHU2GAtNLfkmj_WGE05qB04gLfn9Y6rf_cXix4oIltbQqf57VUt7xdBKplcQUqeGqoDOg5eHLiz_-9iHu7GHRmqt3SDVxBrZlvL9KxwHAAuUpDJ97oRD51KdZlFFYjTNDjrHHrajshD6yRqFx2a4mGsd_pI_wnS12d9oZs8ILn3Lhdz9ATpNADGoTjbf0dRG8L-hMEx1DBeVID1GztCT-WIbl57xK-2NfbGjQ3MGk5W4vNxwGcxt3L6eRgrAIgAIGlTLHVJc-nQ4RSabzOB-kfUx-mTCHJYMkawqGIrJKkfozj-8aNYoE-wXUVFB63D-xVS25r5V0ttUGehjc4eZjN9JQA6U-ZZXe4UNv5hW8XVCYd-IT83JV340pMqERBjRNYAOPUn3LrDwXwFSKyYecgXMoyZ2d7wEZ-zuqHNCnlAKkLpsR5fJuybDPYNDDdFFHz-du-G2Aq38EUSBPjoUZVRjIUHhQfOUOEicg29ReBOueI-I61pkJKdgfiI7Zezy7Uit71CiZ1kNDjLWC0JkvpiUbXAv_TUySqk62tNkDL2T8E7gj1aT250Pxg7gSmmpBxRWv_0ZltyXwRTR564egzv0BDe4mhIOX5sNGL1HjwBJidNrZ0Q2jL6qr1a2W6DFIyuQ68eZmFAiq4WDkdv928fWMedndQHjgw6t1gBnG6l-J_JINqYaw0vnDUsyWKSzErcf5LN-4_o9RjwcJ3A0iui6PpUyYpQlRlwhobOlCa7V4_4sNQXH5-dlD6lhvXEtHHdBjb9xB9MNIwAJCMkoNU3q3ln10yeFBh_W6Iy25bPthFOeIONFWhC_FslnJvAzlX_kLCieFrGpOmkgE5V9FN-I9fxDX18a3JgdG-qt3YZzgcjTwwiM7YtgRHU4Ikmo7TqaOMfdJjz-Y3FPoaSOKUv6_eVfoY22lNyOMvU-SGLec_7MfpOR0YD2Cvz9Ibo6uh0umjrRHQKEIjzeR0yBjdl68BkoLu7qE0A_tVbcUK918fK2eExs7LONzVshb0_Ruwk5u1sqeft5AWxfYBZSSfnOwzHhS1-PZuWZSF9YVZXd72aKVgvWcyAEDOnsCifsXXzaboJAzs7K00gq9Tq-o3Mlfd44jugQ5-_maYnV9oY646o7ILJ6FD1A93X1mYkR6V7Ma6hxADmoYmD3-teZo_EVmSH6w_ElnYF98-TRyhXqI7tUK10c92kqB_biWHlH25cE-KvH3MaqBkGt1PSBr7kbpX9bxAKS9vP9gYEwCqJG6Ho796cItahFicQDqL2XdxUARA6eyeQUwwz216rIKsPypst-hCyqFWVcv7IS_hzVtSzJfxPLCkmzMmD84u6OL_SMO_GVfnb0X5C33ndFqRu6_aa-6QnuHpyyOBsuWUV_JZ5GED7PajJ-K16Py_23vRp4gKXpfXCOH8E3wESB2aSCXWC5It7tgqQBpdxiavH6CcvsfN-JrBRbHgw&x-client-SKU=ID_NETSTANDARD2_0&x-client-ver=5.6.0.0' + # https://singlekey-id.com + step += 1 + del (mySession.headers['host']) + mySession.headers['user-agent'] = 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' + mySession.headers['sec-fetch-site']='cross-site' + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 2. 
Step + # https://singlekey-id.com + # auth/log-in?ReturnUrl=%2Fauth%2Fconnect%2Fauthorize%2Fcallback%3Fclient_id%3D7ca4e64b-faf6-ce9e-937d-6639339b4dac%26redirect_uri%3Dhttps%253A%252F%252Fidentity-myprofile.bosch.com%252Fids%252Fsignin-oidc%26response_type%3Dcode%26scope%3Dopenid%2520profile%2520email%26code_challenge%3D_H7Bn1EBzLmdvYxRd-ZU9moDgCCOeLhuZlr2oWbTr8Y%26code_challenge_method%3DS256%26nonce%3D638175138466544692.Y2MwNjJlMDEtMjYzZS00MjZlLThhYTQtMTg0MmFjZGMwMzgzMmNmMTI0MGUtYWEyNC00OWExLWJlNTMtMTgzZDFmMDg3NTE5%26state%3DCfDJ8BbTLtgL3GFMvpWDXN913TTi4lyCR0HNQl1e_IHHsZzeJpmbm3hvFYhV6JhmVAlez_YwxFKyT18rCdVOrh8ncg6h6Wi3zCKgovjE7Jn7k9ZuZoWDC7XldFX_3Z2IzwG8WdD4V7ZVlwFChaohZ3fMDYvVPVhzbu7K-5VdMJdSGvyD0J5qaoSL0x-W76jQz15WAMP0npoq4Eyl1rjCVLTunSiQdt1mDJQE_3W1BFj41iW-1nfNeU5Xy8du_AnxyJ-UtWAAAeIr2lwaPKdyr1Mh_G3Q0QwJLxsJY-GhcJXxsBn95cUEDlHjBHRGbgn9T9ab87ppLvyMV3YI6PCu0RWs10IkIxdPFgAVZxb2PRggc8NOGQLnKIOr_2pHKmYw73JT2afa8cPc1CIobT-OS9Xt5_qfKgL6a2dl0RGc29tIPeHqlF-tolY3LSjyEEbOYJ1rLIO9MnR-HHRgq1JpmOPZe5DAl2wRmUDw16GdqQw71NRA0FmExtfLV6vKDrYJfs6IrJEaiC2dsU1mK8WVrTippBdINKhOmTgfvW0o8NiDnHmaKMg35niVI9SKgfEDlccj9EZYw9BOy9pdsPqkQzpAFpCP_BYHHhSmu-Pcj5KAqhY3wm2iRTuMuTLHWt0PP54c9l1kQ2JEoCYt9hHYLl4_D0szQVBUpJLYZgDrWtoLLRbEEPHLnO45pc6gtHyM8j15U0N4GWeiPXZJMnwA-d_dIU5meiWCc3HeA0o-F0IQ688U-GBNXg_QKyatAk6_pGdz7BAokNTiiA9IkJewfqINtjjPkujuNEMhSGNI65GSdk1PBgGAGVgY4o4G1JXUv8VMOQgR5ULtSMfepBf6z2ZIZu7lClg3YUdEGiEyfhU-Gz164D9JYbBtx0gCExg585eXsG2YZ1JNCWqu6eobXWjKBN0IejMmdw4N8uy4EvvWK9RBZ4TCZWXqPM-q0T-REUfm4HmvQvtL-WU8IO8FM1pZaHvePi6qcpivuZWnD1I7PWDbtNSai7uuEm4tMA7NpkBMlh2MNYG5XadLoFN0rrR4TJHHuq2r-AhiGXw8KlXsgff3yZdgCjn854gVjOnnvjwdTrzCaUSsSPiAZ5yFAVYUHKqIzGKXK6MQ7vBdJzfEPwFDNe4aIxEAHogn7hHLJn37b5WgrXZhUxha1zDQcEhdTtEr161soKFRJ1njLwWvXTSCbYUUfVN6BVCIAluWi0C2RLNYmSdUji3B4l_oJ5mq_gjmfdc37e3Xd1EWZtcgFRiO0yEldcscbwsltEzelF_lnK-VImZsr1Y1BD4VahyiykFZF15SAbrhVF6sAHf28mvO38dOqzdt7_B4K2VcOnmo9gM63BNw9rU_dMGLHXub4lJISoOMqeTFN_NmMUrkv41uKUc15e0e2fWS-faC4cZ6hRibrkkCdH5MyqHc8jtMDdIKp59LwXEKskaXrNUO9EL8XR_EfboHIER8dhEUG_ZuaY4FiO64ttgrRvPCC3uokeWhXsa7gx9HsONKqGjFAxCiMDba35uRpcWpunix601ex2le-6vuGpAQ-vqcMrUOvh45sAuHCIa8PLU08zx99lqrd9ERSodPfAGFBVyNh0Y0-y6d1vKYykOj4o7REphB3LnSotJruBVpCaLS1omcA88NtMkJoboKjDBPqzaIX3d9bZhnur3yoFcnjGKoismBmLgDtJY2PT3AAaOBbBjuM_KeqNC92gO_vUkXCa6MJ7JYmlnaRJVMtTFB3Ta4iSaGy7CGkz9KZZcaWPEpTEop-yb64cmkebDCWpcY9Tzouvsvg7CsX2ONM6ejOqDXo_ZpIQrEYVg7PMgZ7NxJhRlrjUDiyYQPofugwC_zaTA1p0oruIkPEmqUwgGSVaBZ8V9WBr2e_dregnUukkjKyft1secvXqaHdV1Ob684PRs_A3-zgKEHAjrkrglRljfsTzTDuDYC3uU3nxFtXFDG42SEcaDAHCPQWwr9j9KwZWgbbohX2dZly9ukvxO1WgPpfyvzKZOK7PpjsbghdIqTu9LX8gFbDNFuPoZ91X4jMG5SnL63YbdLgo68I8c_N_8bBRz1x233HRpJn0ltrxuQULalNjw7XQn9L0iNvZxDf6ZoFgOGNrtFe7PiZTRC6uksaWbFfhXAACmWrAIsi1_6KXLQfjXRhn7ZThfSVDdWWF0M9dv9AUHLoJDbt6eXYQqCTUkBJRkDXPDGzpkCpZT09FIOBFhRt5KKWkAkjldQ5l-6imVGir7LbIPoUVuMDN3C3y8oo2Vd3oWuT3-GhZXz6TkAwlTFeAGfe0g4XtW4wRrz8TYmHw%26x-client-SKU%3DID_NETSTANDARD2_0%26x-client-ver%3D5.6.0.0' + step += 1 + myNewUrl = response.headers['location'] + mySession.headers['Host']='singlekey-id.com' + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 3. 
Step + # https://singlekey-id.com + # auth/log-in/?ReturnUrl=%2Fauth%2Fconnect%2Fauthorize%2Fcallback%3Fclient_id%3D7ca4e64b-faf6-ce9e-937d-6639339b4dac%26redirect_uri%3Dhttps%253A%252F%252Fidentity-myprofile.bosch.com%252Fids%252Fsignin-oidc%26response_type%3Dcode%26scope%3Dopenid%2520profile%2520email%26code_challenge%3DTZdCd3C1FX0t08NtCr-rCmJxOrAQiPaNYytL-1Wi9TY%26code_challenge_method%3DS256%26nonce%3D638175136336911121.MGJiYTY0ZGQtN2E4My00MTJmLTg4NjgtNDc1NzAwYTExZDE3NmJhMzExMTEtYWIxZC00MjQ4LWFlNzItODI0MWYyMjg1Y2Zj%26state%3DCfDJ8BbTLtgL3GFMvpWDXN913TQFZ0MRP7aNSvpY-IwH5nqb4gdz1W3Ohx6L_5jrLoqk-iG8_Fb6txKNub_FyGnywp_hEdKhmOrG1QvFtB9Zok6PoKNloLcq1IwoxS2eFAS6qsvHBRR84Yq9D24B1d7klbITisHcQPNTYf-bsMv5w_CcMSrgzRHUFnqFPIHREMkunq1cqUfDCmOw_gFOCzZIAyp0GDRVMvJEmc7mBGjk8nhpJLtdIy2iPn2WXcueAfN7cF8jKtf_uOmSR233K2YM38GoIRgVb_ICWHJ_tqDWs7GIbMffl9D9Q5A2Aa_fopg4vZtk53G8P4jWoX47caPYSmyKwjzAPcP327lUR8tTVFduPEgcGwFrB_U41vtytxSGIrrSQAJU-GyvULXF-BwEJ_ScceQ4udNb_yFbUa8x1YMeVNqsyMvOItv46wPCW5OAycPEfzGfmcntKg4d1XVOkjr4hzc-3oehALLrCI4RFc_-3NtuUMkdZoV93QPA1pndlGajn5CkWu5_QCCa1aUR3unKd5g3OhiQ3ngxjEFbxiHcOrOt4PwaC6Nf6qvVixTXDLWvkL7MOjsSAjLZmoPtYHh4nWK6rnWKvh5J-kn-uJxZm0yKBnm59BQVemH-5XHAIzNGuXuo7R6nxUcsFWZnMCfyxu-d4ta7tUqI2LL-Sz3Fc6qw-3sVrXb9hyxITKAn1-KYNqhWokE3rzhU1itdgQyJIQd4b4eMPES-np8YNHld0hOKCeai4WD8rph054rW049Hfph1g4VVilswqfjEt7jPvIqIlEpMjAs38i6w8GREeJr2_lbrqPN0KF6Bj9jeZV4zy5LCDKItLc-ZRiBxon4dWHYhXy1UjjvPDpDepWtUhPF6FXHyYjN4D222itkTwxOuwLQc-AlWCD09A0bTi4FWOYy2IlwDhprTT15TLfb8QJ3upE8LvPNkRUiadvM0f0M214-Y_K0s53W6oUOvMtXnjqXmYRCoBW18HL1AOfyMto2aJtEB_83sE6Qf18Y2pBMZPMb2bkMURckCBhWwAVFxC0w30HaIVPfrXcRypgFBeh3GS54jrQTC3ropVvqQVUcZXNeZNCApV7M_eLsvgkUlVVeKB0-Dgce7SNaH8AcNtf-17c6WiY3T3FypjsNTzpBgobj7Ay0HL8B8FBighQ0wJxyARRyFRLHTubgVN4Y-tV2hFlc4o2eWzLeJyEFtE48KTpvz8ydXuhYoXAj6gPWoyt-VEdxYwE8OUcswQxWX7De4VyeUi8eOxHwdE3-T3blmDWLzsCuvhUSeL-ykXd0V-T6zMmSDonVL8taIt2xO14yM40X48xCp8d4slOXtZOFuVodMeV5otdZXZmeMVWgVPSUdcuCkCDP2KljEqhtOfCpDy5vDVgJKK9axabMpI-AbzINbz9vFId5wN6crBDjW0bwrQ68gCUhRcHtTtBoLC7y65onvzTLPTslASYTAIGQojvdxxOe5j_nB0iHqcnhbxUtp_vLv2UsrLVfI06MhXfJHO8Sx3LGgxhkXDMMorPnfe2gPLHB39SwZDinwewE2hQU0G-LCqUL5B3AG-lsT-i2FimJTqca6OqPkOo-QuVr2b72iskzxyOFxK6gQhNuvpmlj_47SVcRqK8HbLGU_rAYlmucAP9BbRBKnT0pcVmYpVxCt7tWnQ5uKywZRvUvWX9RVTICz7TssZCm4JnCp8_wRKMxrcJ7hFNb4h2qdjCUm4QgU16h-3L6E1j0UlRzf3w2gPiONPWt3vOGgyn-SGM1jXpLDzWfr-dUxeVlr1Z6we1fjaDo3dDZEJrj1fEeQFb9NxH6LlZmPLDHBXYex3YzO1OxUzaigPqmsMXIuh5STg78lB8k1m2cN96b8I0ohwL0eWbDvoTaLlLudfeo9RkGQ9cMkjTvQvQ4rQEfKVU1YnBM1NijW98yr9Fq8WRuoQL01_s8jnSI7htLo4u8VVpnDC1dSvErrcoM6ob-GBVstIeHdPJV36NHevlylkabKgoZKhl5tqpbVzjrKuyrc2IKdFiavPiTsSxojpFmL8fpqGZiK6XDEe6TWmrZ8Xy8QL6vDTbVtHRvW4-2WIgMdOqP20IzTQTDfUDs9FWrvo0z4JtJ_iC0ZTP3eEk5q1DGHNmzSTkAp4qq1pjgAkiU7hOrTNTFgLkBcy7wAk0eD16DzACp7mY4Z04exIHhPbou7p_904xcptBXT4XtjrS2qneEP8P5j51W0y3kCdEqiR73L0nBpLxj26NVXPDUYxLym1trfQrVyKMiFYIS8u7KiVIlWodukE7fJ1E7gQ_dfpA%26x-client-SKU%3DID_NETSTANDARD2_0%26x-client-ver%3D5.6.0.0 + step += 1 + myNewUrl = response.headers['location'] + postConfirmUrl = myNewUrl[myNewUrl.find('ReturnUrl')+10:].split('"')[0] + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 4. Step - get XSRF-Token + #https://singlekey-id.com/static/roboto-latin-400-normal-b009a76ad6afe4ebd301e36f847a29be.woff2 + step += 1 + myNewUrl='https://singlekey-id.com/favicon.ico' + response=mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET ', myNewUrl, response.status_code) + myXRSF_Token = response.history[0].cookies.get('XSRF-TOKEN') + + # 5. 
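Step 4 relies on the fact that requests keeps every intermediate (redirected) response in response.history, so a cookie that was set on one of those hops - here XSRF-TOKEN - can still be read after the final response. A small sketch of that mechanism against a generic endpoint; httpbin.org is only a stand-in for the favicon request used above.

import requests

session = requests.Session()
# httpbin redirects /cookies/set/... to /cookies, so the cookie is set on an
# intermediate response, just like XSRF-TOKEN in step 4.
response = session.get(
    'https://httpbin.org/cookies/set/XSRF-TOKEN/dummy-value',
    allow_redirects=True)

# response.history holds the redirected responses in order; the first hop
# carries the Set-Cookie header.
token = response.history[0].cookies.get('XSRF-TOKEN') if response.history else None
# The same value is also merged into the session cookie jar.
token_from_jar = session.cookies.get('XSRF-TOKEN')
print(token, token_from_jar)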
Step + step += 1 + mySession.headers['x-xsrf-token']=myXRSF_Token + + myNewUrl='https://singlekey-id.com/auth/api/v1/authentication/UserExists' + myJson = { "username": user } + + mySession.headers['origin']= 'https://singlekey-id.com' + mySession.headers['content-type']= 'application/json' + response=mySession.post(myNewUrl,json=myJson,allow_redirects=False) + self._log_communication('POST ', myNewUrl, response.status_code) + + + # 6. Step + step += 1 + postConfirmUrl = urllib.parse.unquote(postConfirmUrl) + myJson = { + "username": user, + "password": pwd, + "keepMeSignedIn": False, + "returnUrl": postConfirmUrl + } + RequestVerificationToken = mySession.cookies.get('X-CSRF-FORM-TOKEN') + mySession.cookies.set('XSRF-TOKEN',myXRSF_Token) + mySession.cookies.set('X-CSRF-FORM-TOKEN',mySession.cookies.get('X-CSRF-FORM-TOKEN')) + mySession.cookies.set('.AspNetCore.Antiforgery.085ONM3l57w',mySession.cookies.get('.AspNetCore.Antiforgery.085ONM3l57w')) + + + + mySession.headers['content-type']= 'application/json' + mySession.headers['accept'] = 'application/json, text/plain, */*' + mySession.headers['sec-fetch-site'] = 'same-origin' + mySession.headers['host'] = 'singlekey-id.com' + mySession.headers['origin'] = 'https://singlekey-id.com' + mySession.headers['sec-fetch-dest'] = 'empty' + mySession.headers['sec-fetch-mode'] = 'cors' + + del mySession.headers['upgrade-insecure-requests'] + del mySession.headers['sec-fetch-user'] + + mySession.headers['requestverificationtoken'] = RequestVerificationToken + + myNewUrl='https://singlekey-id.com/auth/api/v1/authentication/login' + response=mySession.post(myNewUrl,json=myJson,allow_redirects=True) + self._log_communication('POST ', myNewUrl, response.status_code) + + + # 7. Step + step += 1 + mySession.cookies.set('idsrv.session',mySession.cookies.get('idsrv.session')) + mySession.cookies.set('.AspNetCore.Identity.Application',mySession.cookies.get('.AspNetCore.Identity.Application')) + mySession.headers['accept']='text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' + mySession.headers['host']='singlekey-id.com' + mySession.headers['sec-fetch-dest']='document' + mySession.headers['sec-fetch-site']='same-origin' + mySession.headers['sec-fetch-user']='?1' + mySession.headers['upgrade-insecure-requests']='1' + mySession.headers['sec-fetch-mode']='navigate' + del(mySession.headers['origin']) + del(mySession.headers['requestverificationtoken']) + + myNewUrl = 'https://singlekey-id.com'+postConfirmUrl + response= mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ',myNewUrl, response.status_code) + + + # 8. Step + step += 1 + myNewUrl = response.headers['location'] + mySession.headers['host']='identity-myprofile.bosch.com' + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 9. Step + step += 1 + myNewUrl = 'https://identity-myprofile.bosch.com'+response.headers['location'] + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 10. 
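Steps 5 and 6 are plain JSON POSTs, but they only succeed because the anti-forgery material collected earlier is echoed back both as cookies and as headers (x-xsrf-token, requestverificationtoken). The sketch below shows that request shape in isolation; it passes the headers per request instead of mutating session.headers as the plugin does, and all token values are assumed to come from the preceding steps.

import requests

def login_with_antiforgery(session, username, password, return_url,
                           xsrf_token, form_token):
    # Same endpoint as in step 6 of the flow above.
    url = 'https://singlekey-id.com/auth/api/v1/authentication/login'
    session.cookies.set('XSRF-TOKEN', xsrf_token)
    headers = {
        'content-type': 'application/json',
        'accept': 'application/json, text/plain, */*',
        'origin': 'https://singlekey-id.com',
        'x-xsrf-token': xsrf_token,                     # from step 4
        'requestverificationtoken': form_token,         # X-CSRF-FORM-TOKEN cookie
    }
    payload = {
        'username': username,
        'password': password,
        'keepMeSignedIn': False,
        'returnUrl': return_url,
    }
    return session.post(url, json=payload, headers=headers, allow_redirects=True)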
Step - Authorize + step += 1 + CollectingCookie = {} + CollectingCookie['idsrv.session'] = mySession.cookies.get_dict('identity-myprofile.bosch.com','/ids')['idsrv.session'] + CollectingCookie['.AspNetCore.Identity.Application'] = mySession.cookies.get_dict('identity-myprofile.bosch.com','/ids')['.AspNetCore.Identity.Application'] + myNewUrl = 'https://identity-myprofile.bosch.com'+response.headers['location'] + response=mySession.get(myNewUrl,allow_redirects=False,cookies=CollectingCookie) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 11. Step + step += 1 + del mySession.headers['x-xsrf-token'] + del mySession.headers['content-type'] + mySession.headers['host']='identity.bosch.com' + mySession.headers['sec-fetch-site']='same-origin' + CollectingCookie = {} + CollectingCookie['styleId'] = mySession.cookies.get_dict('identity.bosch.com','/')['styleId'] + myDict = mySession.cookies.get_dict('identity.bosch.com','/') + for c in myDict: + if ('SignInMessage' in c): + CollectingCookie[c] = myDict[c] + myNewUrl = response.headers['location'] + response=mySession.get(myNewUrl,allow_redirects=False,cookies=CollectingCookie) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 12. Step + step += 1 + CollectingCookie['idsrv.external'] = mySession.cookies.get_dict('identity.bosch.com','/')['idsrv.external'] + myNewUrl = 'https://identity.bosch.com'+ response.headers['location'] + response=mySession.get(myNewUrl,allow_redirects=False,cookies=CollectingCookie) + self._log_communication('GET ', myNewUrl, response.status_code) + + # 13. Step + step += 1 + CollectingCookie={} + CollectingCookie['idsrv'] = mySession.cookies.get_dict('identity.bosch.com','/')['idsrv'] + CollectingCookie['styleId'] = mySession.cookies.get_dict('identity.bosch.com','/')['styleId'] + CollectingCookie['idsvr.session'] = mySession.cookies.get_dict('identity.bosch.com','/')['idsvr.session'] + + myNewUrl = response.headers['location'] + response=mySession.get(myNewUrl,allow_redirects=False,cookies=CollectingCookie) + self._log_communication('GET ', myNewUrl, response.status_code) + myText= response.content.decode() + myCode = myText[myText.find('"code"')+14:myText.find('"code"')+300].split('"')[0] + mySessionState = myText[myText.find('"session_state"')+23:myText.find('"session_state"')+300].split('"')[0] + myState = myText[myText.find('"state"')+15:myText.find('"state"')+300].split('"')[0] + + # 14. 
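Step 13 extracts code, session_state and state from the returned HTML by plain string slicing with fixed offsets. Purely as an illustration (this is not what the plugin does), the same hidden form fields could also be picked up with a regular expression, which does not depend on those offsets; the sketch assumes the name attribute precedes the value attribute, as it does in typical IdP auto-submit forms.

import re

def extract_hidden_fields(html, names=('code', 'state', 'session_state')):
    # Matches <input ... name="code" ... value="..."> style hidden fields.
    fields = {}
    for name in names:
        match = re.search(
            r'name=["\']{}["\'][^>]*value=["\']([^"\']+)["\']'.format(re.escape(name)),
            html)
        if match:
            fields[name] = match.group(1)
    return fields

sample = ('<form><input type="hidden" name="code" value="abc123"/>'
          '<input type="hidden" name="state" value="xyz"/></form>')
print(extract_hidden_fields(sample))   # {'code': 'abc123', 'state': 'xyz'}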
Step - /csp/report + step += 1 + myReferer = myNewUrl # the last URL is the referer + CollectingCookie['idsvr.clients'] = mySession.cookies.get_dict('identity.bosch.com','/')['idsvr.clients'] + myNewUrl='https://identity.bosch.com/csp/report' + + myHeaders = { + 'accept' : '*/*', + 'accept-encoding':'gzip, deflate, br', + 'accept-language' : 'en-US,en;q=0.9', + 'connection':'keep-alive', + 'content-type' : 'application/csp-report', + 'origin' : 'https://identity.bosch.com', + 'referer' : myReferer, + 'sec-fetch-dest' : 'report', + 'sec-fetch-mode' : 'no-cors', + 'sec-fetch-site' : 'same-origin', + 'user-agent' : 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' } - url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/token' - mySession = requests.session() - mySession.headers['accept'] = 'application/json' - mySession.headers['accept-encoding'] = 'gzip' - mySession.headers['connection'] = 'Keep-Alive' - mySession.headers['content-type'] = 'application/x-www-form-urlencoded' - mySession.headers['host'] = 'prodindego.b2clogin.com' - mySession.headers['user-agent'] = 'Dalvik/2.1.0 (Linux; U; Android 11; sdk_gphone_x86_arm Build/RSR1.201013.001)' - - response = mySession.post(url,data=request_body) - self._log_communication('POST ', url, response.status_code) - myJson = json.loads (response.content.decode()) - self._refresh_token = myJson['refresh_token'] - self._bearer = myJson['access_token'] - self.token_expires = myJson['expires_in'] - - - url='https://api.indego-cloud.iot.bosch-si.com/api/v1/alms' - myHeader = {'accept-encoding' : 'gzip', - 'authorization' : 'Bearer '+ myJson['access_token'], - 'connection' : 'Keep-Alive', - 'host' : 'api.indego-cloud.iot.bosch-si.com', - 'user-agent' : 'Indego-Connect_4.0.0.12253' - } - response = requests.get(url, headers=myHeader,allow_redirects=True ) - self._log_communication('GET ', url, response.status_code) - if (response.status_code == 200): + myPayload = {"csp-report":{"document-uri": myReferer ,"referrer":"","violated-directive":"script-src","effective-directive":"script-src","original-policy":"default-src 'self'; script-src 'self' ; style-src 'self' 'unsafe-inline' ; img-src *; report-uri https://identity.bosch.com/csp/report","disposition":"enforce","blocked-uri":"eval","line-number":174,"column-number":361,"source-file":"https://identity.bosch.com/assets/scripts.2.5.0.js","status-code":0,"script-sample":""}} + response=mySession.post(myNewUrl,allow_redirects=False,cookies=CollectingCookie) + self._log_communication('POST ', myNewUrl, response.status_code) + + # 15. 
Step + step += 1 + myHeaders = { + 'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', + 'cache-control' : 'max-age=0', + 'accept-encoding':'gzip, deflate, br', + 'accept-language' : 'en-US,en;q=0.9', + 'connection':'keep-alive', + 'content-type' : 'application/x-www-form-urlencoded', + 'host' : 'prodindego.b2clogin.com', + 'origin' : 'https://identity.bosch.com', + 'referer' : myReferer, + 'sec-fetch-dest' : 'document', + 'sec-fetch-mode' : 'navigate', + 'sec-fetch-site' : 'cross-site', + 'user-agent' : 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' + } + request_body = { + 'code' : myCode, + 'state' : myState, + 'session_state' : mySessionState + } + myNewUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/oauth2/authresp' + response=mySession.post(myNewUrl,allow_redirects=False,data=request_body,headers=myHeaders) + self._log_communication('POST ', myNewUrl, response.status_code) + + + + # 16. Step + # go end get the Token + step += 1 + + myText= response.content.decode() + myFinalCode = myText[myText.find('code%3d')+7:myText.find('"code%3"')+1700].split('"')[0] + + request_body = { + 'code' : myFinalCode, + 'grant_type' : 'authorization_code', + 'redirect_uri' : 'com.bosch.indegoconnect://login', + 'code_verifier' : code_verifier, + 'client_id' : myClientID + } + + url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/token' + mySession = requests.session() + mySession.headers['accept'] = 'application/json' + mySession.headers['accept-encoding'] = 'gzip' + mySession.headers['connection'] = 'Keep-Alive' + mySession.headers['content-type'] = 'application/x-www-form-urlencoded' + mySession.headers['host'] = 'prodindego.b2clogin.com' + mySession.headers['user-agent'] = 'Dalvik/2.1.0 (Linux; U; Android 11; sdk_gphone_x86_arm Build/RSR1.201013.001)' + + response = mySession.post(url,data=request_body) + self._log_communication('POST ', url,response.status_code) myJson = json.loads (response.content.decode()) - self.alm_sn = myJson[0]['alm_sn'] - self.login_pending = False - self.last_login_timestamp = datetime.timestamp(datetime.now()) - self.expiration_timestamp = self.last_login_timestamp + self.token_expires - return True - else: - return False + _refresh_token = myJson['refresh_token'] + _access_token = myJson['access_token'] + _token_expires = myJson['expires_in'] + + + # 17. 
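The token exchange in step 16 only works because the code_verifier sent to the token endpoint matches the code_challenge that was embedded in the authorize URL of step 1 (PKCE, RFC 7636, method S256). code_verifier and myClientID are created before this excerpt; a typical way to generate such a verifier/challenge pair looks like the following sketch, which is not necessarily the plugin's exact code.

import base64
import hashlib
import secrets

def make_pkce_pair():
    # code_verifier: 32 random bytes, base64url-encoded without padding
    code_verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b'=').decode()
    # code_challenge = BASE64URL(SHA256(code_verifier)), sent with
    # code_challenge_method=S256 in the authorize request
    digest = hashlib.sha256(code_verifier.encode()).digest()
    code_challenge = base64.urlsafe_b64encode(digest).rstrip(b'=').decode()
    return code_verifier, code_challenge

verifier, challenge = make_pkce_pair()
print(verifier, challenge)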
Step + # Check-Login + step += 1 + url='https://api.indego-cloud.iot.bosch-si.com/api/v1/alms' + myHeader = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ _access_token, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253' + } + response = requests.get(url, headers=myHeader,allow_redirects=True ) + self._log_communication('GET ', url, response.status_code) + if (response.status_code == 200): + myJson = json.loads (response.content.decode()) + _alm_sn = myJson[0]['alm_sn'] + self.last_login_timestamp = datetime.timestamp(datetime.now()) + self.expiration_timestamp = self.last_login_timestamp + _token_expires + self._log_communication('LOGIN ', 'Login to Sinlge-Key-ID successful done ', 666) + return True,_access_token,_refresh_token,_token_expires,_alm_sn + else: + return False,'','',0,'' + + except err as Exception: + self._log_communication('LOGIN ', 'something went wrong during getting Sinlge-Key-ID Login on Step : {} - {}'.format(step,err), 999) + self.logger.warning('something went wrong during getting Sinlge-Key-ID Login on Step : {} - {}'.format(step,err)) + return False,'','',0,'' + @@ -1723,6 +1858,7 @@ def _get_active_calendar(self, myCal = None): return activeCal def _parse_uzsu_2_list(self, uzsu_dict=None): + #pydevd.settrace("192.168.178.37", port=5678) weekDays = {'MO' : "0" ,'TU' : "1" ,'WE' : "2" ,'TH' : "3",'FR' : "4",'SA' : "5" ,'SU' : "6" } myCal = {} @@ -1814,6 +1950,7 @@ def _parse_cal_2_list(self, myCal = None, type=None): 'End' : myEndTime1, 'Days' : str(myDay) } + #pydevd.settrace("192.168.178.37", port=5678) if 'Attr' in slots: if slots['Attr'] == "C": # manual Exclusion Time mycolour = '#DC143C' @@ -2222,6 +2359,7 @@ def alert(self): else: actAlerts = self._get_childitem('visu.alerts') + #pydevd.settrace("192.168.178.37", port=5678) for myAlert in alert_response: if not (myAlert['alert_id'] in actAlerts): # add new alert to dict @@ -2341,6 +2479,7 @@ def _check_state_triggers(self, myStatecode): def _check_alarm_triggers(self, myAlarm): counter = 1 + #pydevd.settrace("192.168.178.37", port=5678) while counter <=4: myItemName="trigger.alarm_trigger_" + str(counter) + ".alarm" myAlarmTrigger = self._get_childitem(myItemName) @@ -2352,6 +2491,7 @@ def _check_alarm_triggers(self, myAlarm): def _get_state(self): if (self._get_childitem("wartung.wintermodus") == True or self.logged_in == False): return + #pydevd.settrace("192.168.178.37", port=5678) if (self.position_detection): self.position_count += 1 @@ -2369,6 +2509,7 @@ def _get_state(self): else: error_code = 0 self._set_childitem('stateError',error_code) + #pydevd.settrace("192.168.178.37", port=5678) state_code = states['state'] try: if not str(state_code) in str(self.states) and len(self.states) > 0: @@ -2543,6 +2684,7 @@ def _load_map(self): self.logger.debug('You have a new MAP') self._set_childitem('mapSvgCacheDate',self.shtime.now()) self._set_childitem('webif.garden_map', garden.decode("utf-8")) + #pydevd.settrace("192.168.178.37", port=5678) self._parse_map() def _parse_map(self): @@ -2557,6 +2699,7 @@ def _parse_map(self): #======================================================================= myMap = myMap.replace(">",">\n") mapArray = myMap.split('\n') + #pydevd.settrace("192.168.178.37", port=5678) # till here new # Get the Mower-Position and extract it i= 0 @@ -2686,6 +2829,7 @@ def store_state_trigger_html(self, Trigger_State_Item = None,newState=None): @cherrypy.expose def store_alarm_trigger_html(self, 
Trigger_Alarm_Item = None,newAlarm=None): + #pydevd.settrace("192.168.178.37", port=5678) myItemSuffix=Trigger_Alarm_Item myItem="trigger." + myItemSuffix + ".alarm" self.plugin._set_childitem(myItem,newAlarm) @@ -2701,6 +2845,7 @@ def store_credentials_html(self, encoded='', pwd = '', user= '', store_2_config= result2send={} resultParams={} + #pydevd.settrace("192.168.178.37", port=5678) myCredentials = user+':'+pwd byte_credentials = base64.b64encode(myCredentials.encode('utf-8')) encoded = byte_credentials.decode("utf-8") @@ -2721,13 +2866,15 @@ def store_credentials_html(self, encoded='', pwd = '', user= '', store_2_config= for line in new_conf.splitlines(): myFile.write(line+'\r\n') myFile.close() + #pydevd.settrace("192.168.178.37", port=5678) txt_Result.append("stored new config to filesystem") self.plugin.user = user self.plugin.password = pwd - if self.plugin.logged_in: - self.plugin._delete_auth() - self.plugin._auth() - self.plugin.logged_in = self.plugin._check_auth() + # Here the login-procedure + self.plugin.login_pending = True + self.plugin.logged_in, self.plugin._bearer, self.plugin._refresh_token, self.plugin.token_expires,self.plugin.alm_sn = self.plugin._login_single_key_id(self.plugin.user, self.plugin.password) + self.plugin.login_pending = False + if self.plugin.logged_in: txt_Result.append("logged in succesfully") else: @@ -2736,7 +2883,7 @@ def store_credentials_html(self, encoded='', pwd = '', user= '', store_2_config= myLastLogin = datetime.fromtimestamp(float(self.plugin.last_login_timestamp)).strftime('%Y-%m-%d %H:%M:%S') resultParams['logged_in']= self.plugin.logged_in resultParams['timeStamp']= myLastLogin + " / " + myExperitation_Time - resultParams['SessionID']= self.plugin.context_id + resultParams['SessionID']= self.plugin._bearer self.plugin._set_childitem('visu.refresh',True) txt_Result.append("refresh of Items initiated") @@ -2755,11 +2902,13 @@ def get_proto_html(self, proto_Name= None): @cherrypy.expose def clear_proto_html(self, proto_Name= None): + #pydevd.settrace("192.168.178.37", port=5678) self.plugin._set_childitem(proto_Name,[]) return None @cherrypy.expose def set_location_html(self, longitude=None, latitude=None): + pydevd.settrace("192.168.178.37", port=5678) self.plugin._set_childitem('webif.location_longitude',float(longitude)) self.plugin._set_childitem('webif.location_latitude',float(latitude)) myLocation = {"latitude":str(latitude),"longitude":str(longitude),"timezone":"Europe/Berlin"} @@ -2845,6 +2994,7 @@ def index(self, reload=None): myLatitude = "" myText = "" try: + #pydevd.settrace("192.168.178.37", port=5678) myLongitude = self.plugin._get_childitem('webif.location_longitude') myLatitude = self.plugin._get_childitem('webif.location_latitude') myText = 'Location from Indego-Server' diff --git a/indego4shng/plugin.yaml b/indego4shng/plugin.yaml index 8b519f87e..24f4a6ed4 100755 --- a/indego4shng/plugin.yaml +++ b/indego4shng/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins_doc/config/indego.html # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/966612-indego-connect - version: 4.0.0 # Plugin version + version: 4.0.1 # Plugin version sh_minversion: 1.6 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance diff --git a/indego4shng/requirements.txt b/indego4shng/requirements.txt new file mode 100755 index 
000000000..dbb901f09 --- /dev/null +++ b/indego4shng/requirements.txt @@ -0,0 +1,2 @@ +requests +urllib3 >= 1.25.8 diff --git a/indego4shng/webif/templates/index.html b/indego4shng/webif/templates/index.html old mode 100755 new mode 100644 index 53758c38b..ae658df37 --- a/indego4shng/webif/templates/index.html +++ b/indego4shng/webif/templates/index.html @@ -140,7 +140,7 @@
    {{ _('Plugin') }}     : {% if p.aliv
    Session-IDToken {{ p.context_id }}
    + + + + + + + + + + + + + + + + + +
    {{ _('Long Polling Timeout') }}{{ p._long_polling_timeout ~ _(' Sekunden')}}
    {{ _('Willkommensnachricht') }}{{ p._welcome_msg }}
    {{ _('Ende Nachricht') }}{{ p._bye_msg }}
    +{% endblock headtable %} + + +{% set tabcount = 4 %} + + +{% if item_count==0 %} + {% set start_tab = 1 %} +{% endif %} + + + +{% set tab1title = "Output Items"%} +{% block bodytab1 %} +
    + + + + + + + + + + + + {% for item in p._items %} + + + + + + + + {% endfor %} + +
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    +
    +{% endblock bodytab1 %} + +{% set tab2title = "Input Items"%} +{% block bodytab2 %} +
    + + + + + + + + + + + + {% for item in p._items_text_message %} + + + + + + + + {% endfor %} + +
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    +
    +{% endblock bodytab2 %} +{% set tab3title = "Telegram Control"%} +{% block bodytab3 %} +
    + + + + + + + + + + + + {% for key in p._items_control %} + + + + + + + + + {% endfor %} + +
    {{ _('Control') }}{{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letzter Change') }}{{ _('letzter Wert') }}
    {{ key }}{{ p._items_control[key].item.property.path }}{{ p._items_control[key].item.property.type }}{{ p._items_control[key].item() }}{{ p._items_control[key].item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}{{ p._items_control[key].item.property.last_value }}
    +
    +{% endblock bodytab3 %} + + + +{% set tab4title = "Telegram Infos" %} +{% block bodytab4 %} +Chat-IDs +
    + + + + + + + + + {% for key in p._chat_ids_item() %} + + + + + {% endfor %} + +
    {{ _('Registrierte Chat-ID') }}{{ _('Zugriff') }}
    {{ key }}{{ p._chat_ids_item()[key] }}
    + +Telegram Infos + + + + + + + + + {% for key in p._items_info %} + + + + + {% endfor %} + +
    {{ _('Info') }}{{ _('Info-Items') }}
    {{ key }}{{ p._items_info[key] }}
    +
    +{% endblock bodytab4 %} diff --git a/telegram/webif/templates/index.html b/telegram/webif/templates/index.html index 290518780..94df37107 100755 --- a/telegram/webif/templates/index.html +++ b/telegram/webif/templates/index.html @@ -77,30 +77,28 @@ --> {% set tab1title = "Output Items"%} {% block bodytab1 %} -
    - - - - - - - - - - - - {% for item in p._items %} - - - - - - - - {% endfor %} - -
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    -
    + + + + + + + + + + + + {% for item in p._items %} + + + + + + + + {% endfor %} + +
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {% endblock bodytab1 %} {% set tab2title = "Input Items"%} {% block bodytab2 %} -
    - - - - - - - - - - - - {% for item in p._items_text_message %} - - - - - - - - {% endfor %} - -
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    -
    + + + + + + + + + + + + {% for item in p._items_text_message %} + + + + + + + + {% endfor %} + +
    {{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
    {{ item.property.path }}{{ item.property.type }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {% endblock bodytab2 %} {% set tab3title = "Telegram Control"%} {% block bodytab3 %} -
    - - - - - - - - - - - - {% for key in p._items_control %} - - - - - - - - - {% endfor %} - -
    {{ _('Control') }}{{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letzter Change') }}{{ _('letzter Wert') }}
    {{ key }}{{ p._items_control[key].item.property.path }}{{ p._items_control[key].item.property.type }}{{ p._items_control[key].item() }}{{ p._items_control[key].item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}{{ p._items_control[key].item.property.last_value }}
    -
    + + + + + + + + + + + + {% for key in p._items_control %} + + + + + + + + + {% endfor %} + +
    {{ _('Control') }}{{ _('Pfad') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letzter Change') }}{{ _('letzter Wert') }}
    {{ key }}{{ p._items_control[key].item.property.path }}{{ p._items_control[key].item.property.type }}{{ p._items_control[key].item() }}{{ p._items_control[key].item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}{{ p._items_control[key].item.property.last_value }}
    {% endblock bodytab3 %} {% set tab4title = "Telegram Infos" %} {% block bodytab4 %} -
    - - - - - - - - - {% for key in p._items_info %} - - - - - {% endfor %} - -
    {{ _('Info') }}{{ _('Info-Items') }}
    {{ key }}{{ p._items_info[key] }}
    -
    + + + + + + + + + {% for key in p._items_info %} + + + + + {% endfor %} + +
    {{ _('Info') }}{{ _('Info-Items') }}
    {{ key }}{{ p._items_info[key] }}
    {% endblock bodytab4 %} -{% set tab5title = "Chat-ID's" %} +{% set tab5title = "Chat-IDs" %} {% block bodytab5 %} -
    - - - - - - - - - {% for key in p._chat_ids_item() %} - - - - - {% endfor %} - -
    {{ _('Registrierte Chat-ID') }}{{ _('Zugriff') }}
    {{ key }}{{ p._chat_ids_item()[key] }}
    -
    + + + + + + + + + {% for key in p._chat_ids_item() %} + + + + + {% endfor %} + +
    {{ _('Registrierte Chat-ID') }}{{ _('Zugriff') }}
    {{ key }}{{ p._chat_ids_item()[key] }}
    {% endblock bodytab5 %} \ No newline at end of file From 55e6d9ee81e1e75d1cf9473beb6ee831e0fa1d11 Mon Sep 17 00:00:00 2001 From: aschwith Date: Mon, 8 May 2023 17:40:25 +0200 Subject: [PATCH 095/775] avm: removed 'optional' text in mandatory plugin host attribute. --- avm/plugin.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 1d6e7b1aa..b487ef6ec 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -40,8 +40,8 @@ parameters: type: str mandatory: True description: - de: '(optional) Hostname oder IP-Adresse des FritzDevice.' - en: '(optional) Hostname or ip address of the FritzDevice.' + de: 'Hostname oder IP-Adresse des FritzDevice.' + en: 'Hostname or ip address of the FritzDevice.' port: type: int default: 49443 From 00f2bfcd0a15928fa0d553b6e506052f7a5c29e3 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Mon, 8 May 2023 19:18:30 +0200 Subject: [PATCH 096/775] AVM Plugin: - Automate creation of item_attributes.py and update of valid_list of avm_data_type in plugin.yaml by scripts included in item_attributes_master.py --- avm/item_attributes.py | 23 +- avm/item_attributes_master.py | 134 ++++++- avm/plugin.yaml | 717 ++++++++++++++++++---------------- 3 files changed, 505 insertions(+), 369 deletions(-) diff --git a/avm/item_attributes.py b/avm/item_attributes.py index 8aebaac94..6411e118d 100644 --- a/avm/item_attributes.py +++ b/avm/item_attributes.py @@ -19,14 +19,23 @@ # along with this plugin. If not, see . # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# +# +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# +# +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info'] -ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 'switch_state'] -ALL_ATTRIBUTES_WRITEONLY = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] +ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 
'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] +ALL_ATTRIBUTES_WRITEONLY = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] DEPRECATED_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version', 'boost_active'] AHA_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'set_target_temperature', 'target_temperature', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'set_window_open', 'window_open', 'windowopenactiveendtime', 'set_hkr_boost', 'hkr_boost', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'set_simpleonoff', 'simpleonoff', 'set_level', 'level', 'set_levelpercentage', 'levelpercentage', 'set_hue', 'hue', 'set_saturation', 'saturation', 'set_colortemperature', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_state', 'switch_mode', 'switch_toggle', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] AHA_RO_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'windowopenactiveendtime', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_mode', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] -AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'switch_state', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation'] +AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] TR064_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 
'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] AVM_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] CALL_MONITOR_ATTRIBUTES = ['call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing'] @@ -42,11 +51,11 @@ WLAN_CONFIG_ATTRIBUTES = ['wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode'] WLAN_ATTRIBUTES = ['wlan_total_associates'] FRITZ_DEVICE_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot'] -HOST_ATTRIBUTES = ['host_info'] # host index needed -HOST_ATTRIBUTES_CHILD = ['network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active'] # avm_mac needed -HOSTS_ATTRIBUTES = ['hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url'] # no index needed +HOST_ATTRIBUTES = ['host_info'] +HOSTS_ATTRIBUTES = ['hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url'] +HOST_ATTRIBUTES_CHILD = ['network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active'] DEFLECTION_ATTRIBUTES = ['number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id'] HOMEAUTO_RO_ATTRIBUTES = ['hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] HOMEAUTO_RW_ATTRIBUTES = ['aha_device'] HOMEAUTO_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] -MYFRITZ_ATTRIBUTES = ['myfritz_status'] \ No newline at end of file +MYFRITZ_ATTRIBUTES = ['myfritz_status'] diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index 092bcf7a4..45e735785 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -19,6 +19,45 @@ # along with this plugin. If not, see . 
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +import ruamel.yaml + +FILENAME_ATTRIBUTES = 'item_attributes.py' + +FILENAME_PLUGIN = 'plugin.yaml' + +HEADER = """\ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# AVM for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# +# +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# +# +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +""" + # 'avm_data_type': {'interface': 'tr064', 'group': '', 'sub_group': None, 'access': '', 'type': '', 'deprecated': False, 'supported_by_repeater': False, 'description': ''}, AVM_DATA_TYPES = { @@ -132,7 +171,7 @@ 'window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Window Open" Funktion (Status und Setzen)'}, 'windowopenactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der "Window Open" Funktion'}, 'set_hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion Setzen'}, - 'hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion (Status aund Setzen)'}, + 'hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion (Status und Setzen)'}, 'boost_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': True, 'supported_by_repeater': False, 'description': 'Status der "Boost" Funktion'}, 'boostactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 
'description': 'Zeitliches Ende der "Boost" Funktion'}, 'summer_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status der "Sommer" Funktion'}, @@ -143,19 +182,19 @@ 'device_lock': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Tastensperre direkt am Gerät ein'}, 'errorcode': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Fehlercodes die der HKR liefert'}, 'set_simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe an-/ausschalten'}, - 'simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'wr', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe (Status und Setzen)'}, + 'simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe (Status und Setzen)'}, 'set_level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 Setzen'}, - 'level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 (Setzen & Status)'}, + 'level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 (Setzen & Status)'}, 'set_levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% Setzen'}, - 'levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% (Setzen & Status)'}, + 'levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% (Setzen & Status)'}, 'set_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue Setzen'}, - 'hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, + 'hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, 'set_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation Setzen'}, - 'saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 
'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, + 'saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, 'set_colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur Setzen'}, - 'colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Status und Setzen)'}, - 'unmapped_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, - 'unmapped_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, + 'colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Status und Setzen)'}, + 'unmapped_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, + 'unmapped_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, 'color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, 'supported_color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, 'fullcolorsupport': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Lampe unterstützt setunmappedcolor'}, @@ -173,8 +212,9 @@ } } +ATTRIBUTES_LIST = ['tr064', 'aha'] -def get_attrs(ifaces: list = ['tr064', 'aha'], sub_dict: dict = {}) -> list: +def get_attrs(ifaces: list = ATTRIBUTES_LIST, sub_dict: dict = {}) -> list: attributes = [] for iface in ifaces: for avm_data_type in AVM_DATA_TYPES[iface]: @@ -182,8 +222,8 @@ def get_attrs(ifaces: list = ['tr064', 'aha'], sub_dict: dict = {}) -> list: attributes.append(avm_data_type) return attributes -def export_avm_data(): - ATTRS = {} +def export_item_attributs_py(): + ATTRS = dict() ATTRS['ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER'] = get_attrs(sub_dict={'supported_by_repeater': True}) ATTRS['ALL_ATTRIBUTES_WRITEABLE'] = get_attrs(sub_dict={'access': 'wo'}) + get_attrs(sub_dict={'access': 'rw'}) ATTRS['ALL_ATTRIBUTES_WRITEONLY'] = get_attrs(sub_dict={'access': 'wo'}) @@ -207,7 +247,7 @@ def export_avm_data(): ATTRS['WLAN_CONFIG_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wlan_config'}) ATTRS['WLAN_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wlan'}) 
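# Illustrative example of what get_attrs() returns for filters like the ones
# above (values depend on the AVM_DATA_TYPES master dict; shown here only as
# an assumed sample, not as output generated by the plugin):
#
#   >>> get_attrs(['tr064'], {'group': 'wlan_config', 'access': 'rw'})
#   ['wlanconfig', 'wps_active']
#
# An entry of AVM_DATA_TYPES[iface] is kept when every key/value pair of
# sub_dict matches, so an empty sub_dict returns all attributes of the
# requested interface(s).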
ATTRS['FRITZ_DEVICE_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'fritz_device'}) - ATTRS['HOST_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'info'}) + ATTRS['HOST_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'host'}) ATTRS['HOSTS_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'gen'}) ATTRS['HOST_ATTRIBUTES_CHILD'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'child'}) ATTRS['DEFLECTION_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'deflection'}) @@ -216,8 +256,72 @@ def export_avm_data(): ATTRS['HOMEAUTO_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'homeauto'}) ATTRS['MYFRITZ_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'myfritz'}) + + # create file and write header + f = open(FILENAME_ATTRIBUTES, "w") + f.write(HEADER) + f.close() + + # write avm_data_types for attr, alist in ATTRS.items(): - print(f'{attr} = {alist!r}') + with open(FILENAME_ATTRIBUTES, "a") as f: + print (f'{attr} = {alist!r}', file=f) + + print('item_attributs.py successfully created!') + +def create_plugin_yaml_avm_data_type_valid_list(ifaces: list = ATTRIBUTES_LIST): + """Create valid_list of avm_data_type based on master dict""" + + interface_group = None + valid_list_str = """\ + type: str + mandatory: True + description: + de: 'AVM Datentyp des jeweiligen Items.' + en: 'AVM Data Type of the respective item.' + valid_list: # NOTE: valid_list is automatically created by using item_attributes_master.py""" + + for iface in ifaces: + valid_list_str = f"{valid_list_str}\n # {iface} Attributes" + for avm_data_type in AVM_DATA_TYPES[iface]: + interface_group_new = f"{AVM_DATA_TYPES[iface][avm_data_type]['interface']}-{AVM_DATA_TYPES[iface][avm_data_type]['group']}" + if interface_group_new != interface_group: + interface_group = interface_group_new + valid_list_str = f"""{valid_list_str}\n\ + # {interface_group} Attributes""" + valid_list_str = f"""{valid_list_str}\n\ + - {avm_data_type!r:<40}# {AVM_DATA_TYPES[iface][avm_data_type]['access']:<5}{AVM_DATA_TYPES[iface][avm_data_type]['type']:<5}\t{AVM_DATA_TYPES[iface][avm_data_type]['description']:<}""" + + return valid_list_str + +def update_plugin_yaml_avm_data_type_valid_list(): + yaml = ruamel.yaml.YAML() + yaml.indent(mapping=4, sequence=4, offset=4) + yaml.width = 150 + + valid_list_str = create_plugin_yaml_avm_data_type_valid_list() + valid_list_yaml = yaml.load(valid_list_str) + + with open(FILENAME_PLUGIN, 'r') as f: + data = yaml.load(f) + avm_data_type = data.get('item_attributes', {}).get('avm_data_type') + + if avm_data_type: + data['item_attributes']['avm_data_type'] = valid_list_yaml + with open(FILENAME_PLUGIN, 'w') as f: + yaml.dump(data, f) + print('valid_list of avm_data_type successfully updated!') + else: + print('Error during updating valid_list of avm_data_type!') + if __name__ == '__main__': - export_avm_data() + # Run main to export item_attributes.py and update a´valid_list of avm_data_type in plugin.yaml + export_item_attributs_py() + update_plugin_yaml_avm_data_type_valid_list() + + +# Notes: +# - HOST_ATTRIBUTES: host index needed +# - HOSTS_ATTRIBUTES: no index needed +# - HOST_ATTRIBUTES_CHILD: avm_mac needed \ No newline at end of file diff --git a/avm/plugin.yaml b/avm/plugin.yaml index b487ef6ec..d858d6f7c 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -3,8 +3,8 @@ plugin: # Global plugin attributes type: interface # plugin type (gateway, interface, protocol, system, web) description: - de: 'Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, 
etc.' - en: 'Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets.' + de: Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, etc. + en: Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets. maintainer: sisamiwe tester: psilo, onkelandy, aschwith, bmx state: develop # change to ready when done with development @@ -27,252 +27,272 @@ parameters: type: str default: '' description: - de: '(optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen und Kennwort` notwendig' - en: '(optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Firtz!OS 7 the Fritz!Box should be configured for login with username and password' + de: (optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box + auf `Anmeldung mit FRITZ!Box-Benutzernamen und Kennwort` notwendig + en: (optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Firtz!OS 7 the Fritz!Box should + be configured for login with username and password password: type: str default: '' hide: true description: - de: '(optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen.' - en: '(optional) Password for login. Is normally always needed and recommended due to security reasons' + de: (optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen. + en: (optional) Password for login. Is normally always needed and recommended due to security reasons host: type: str - mandatory: True + mandatory: true description: - de: 'Hostname oder IP-Adresse des FritzDevice.' - en: 'Hostname or ip address of the FritzDevice.' + de: Hostname oder IP-Adresse des FritzDevice. + en: Hostname or ip address of the FritzDevice. port: type: int default: 49443 description: - de: '(optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http' - en: '(optional) Port of the FritzDevice, typically 49443 for https or 49000 for http' + de: (optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http + en: (optional) Port of the FritzDevice, typically 49443 for https or 49000 for http cycle: type: int default: 300 description: - de: '(optional) Zeit zwischen zwei Runs. Default ist 300 Sekunden.' - en: '(optional) Time period between two update cycles. Default is 300 seconds.' + de: (optional) Zeit zwischen zwei Runs. Default ist 300 Sekunden. + en: (optional) Time period between two update cycles. Default is 300 seconds. ssl: type: bool default: true description: - de: '(optional) Mit True wird das FritzDevice via https, mit False via http angesprochen.' - en: '(optional) True will add "https", False "http" to the URLs in the plugin.' + de: (optional) Mit True wird das FritzDevice via https, mit False via http angesprochen. + en: (optional) True will add "https", False "http" to the URLs in the plugin. verify: type: bool default: false description: - de: '(optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False.' - en: '(optional) Turns certificate verification on or off. Typically False' + de: (optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False. + en: (optional) Turns certificate verification on or off. 
Typically False call_monitor: type: bool default: false description: - de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via #96*5* aktiviert sein.' - en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be activated before by a connected telephone via calling #96*5*' + de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss + über ein verbundenes Telefon via #96*5* aktiviert sein.' + en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be + activated before by a connected telephone via calling #96*5*' call_monitor_incoming_filter: type: str default: '' description: - de: '(optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet.' - en: '(optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react.' + de: (optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen + Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom + CallMonitor verarbeitet. + en: (optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react. avm_home_automation: type: bool default: false description: - de: '(optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface.' - en: '(optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface' + de: (optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface. + en: (optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface log_entry_count: type: int default: 200 description: - de: '(optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle' - en: '(optional) Amount of Log-Messages, witch will be displayed. 0 = all' + de: (optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle + en: (optional) Amount of Log-Messages, witch will be displayed. 0 = all tr064_item_blacklist: type: bool - default: False + default: false description: - de: '(optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt.' - en: '(optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle' + de: (optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr + abgefragt. + en: (optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle item_attributes: # Definition of item attributes defined by this plugin avm_data_type: type: str - mandatory: True + mandatory: true description: - de: 'AVM Datentyp des jeweiligen Items.' 
- en: 'AVM Data Type of the respective item.' - valid_list: - # Fritzdevice Attribute - - 'uptime' # r/o num Laufzeit des Fritzdevice in Sekunden - - 'serial_number' # r/o str Serialnummer des Fritzdevice - - 'software_version' # r/o str Software Version - - 'hardware_version' # r/o str Hardware Version - # Myfritz Attribute - - 'myfritz_status' # r/o bool MyFritz Status - # Call Monitor Attribute - - 'monitor_trigger' # r/o bool Monitortrigger - - 'is_call_incoming' # r/o bool Eingehender Anruf erkannt - - 'call_duration_incoming' # r/o num Dauer des eingehenden Anrufs - - 'last_caller_incoming' # r/o str Letzter Anrufer - - 'last_number_incoming' # r/o str Nummer des letzten eingehenden Anrufes - - 'last_called_number_incoming' # r/o str Angerufene Nummer des letzten eingehenden Anrufs - - 'last_call_date_incoming' # r/o str Zeitpunkt des letzten eingehenden Anrufs - - 'call_event_incoming' # r/o str Status des letzten eingehenden Anrufs - - 'is_call_outgoing' # r/o bool Ausgehender Anruf erkannt - - 'call_duration_outgoing' # r/o num Dauer des ausgehenden Anrufs - - 'last_caller_outgoing' # r/o str Letzter angerufener Kontakt - - 'last_number_outgoing' # r/o str Letzte angerufene Nummer - - 'last_called_number_outgoing' # r/o str Letzter verwendete Telefonnummer für ausgehenden Anruf - - 'last_call_date_outgoing' # r/o str Zeitpunkt des letzten ausgehenden Anrufs - - 'call_event_outgoing' # r/o str Status des letzten ausgehenden Anrufs - - 'call_direction' # r/o str Richtung des letzten Anrufes - - 'call_event' # r/o str Status des letzten Anrufes - # TAM Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_index' - - 'tam' # r/w bool TAM an/aus - - 'tam_name' # r/o str Name des TAM - - 'tam_old_message_number' # r/o num Anzahl der alten Nachrichten - - 'tam_new_message_number' # r/o num Anzahl der neuen Nachrichten - - 'tam_total_message_number' # r/o num Gesamtanzahl der Nachrichten - # WAN Attribute - - 'wan_connection_status' # r/o str WAN Verbindungsstatus - - 'wan_connection_error' # r/o str WAN Verbindungsfehler - - 'wan_is_connected' # r/o bool WAN Verbindung aktiv - - 'wan_uptime' # r/o str WAN Verbindungszeit - - 'wan_ip' # r/o str WAN IP Adresse - - 'wan_upstream' # r/o num WAN Upstream Datenmenge - - 'wan_downstream' # r/o num WAN Downstream Datenmenge - - 'wan_total_packets_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Pakete - - 'wan_total_packets_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Pakete - - 'wan_current_packets_sent' # r/o num WAN Verbindung-Anzahl aktuell versendeter Pakete - - 'wan_current_packets_received' # r/o num WAN Verbindung-Anzahl aktuell empfangener Pakete - - 'wan_total_bytes_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Bytes - - 'wan_total_bytes_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Bytes - - 'wan_current_bytes_sent' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Senden - - 'wan_current_bytes_received' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen - - 'wan_link' # r/o bool WAN Link - # WLAN Config Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_wlan_index' - - 'wlanconfig' # r/w bool WLAN An/Aus - - 'wlanconfig_ssid' # r/o str WLAN SSID - - 'wlan_guest_time_remaining' # r/o num Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN - - 'wlan_associates' # r/o num Anzahl der verbundenen Geräte im jeweiligen WLAN - - 'wps_active' # r/w bool Schaltet WPS für das entsprechende WlAN an / aus - - 
'wps_status' # r/o str WPS Status des entsprechenden WlAN - - 'wps_mode' # r/o str WPS Modus des entsprechenden WlAN - # WLAN Attribute - - 'wlan_total_associates' # r/o num Anzahl der verbundenen Geräte im WLAN - # Host Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_mac' - - 'network_device' # r/o bool Verbindungsstatus // Defines Network device via MAC-Adresse - - 'device_ip' # r/o str Geräte-IP (Muss Child von 'network_device' sein) - - 'device_connection_type' # r/o str Verbindungstyp (Muss Child von 'network_device' sein) - - 'device_hostname' # r/o str Gerätename (Muss Child von 'network_device' sein) - - 'connection_status' # r/o bool Verbindungsstatus (Muss Child von 'network_device' sein) - # Hosts Attribute - - 'hosts_count' #r/o num Anzahl der Hosts - - 'hosts_info' #r/o dict Informationen über die Hosts - - 'mesh_topology' #r/o dict Topologie des Mesh - # Smarthome Attribute (Deprecated avm data types. Please use alternative AHA interface type) - - 'aha_device' # r/w bool Steckdose schalten; siehe "switch_state" - - 'hkr_device' # r/o str Status des HKR (OPEN; CLOSED; TEMP) - - 'set_temperature' # r/o num siehe "target_temperature" - - 'temperature' # r/o num siehe "current_temperature" - - 'set_temperature_reduced' # r/o num siehe "temperature_reduced" - - 'set_temperature_comfort' # r/o num siehe "temperature_comfort" - - 'firmware_version' # r/o str siehe "fw_version" - # Deflections Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_deflection_index' - - 'number_of_deflections' # r/o num Anzahl der eingestellten Rufumleitungen - - 'deflection_details' # r/o dict Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item - - 'deflections_details' # r/o dict Details zu allen Rufumleitung (als dict) - - 'deflection_enable' # r/w bool Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_type' # r/o str Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_number' # r/o str Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_to_number' # r/o str Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_mode' # r/o str Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_outgoing' # r/o str Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - 'deflection_phonebook_id' # r/o str Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - # AHA Interface attributes Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_tam_ain' - - 'device_id' # r/o str Geräte -ID - - 'manufacturer' # r/o str Hersteller - - 'product_name' # r/o str Produktname - - 'fw_version' # r/o str Firmware Version - - 'connected' # r/o bool Verbindungsstatus - - 'device_name' # r/o str Gerätename - - 'tx_busy' # r/o bool Verbindung aktiv - - 'device_functions' # r/o list Im Gerät vorhandene Funktionen - - - 'set_target_temperature' # w/o num Soll-Temperatur Setzen - - 'target_temperature' # r/w num Soll-Temperatur (Status und Setzen) - - 'current_temperature' # r/o num Ist-Temperatur - - 
'temperature_reduced' # r/o num Eingestellte reduzierte Temperatur - - 'temperature_comfort' # r/o num Eingestellte Komfort-Temperatur - - 'temperature_offset' # r/o num Eingestellter Temperatur-Offset - - 'set_window_open' # w/o bool "Window Open" Funktionen Setzen - - 'window_open' # r/w bool "Window Open" Funktion (Status und Setzen) - - 'windowopenactiveendtime' # r/o num Zeitliches Ende der "Window Open" Funktion - - 'set_hkr_boost' # w/o bool "Boost" Funktion Setzen - - 'hkr_boost' # r/w bool "Boost" Funktion (Status aund Setzen) - - 'boost_active' # r/o bool Status der "Boost" Funktion deprecated - - 'boostactiveendtime' # r/o num Zeitliches Ende der "Boost" Funktion - - 'summer_active' # r/o bool Status der "Sommer" Funktion - - 'holiday_active' # r/o bool Status der "Holiday" Funktion - - 'battery_low' # r/o bool "Battery low" Status - - 'battery_level' # r/o num Batterie-Status in % - - 'lock' # r/o bool Tastensperre über UI/API aktiv - - 'device_lock' # r/o bool Tastensperre direkt am Gerät ein - - 'errorcode' # r/o num Fehlercodes die der HKR liefert - - - 'set_simpleonoff' # w/o bool Gerät/Aktor/Lampe an-/ausschalten - - 'simpleonoff' # w/r bool Gerät/Aktor/Lampe (Status und Setzen) - - - 'set_level' # w/o num Level/Niveau von 0 bis 255 Setzen - - 'level' # w/r num Level/Niveau von 0 bis 255 (Setzen & Status) - - 'set_levelpercentage' # w/o num Level/Niveau von 0% bis 100% Setzen - - 'levelpercentage' # w/r num Level/Niveau von 0% bis 100% (Setzen & Status) - - - - 'set_hue' # w/o num Hue Setzen - - 'hue' # w/r num Hue (Status und Setzen) - - 'set_saturation' # w/o num Saturation Setzen - - 'saturation' # w/r num Saturation (Status und Setzen) - - 'set_colortemperature' # w/o num Farbtemperatur Setzen - - 'colortemperature' # w/r num Farbtemperatur (Status und Setzen) - - 'unmapped_hue' # w/r num Hue (Status und Setzen) - - 'unmapped_saturation' # w/r num Saturation (Status und Setzen) - - 'color_mode' # r/o num Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - 'supported_color_mode' # r/o num Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - 'fullcolorsupport' # r/o bool Lampe unterstützt setunmappedcolor - - 'mapped' # r/o bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt - - - 'switch_state' # r/w bool Schaltzustand Steckdose (Status und Setzen) - - 'switch_mode' # r/o str Zeitschaltung oder manuell schalten - - 'switch_toggle' # w/o bool Schaltzustand umschalten (toggle) - - - 'power' # r/o num Leistung in W (Aktualisierung alle 2 min) - - 'energy' # r/o num absoluter Verbrauch seit Inbetriebnahme in Wh - - 'voltage' # r/o num Spannung in V (Aktualisierung alle 2 min) - - - 'humidity' # r/o num Relative Luftfeuchtigkeit in % (FD440) - - - 'alert_state' # r/o bool letzter übermittelter Alarmzustand - - - 'blind_mode' # r/o str automatische Zeitschaltung oder manuell fahren - - 'endpositionsset' # r/o bool ist die Endlage für das Rollo konfiguriert - + de: AVM Datentyp des jeweiligen Items. + en: AVM Data Type of the respective item. 
+ valid_list: # NOTE: valid_list is automatically created by using item_attributes_master.py + # tr064 Attributes + # tr064-fritz_device Attributes + - uptime # ro num Laufzeit des Fritzdevice in Sekunden + - software_version # ro str Serialnummer des Fritzdevice + - hardware_version # ro str Software Version + - serial_number # ro str Hardware Version + - manufacturer # ro str Hersteller + - product_class # ro str Produktklasse + - manufacturer_oui # ro str Hersteller OUI + - model_name # ro str Modelname + - description # ro str Modelbeschreibung + - device_log # ro str Gerte Log + - security_port # ro str Security Port + - reboot # wo bool Startet das Gert neu + # tr064-myfritz Attributes + - myfritz_status # ro bool MyFritz Status (an/aus) + # tr064-call_monitor Attributes + - call_direction # ro str Richtung des letzten Anrufes + - call_event # ro str Status des letzten Anrufes + - monitor_trigger # ro bool Monitortrigger + - is_call_incoming # ro bool Eingehender Anruf erkannt + - last_caller_incoming # ro str Letzter Anrufer + - last_call_date_incoming # ro str Zeitpunkt des letzten eingehenden Anrufs + - call_event_incoming # ro str Status des letzten eingehenden Anrufs + - last_number_incoming # ro str Nummer des letzten eingehenden Anrufes + - last_called_number_incoming # ro str Angerufene Nummer des letzten eingehenden Anrufs + - is_call_outgoing # ro bool Ausgehender Anruf erkannt + - last_caller_outgoing # ro str Letzter angerufener Kontakt + - last_call_date_outgoing # ro str Zeitpunkt des letzten ausgehenden Anrufs + - call_event_outgoing # ro str Status des letzten ausgehenden Anrufs + - last_number_outgoing # ro str Nummer des letzten ausgehenden Anrufes + - last_called_number_outgoing # ro str Letzte verwendete Telefonnummer fr ausgehenden Anruf + - call_duration_incoming # ro num Dauer des eingehenden Anrufs + - call_duration_outgoing # ro num Dauer des ausgehenden Anrufs + # tr064-tam Attributes + - tam # rw bool TAM an/aus + - tam_name # ro str Name des TAM + - tam_new_message_number # ro num Anzahl der alten Nachrichten + - tam_old_message_number # ro num Anzahl der neuen Nachrichten + - tam_total_message_number # ro num Gesamtanzahl der Nachrichten + # tr064-wan Attributes + - wan_connection_status # ro str WAN Verbindungsstatus + - wan_connection_error # ro str WAN Verbindungsfehler + - wan_is_connected # ro bool WAN Verbindung aktiv + - wan_uptime # ro str WAN Verbindungszeit + - wan_ip # ro str WAN IP Adresse + - wan_upstream # ro num WAN Upstream Datenmenge + - wan_downstream # ro num WAN Downstream Datenmenge + - wan_total_packets_sent # ro num WAN Verbindung-Anzahl insgesamt versendeter Pakete + - wan_total_packets_received # ro num WAN Verbindung-Anzahl insgesamt empfangener Pakete + - wan_current_packets_sent # ro num WAN Verbindung-Anzahl aktuell versendeter Pakete + - wan_current_packets_received # ro num WAN Verbindung-Anzahl aktuell empfangener Pakete + - wan_total_bytes_sent # ro num WAN Verbindung-Anzahl insgesamt versendeter Bytes + - wan_total_bytes_received # ro num WAN Verbindung-Anzahl insgesamt empfangener Bytes + - wan_current_bytes_sent # ro num WAN Verbindung-Anzahl aktuelle Bitrate Senden + - wan_current_bytes_received # ro num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen + - wan_link # ro bool WAN Link + # tr064-wlan_config Attributes + - wlanconfig # rw bool WLAN An/Aus + - wlanconfig_ssid # ro str WLAN SSID + - wlan_guest_time_remaining # ro num Verbleibende Zeit, bis zum automatischen Abschalten des Gste-WLAN + - wlan_associates # ro 
num Anzahl der verbundenen Gerte im jeweiligen WLAN + - wps_active # rw bool Schaltet WPS fr das entsprechende WlAN an / aus + - wps_status # ro str WPS Status des entsprechenden WlAN + - wps_mode # ro str WPS Modus des entsprechenden WlAN + # tr064-wlan Attributes + - wlan_total_associates # ro num Anzahl der verbundenen Gerte im WLAN + # tr064-host Attributes + - hosts_count # ro num Anzahl der Hosts + - hosts_info # ro dict Informationen ber die Hosts + - mesh_topology # ro dict Topologie des Mesh + - number_of_hosts # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - hosts_url # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - mesh_url # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - network_device # ro bool Verbindungsstatus // Defines Network device via MAC-Adresse + - device_ip # ro str Gerte-IP (Muss Child von "network_device" sein + - device_connection_type # ro str Verbindungstyp (Muss Child von "network_device" sein + - device_hostname # ro str Gertename (Muss Child von "network_device" sein + - connection_status # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - is_host_active # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - host_info # ro bool Verbindungsstatus (Muss Child von "network_device" sein + # tr064-deflection Attributes + - number_of_deflections # ro num Anzahl der eingestellten Rufumleitungen + - deflections_details # ro dict Details zu allen Rufumleitung (als dict) + - deflection_details # ro dict Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item + - deflection_enable # rw bool Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_type # ro str Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_number # ro str Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_to_number # ro str Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_mode # ro str Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_outgoing # ro str Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - deflection_phonebook_id # ro str Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + # tr064-homeauto Attributes + - aha_device # rw bool Steckdose schalten; siehe "switch_state" + - hkr_device # ro str Status des HKR (OPEN; CLOSED; TEMP) + - set_temperature # ro num siehe "target_temperature" + - temperature # ro num siehe "current_temperature" + - set_temperature_reduced # ro num siehe "temperature_reduced" + - set_temperature_comfort # ro num siehe "temperature_comfort" + - firmware_version # ro str siehe "fw_version" + # aha Attributes + # aha-device Attributes + - device_id # ro str Gerte -ID + - manufacturer # ro str Hersteller + - product_name # ro str Produktname + - fw_version # ro str Firmware Version + - connected # ro bool Verbindungsstatus + - device_name # ro str Gertename + - tx_busy # ro bool Verbindung aktiv + - device_functions # ro list Im Gert vorhandene Funktionen + # aha-hkr Attributes + - 
set_target_temperature # wo num Soll-Temperatur Setzen + - target_temperature # rw num Soll-Temperatur (Status und Setzen) + - current_temperature # ro num Ist-Temperatur + - temperature_reduced # ro num Eingestellte reduzierte Temperatur + - temperature_comfort # ro num Eingestellte Komfort-Temperatur + - temperature_offset # ro num Eingestellter Temperatur-Offset + - set_window_open # wo bool "Window Open" Funktionen Setzen + - window_open # rw bool "Window Open" Funktion (Status und Setzen) + - windowopenactiveendtime # ro num Zeitliches Ende der "Window Open" Funktion + - set_hkr_boost # wo bool "Boost" Funktion Setzen + - hkr_boost # rw bool "Boost" Funktion (Status und Setzen) + - boost_active # ro bool Status der "Boost" Funktion + - boostactiveendtime # ro num Zeitliches Ende der "Boost" Funktion + - summer_active # ro bool Status der "Sommer" Funktion + - holiday_active # ro bool Status der "Holiday" Funktion + - battery_low # ro bool "Battery low" Status + - battery_level # ro num Batterie-Status in % + - lock # ro bool Tastensperre ber UI/API aktiv + - device_lock # ro bool Tastensperre direkt am Gert ein + - errorcode # ro num Fehlercodes die der HKR liefert + # aha-simpleonoff Attributes + - set_simpleonoff # wo bool Gert/Aktor/Lampe an-/ausschalten + - simpleonoff # rw bool Gert/Aktor/Lampe (Status und Setzen) + # aha-level Attributes + - set_level # wo num Level/Niveau von 0 bis 255 Setzen + - level # rw num Level/Niveau von 0 bis 255 (Setzen & Status) + - set_levelpercentage # wo num Level/Niveau von 0% bis 100% Setzen + - levelpercentage # rw num Level/Niveau von 0% bis 100% (Setzen & Status) + # aha-color Attributes + - set_hue # wo num Hue Setzen + - hue # rw num Hue (Status und Setzen) + - set_saturation # wo num Saturation Setzen + - saturation # rw num Saturation (Status und Setzen) + - set_colortemperature # wo num Farbtemperatur Setzen + - colortemperature # rw num Farbtemperatur (Status und Setzen) + - unmapped_hue # rw num Hue (Status und Setzen) + - unmapped_saturation # rw num Saturation (Status und Setzen) + - color_mode # ro num Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - supported_color_mode # ro num Untersttzer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - fullcolorsupport # ro bool Lampe untersttzt setunmappedcolor + - mapped # ro bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt + # aha-switch Attributes + - switch_state # rw bool Schaltzustand Steckdose (Status und Setzen) + - switch_mode # ro str Zeitschaltung oder manuell schalten + - switch_toggle # wo bool Schaltzustand umschalten (toggle) + # aha-powermeter Attributes + - power # ro num Leistung in W (Aktualisierung alle 2 min) + - energy # ro num absoluter Verbrauch seit Inbetriebnahme in Wh + - voltage # ro num Spannung in V (Aktualisierung alle 2 min) + # aha-humidity Attributes + - humidity # ro num Relative Luftfeuchtigkeit in % (FD440) + # aha-alarm Attributes + - alert_state # ro bool letzter bermittelter Alarmzustand + # aha-blind Attributes + - blind_mode # ro str automatische Zeitschaltung oder manuell fahren + - endpositionsset # ro bool ist die Endlage fr das Rollo konfiguriert avm_incoming_allowed: type: str - mandatory: False + mandatory: false description: - de: '(optional) Definition der erlaubten eingehenden Rufnummer in Items vom avm_data_type `monitor_trigger`.' - en: '(optional) Definition of the allowed incoming number. Only in items of avm_data_type `monitor_trigger`.' 
+ de: (optional) Definition der erlaubten eingehenden Rufnummer in Items vom avm_data_type `monitor_trigger`. + en: (optional) Definition of the allowed incoming number. Only in items of avm_data_type `monitor_trigger`. avm_target_number: type: str - mandatory: False + mandatory: false description: - de: '(optional) Definition der erlaubten angerufenen Rufnummer in Items vom avm_data_type `monitor_trigger`.' - en: '(optional) Definition of the allowed called number. Only in items of avm_data_type `monitor_trigger`.' + de: (optional) Definition der erlaubten angerufenen Rufnummer in Items vom avm_data_type `monitor_trigger`. + en: (optional) Definition of the allowed called number. Only in items of avm_data_type `monitor_trigger`. avm_wlan_index: type: int @@ -284,48 +304,48 @@ item_attributes: avm_mac: type: mac - mandatory: False + mandatory: false description: - de: '(optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory!' - en: '(optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items!' + de: (optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory! + en: (optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items! avm_ain: type: str - mandatory: False + mandatory: false description: - de: "(optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory!" - en: "(optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items!" + de: (optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory! + en: (optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items! avm_tam_index: type: int - mandatory: False + mandatory: false description: - de: '(optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt.' - en: '(optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines.' + de: (optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt. + en: (optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines. valid_min: 1 valid_max: 5 avm_deflection_index: type: int - mandatory: False + mandatory: false description: - de: '(optional) Index für die Rufumleitung, normalerweise für die erste eine "1".' - en: '(optional) Index deflection, normally a "1" for the first one.' + de: (optional) Index für die Rufumleitung, normalerweise für die erste eine "1". + en: (optional) Index deflection, normally a "1" for the first one. valid_min: 1 valid_max: 32 avm_read_after_write: type: int description: - de: '(optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird.' - en: '(optional) Configures delay in seconds to issue a read command after write command' + de: (optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird. 
+ en: (optional) Configures delay in seconds to issue a read command after write command avm_data_cycle: type: int - mandatory: False + mandatory: false description: - de: 'Poll-Zyklus des AVM Datentypes des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen' - en: 'Poll cycle of AVM Data Type of the respective item. 0-Just read at init; 10+ - cyclic reading' + de: Poll-Zyklus des AVM Datentypes des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen + en: Poll cycle of AVM Data Type of the respective item. 0-Just read at init; 10+ - cyclic reading item_structs: info: @@ -512,13 +532,13 @@ item_structs: visu_acl: ro avm_data_type@instance: wan_total_bytes_received current_bytes_sent: - type: num - visu_acl: ro - avm_data_type@instance: wan_current_bytes_sent + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_sent current_bytes_receive: - type: num - visu_acl: ro - avm_data_type@instance: wan_current_bytes_received + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_received link: type: bool visu_acl: ro @@ -541,10 +561,10 @@ item_structs: avm_data_type@instance: wlanconfig_ssid avm_wlan_index@instance: 1 wlan_1_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 1 + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 1 wlan_2: type: bool visu_acl: rw @@ -557,10 +577,10 @@ item_structs: avm_data_type@instance: wlanconfig_ssid avm_wlan_index@instance: 2 wlan_2_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 2 + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 2 wlan_gast: type: bool visu_acl: rw @@ -573,10 +593,10 @@ item_structs: avm_data_type@instance: wlanconfig_ssid avm_wlan_index@instance: 3 wlan_gast_associates: - type: num - visu_acl: ro - avm_data_type@instance: wlan_associates - avm_wlan_index@instance: 3 + type: num + visu_acl: ro + avm_data_type@instance: wlan_associates + avm_wlan_index@instance: 3 wlan_gast_tr: type: num visu_acl: rw @@ -684,9 +704,9 @@ item_structs: type: num aha_humidity_sensor: - humidity: - avm_data_type@instance: humidity - type: num + humidity: + avm_data_type@instance: humidity + type: num aha_alert: state: @@ -698,8 +718,8 @@ item_structs: avm_data_type@instance: switch_state type: bool switch_mode: - avm_data_type@instance: switch_mode - type: str + avm_data_type@instance: switch_mode + type: str switch_toggle: avm_data_type@instance: switch_toggle type: bool @@ -725,12 +745,12 @@ item_structs: type: num aha_blind: - blind_mode: - avm_data_type@instance: blind_mode - type: str - endpositionsset: - avm_data_type@instance: endpositionsset - type: bool + blind_mode: + avm_data_type@instance: blind_mode + type: str + endpositionsset: + avm_data_type@instance: endpositionsset + type: bool aha_on_off: on_off: @@ -746,33 +766,33 @@ item_structs: type: num aha_color: - color_mode: - avm_data_type@instance: color_mode - type: num - supported_color_mode: - avm_data_type@instance: supported_color_mode - type: num - fullcolorsupport: - avm_data_type@instance: fullcolorsupport - type: bool - mapped: - avm_data_type@instance: mapped - type: bool - hue: - avm_data_type@instance: hue - type: num - saturation: - avm_data_type@instance: saturation - type: num - unmapped_hue: - avm_data_type@instance: unmapped_hue - type: bool - unmapped_saturation: - avm_data_type@instance: unmapped_saturation 
- type: bool - colortemperature: - avm_data_type@instance: colortemperature - type: num + color_mode: + avm_data_type@instance: color_mode + type: num + supported_color_mode: + avm_data_type@instance: supported_color_mode + type: num + fullcolorsupport: + avm_data_type@instance: fullcolorsupport + type: bool + mapped: + avm_data_type@instance: mapped + type: bool + hue: + avm_data_type@instance: hue + type: num + saturation: + avm_data_type@instance: saturation + type: num + unmapped_hue: + avm_data_type@instance: unmapped_hue + type: bool + unmapped_saturation: + avm_data_type@instance: unmapped_saturation + type: bool + colortemperature: + avm_data_type@instance: colortemperature + type: num #item_attribute_prefixes: # Definition of item attributes that only have a common prefix (enter 'item_attribute_prefixes: NONE' or ommit this section, if section should be empty) @@ -783,245 +803,248 @@ plugin_functions: cancel_call: type: void description: - de: "Beendet einen aktiven Anruf." - en: "Cancels an active call." + de: Beendet einen aktiven Anruf. + en: Cancels an active call. parameters: # This function has no parameters get_call_origin: type: str description: - de: "Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist." - en: "Gets the phone name, currently set as 'call origin'." + de: Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist. + en: Gets the phone name, currently set as 'call origin'. parameters: # This function has no parameters get_calllist: type: list(dict(str)) description: - de: "Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige optional))." - en: "Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date', 'Duration' (some optional))." + de: Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', + 'Device', 'Port', 'Date',' Duration' (einige optional)). + en: Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', + 'Device', 'Port', 'Date', 'Duration' (some optional)). parameters: filter_incoming: type: str default: '' description: - de: "Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören." - en: "Filter to filter calls to a specific destination phone number." + de: Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören. + en: Filter to filter calls to a specific destination phone number. phonebook_id: type: int default: 0 description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." + de: ID des Telefonbuchs, in dem nachgeschlagen werden soll. + en: ID of the phone book, in which numbers should be looked up. get_contact_name_by_phone_number: type: str description: - de: "Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert." - en: "Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned." + de: Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. 
Falls kein Name gefunden wird, wird die Telefonnummer + zurückgeliefert. + en: Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned. parameters: phone_number: type: str description: - de: "Vollständige Telefonnummer" - en: "Complete phone number" + de: Vollständige Telefonnummer + en: Complete phone number phonebook_id: type: int default: 0 description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." + de: ID des Telefonbuchs, in dem nachgeschlagen werden soll. + en: ID of the phone book, in which numbers should be looked up. get_device_log_from_lua: type: list(list(str)) description: - de: "Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log." - en: "Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log." + de: Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log. + en: Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log. parameters: # This function has no parameters get_device_log_from_tr064: type: list(str) description: - de: "Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle." - en: "Gets the log entries on the device via the TR-064 interface." + de: Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle. + en: Gets the log entries on the device via the TR-064 interface. parameters: # This function has no parameters get_host_details: type: dict(str) description: - de: "Ermittelt die Informationen zu einem Host an einem angegebenen Index." - en: "Gets the information of a hosts at a specific index." + de: Ermittelt die Informationen zu einem Host an einem angegebenen Index. + en: Gets the information of a host at a specific index. parameters: index: type: int description: - de: "Index" - en: "Index" + de: Index + en: Index get_hosts: type: list(dict(str)) description: - de: "Ermittelt ein Array mit den Namen aller verbundener Hosts." - en: "Gets the name of all connected hosts as an array." + de: Ermittelt ein Array mit den Namen aller verbundener Hosts. + en: Gets the name of all connected hosts as an array. parameters: only_active: type: bool description: - de: "True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen." - en: "True, if only active hosts shall be returned." + de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. + en: True, if only active hosts shall be returned. get_phone_name: type: str description: - de: "Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden." - en: "Get the phone name at a specific index. The returend value can be used as phone_name for set_call_origin." + de: Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. + en: Get the phone name at a specific index. The returned value can be used as phone_name for set_call_origin. parameters: index: type: int description: - de: "Index" - en: "Index" + de: Index + en: Index get_phone_numbers_by_name: type: dict(dict(str)) description: - de: "Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern." - en: "Searches the phonebook for a contact by a given name and returns the corresponding phone numbers." 
+ de: Durchsucht das Telefonbuch mit einem Namen nach Kontakten und liefert die zugehörigen Telefonnummern. + en: Searches the phonebook for a contact by a given name and returns the corresponding phone numbers. parameters: name: type: str description: - de: "Anteiliger oder vollständiger Name des Kontakts." - en: "Partial or full name of the contact." + de: Anteiliger oder vollständiger Name des Kontakts. + en: Partial or full name of the contact. phonebook_id: type: int default: 0 description: - de: "ID des Telefonbuchs, in dem nachgeschlagen werden soll." - en: "ID of the phone book, in which numbers should be looked up." + de: ID des Telefonbuchs, in dem nachgeschlagen werden soll. + en: ID of the phone book, in which numbers should be looked up. is_host_active: type: bool description: - de: "Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden." - en: "Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection." + de: Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden. + en: Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection. parameters: mac_address: type: mac description: - de: "MAC Adresse" - en: "MAC address" + de: MAC Adresse + en: MAC address reboot: type: void description: - de: "Startet das Gerät neu." - en: "Reboots the device." + de: Startet das Gerät neu. + en: Reboots the device. reconnect: type: void description: - de: "Verbindet das Gerät neu mit dem WAN (Wide Area Network)." - en: "Reconnects the device to the WAN (Wide Area Network)." + de: Verbindet das Gerät neu mit dem WAN (Wide Area Network). + en: Reconnects the device to the WAN (Wide Area Network). set_call_origin: type: void description: - de: "Setzt den 'call origin', bspw. vor dem Aufruf von 'start_call'." - en: "Sets the 'call origin', e.g. before running 'start_call'." + de: Setzt den 'call origin', bspw. vor dem Aufruf von 'start_call'. + en: Sets the 'call origin', e.g. before running 'start_call'. parameters: phone_name: type: mac description: - de: "Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät." - en: "Full phone identifier, could be e.g. two asterisk followed by '610' for an internal device." + de: Identifikator des Telefons, das als 'call origin' gesetzt werden soll, bspw. zwei Sterne gefolgt von '610' für ein internes Gerät. + en: Full phone identifier, could be e.g. two asterisks followed by '610' for an internal device. start_call: type: void description: - de: "Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern)." - en: "Starts a call for a given phone number (internal or external)." + de: Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern). + en: Starts a call for a given phone number (internal or external). parameters: phone_number: type: str description: - de: "Vollständige Telefonnummer, die angerufen werden soll." - en: "Full phone number to call" + de: Vollständige Telefonnummer, die angerufen werden soll. + en: Full phone number to call wol: type: void description: - de: "Sendet einen WOL (WakeOnLAN) Befehl an eine MAC Adresse." - en: "Sends a WOL (WakeOnLAN) command to a MAC address." + de: Sendet einen WOL (WakeOnLAN) Befehl an eine MAC Adresse. + en: Sends a WOL (WakeOnLAN) command to a MAC address. 
parameters: mac_address: type: mac description: - de: "MAC Adresse" - en: "MAC address" + de: MAC Adresse + en: MAC address get_number_of_deflections: type: bool description: - de: "Liefert die Anzahl der Rufumleitungen zurück." - en: "Returns Number of set deflections." + de: Liefert die Anzahl der Rufumleitungen zurück. + en: Returns Number of set deflections. parameters: # This function has no parameters get_deflection: type: bool description: - de: "Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0)" - en: "Returns details of deflection with given deflection_id (default id = 0)" + de: Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0) + en: Returns details of deflection with given deflection_id (default id = 0) parameters: deflection_id: type: int description: - de: "Identifikator der abzufragenden Rufumleitung." - en: "Identifier of deflection." + de: Identifikator der abzufragenden Rufumleitung. + en: Identifier of deflection. get_deflections: type: bool description: - de: "Liefert die Details aller Rufumleitungen zurück." - en: "Returns details of all deflections." + de: Liefert die Details aller Rufumleitungen zurück. + en: Returns details of all deflections. parameters: # This function has no parameters set_deflection_enable: type: bool description: - de: "Schaltet die Rufumleitung mit angegebener ID an oder aus." - en: "Enables or disables deflection with given ID." + de: Schaltet die Rufumleitung mit angegebener ID an oder aus. + en: Enables or disables deflection with given ID. parameters: deflection_id: type: int description: - de: "Identifikator der abzufragenden Rufumleitung." - en: "identifier of deflection." + de: Identifikator der abzufragenden Rufumleitung. + en: identifier of deflection. 
enable: type: bool description: - de: "An / Aus" - en: "Enable / Disable" + de: An / Aus + en: Enable / Disable get_mesh_topology: - type: dict - description: - de: "Liefert die Mesh-Topologie als Dictionary" - en: "Lists mesh topology as dict" - parameters: + type: dict + description: + de: Liefert die Mesh-Topologie als Dictionary + en: Lists mesh topology as dict + parameters: # This function has no parameters get_hosts_dict: - type: dict - description: - de: "Liefert Informationen aller Hosts als Dictionary" - en: "Lists information of all hosts as dict" - parameters: + type: dict + description: + de: Liefert Informationen aller Hosts als Dictionary + en: Lists information of all hosts as dict + parameters: # This function has no parameters logic_parameters: NONE From f86ffc0b470ee0d899cd017d6162d50d7b7041d6 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 9 May 2023 14:05:12 +0200 Subject: [PATCH 097/775] AVM Plugin: - Bugfix of update of valid_list of avm_data_type in plugin.yaml by - Create valid_list_description --- avm/item_attributes.py | 6 +- avm/item_attributes_master.py | 59 ++-- avm/plugin.yaml | 548 ++++++++++++++++++++++------------ 3 files changed, 392 insertions(+), 221 deletions(-) diff --git a/avm/item_attributes.py b/avm/item_attributes.py index 6411e118d..a46bd826c 100644 --- a/avm/item_attributes.py +++ b/avm/item_attributes.py @@ -28,7 +28,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info'] +ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info'] ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] ALL_ATTRIBUTES_WRITEONLY = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] DEPRECATED_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 
'set_temperature_comfort', 'firmware_version', 'boost_active'] @@ -36,7 +36,7 @@ AHA_RO_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'windowopenactiveendtime', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_mode', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] -TR064_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] +TR064_ATTRIBUTES = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 
'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] AVM_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] CALL_MONITOR_ATTRIBUTES = ['call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing'] CALL_MONITOR_ATTRIBUTES_TRIGGER = ['monitor_trigger'] @@ -50,7 +50,7 @@ TAM_ATTRIBUTES = ['tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number'] WLAN_CONFIG_ATTRIBUTES = ['wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode'] WLAN_ATTRIBUTES = ['wlan_total_associates'] -FRITZ_DEVICE_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot'] +FRITZ_DEVICE_ATTRIBUTES = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot'] HOST_ATTRIBUTES = ['host_info'] HOSTS_ATTRIBUTES = ['hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url'] HOST_ATTRIBUTES_CHILD = ['network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active'] diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index 45e735785..427d6572e 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -63,9 +63,9 @@ AVM_DATA_TYPES = { 'tr064': { 'uptime': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Laufzeit des Fritzdevice in Sekunden'}, - 'software_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Serialnummer des Fritzdevice'}, - 'hardware_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 
'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Software Version'}, - 'serial_number': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hardware Version'}, + 'serial_number': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Serialnummer des Fritzdevice'}, + 'software_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Software Version'}, + 'hardware_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hardware Version'}, 'manufacturer': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller'}, 'product_class': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Produktklasse'}, 'manufacturer_oui': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller OUI'}, @@ -269,57 +269,64 @@ def export_item_attributs_py(): print('item_attributs.py successfully created!') -def create_plugin_yaml_avm_data_type_valid_list(ifaces: list = ATTRIBUTES_LIST): +def create_plugin_yaml_avm_data_type_valids(ifaces: list = ATTRIBUTES_LIST): """Create valid_list of avm_data_type based on master dict""" interface_group = None - valid_list_str = """\ - type: str - mandatory: True - description: - de: 'AVM Datentyp des jeweiligen Items.' - en: 'AVM Data Type of the respective item.' 
- valid_list: # NOTE: valid_list is automatically created by using item_attributes_master.py""" + valid_list_str = """ # NOTE: valid_list is automatically created by using item_attributes_master.py""" + valid_list_desc_str = """ # NOTE: valid_list_description is automatically created by using item_attributes_master.py""" for iface in ifaces: valid_list_str = f"{valid_list_str}\n # {iface} Attributes" + valid_list_desc_str = f"{valid_list_desc_str}\n # {iface} Attributes" + for avm_data_type in AVM_DATA_TYPES[iface]: interface_group_new = f"{AVM_DATA_TYPES[iface][avm_data_type]['interface']}-{AVM_DATA_TYPES[iface][avm_data_type]['group']}" if interface_group_new != interface_group: interface_group = interface_group_new + valid_list_str = f"""{valid_list_str}\n\ # {interface_group} Attributes""" + + valid_list_desc_str = f"""{valid_list_desc_str}\n\ + # {interface_group} Attributes""" + valid_list_str = f"""{valid_list_str}\n\ - - {avm_data_type!r:<40}# {AVM_DATA_TYPES[iface][avm_data_type]['access']:<5}{AVM_DATA_TYPES[iface][avm_data_type]['type']:<5}\t{AVM_DATA_TYPES[iface][avm_data_type]['description']:<}""" + - {avm_data_type!r:<40}# {AVM_DATA_TYPES[iface][avm_data_type]['access']:<5}{AVM_DATA_TYPES[iface][avm_data_type]['type']:<5}""" - return valid_list_str + valid_list_desc_str = f"""{valid_list_desc_str}\n\ + - '{AVM_DATA_TYPES[iface][avm_data_type]['description']:<}'""" + + return valid_list_str, valid_list_desc_str + +def update_plugin_yaml_avm_data_type(): + """Update ´'valid_list' and 'valid_list_description' of 'avm_data_type´ in plugin.yaml""" -def update_plugin_yaml_avm_data_type_valid_list(): yaml = ruamel.yaml.YAML() yaml.indent(mapping=4, sequence=4, offset=4) - yaml.width = 150 + yaml.width = 200 + yaml.allow_unicode = True - valid_list_str = create_plugin_yaml_avm_data_type_valid_list() - valid_list_yaml = yaml.load(valid_list_str) + valid_list_str, valid_list_description_str = create_plugin_yaml_avm_data_type_valids() with open(FILENAME_PLUGIN, 'r') as f: data = yaml.load(f) - avm_data_type = data.get('item_attributes', {}).get('avm_data_type') - if avm_data_type: - data['item_attributes']['avm_data_type'] = valid_list_yaml - with open(FILENAME_PLUGIN, 'w') as f: + if data.get('item_attributes', {}).get('avm_data_type'): + data['item_attributes']['avm_data_type']['valid_list'] = yaml.load(valid_list_str) + data['item_attributes']['avm_data_type']['valid_list_description'] = yaml.load(valid_list_description_str) + + with open(FILENAME_PLUGIN, 'w', encoding="utf-8") as f: yaml.dump(data, f) - print('valid_list of avm_data_type successfully updated!') + print('valid_list and valid_list_description of avm_data_type successfully updated in plugin.yaml!') else: - print('Error during updating valid_list of avm_data_type!') + print('Attribut "avm_data_type" not defined in plugin.yaml') if __name__ == '__main__': - # Run main to export item_attributes.py and update a´valid_list of avm_data_type in plugin.yaml + # Run main to export item_attributes.py and update ´valid_list and valid_list_description of avm_data_type in plugin.yaml export_item_attributs_py() - update_plugin_yaml_avm_data_type_valid_list() - + update_plugin_yaml_avm_data_type() # Notes: # - HOST_ATTRIBUTES: host index needed diff --git a/avm/plugin.yaml b/avm/plugin.yaml index d858d6f7c..4ba886316 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -27,16 +27,15 @@ parameters: type: str default: '' description: - de: (optional) Nutzername für den Login. Kann für manche Features benötigt werden! 
(Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box - auf `Anmeldung mit FRITZ!Box-Benutzernamen und Kennwort` notwendig - en: (optional) Login information (user). Can be needed to use some features of the AVM device. (Especially for Fritz!OS 7 the Fritz!Box should - be configured for login with username and password + de: (optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen + und Kennwort` notwendig + en: (optional) Login information (user). Can be needed to use some features of the AVM device. (Especially for Fritz!OS 7 the Fritz!Box should be configured for login with username and password password: type: str default: '' hide: true description: - de: (optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen. + de: (optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen. en: (optional) Password for login. Is normally always needed and recommended due to security reasons host: type: str @@ -48,7 +47,7 @@ parameters: type: int default: 49443 description: - de: (optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http + de: (optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http en: (optional) Port of the FritzDevice, typically 49443 for https or 49000 for http cycle: type: int @@ -66,29 +65,28 @@ parameters: type: bool default: false description: - de: (optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False. + de: (optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False. en: (optional) Turns certificate verification on or off. Typically False call_monitor: type: bool default: false description: - de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss - über ein verbundenes Telefon via #96*5* aktiviert sein.' - en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be - activated before by a connected telephone via calling #96*5*' + de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via #96*5* aktiviert + sein.' + en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be activated before by a connected telephone via calling + #96*5*' call_monitor_incoming_filter: type: str default: '' description: - de: (optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen - Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom - CallMonitor verarbeitet. + de: (optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring + bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet. en: (optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react. 
avm_home_automation: type: bool default: false description: - de: (optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface. + de: (optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface. en: (optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface log_entry_count: type: int @@ -100,8 +98,7 @@ parameters: type: bool default: false description: - de: (optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr - abgefragt. + de: (optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt. en: (optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle item_attributes: @@ -115,171 +112,340 @@ item_attributes: valid_list: # NOTE: valid_list is automatically created by using item_attributes_master.py # tr064 Attributes # tr064-fritz_device Attributes - - uptime # ro num Laufzeit des Fritzdevice in Sekunden - - software_version # ro str Serialnummer des Fritzdevice - - hardware_version # ro str Software Version - - serial_number # ro str Hardware Version - - manufacturer # ro str Hersteller - - product_class # ro str Produktklasse - - manufacturer_oui # ro str Hersteller OUI - - model_name # ro str Modelname - - description # ro str Modelbeschreibung - - device_log # ro str Gerte Log - - security_port # ro str Security Port - - reboot # wo bool Startet das Gert neu + - uptime # ro num + - serial_number # ro str + - software_version # ro str + - hardware_version # ro str + - manufacturer # ro str + - product_class # ro str + - manufacturer_oui # ro str + - model_name # ro str + - description # ro str + - device_log # ro str + - security_port # ro str + - reboot # wo bool # tr064-myfritz Attributes - - myfritz_status # ro bool MyFritz Status (an/aus) + - myfritz_status # ro bool # tr064-call_monitor Attributes - - call_direction # ro str Richtung des letzten Anrufes - - call_event # ro str Status des letzten Anrufes - - monitor_trigger # ro bool Monitortrigger - - is_call_incoming # ro bool Eingehender Anruf erkannt - - last_caller_incoming # ro str Letzter Anrufer - - last_call_date_incoming # ro str Zeitpunkt des letzten eingehenden Anrufs - - call_event_incoming # ro str Status des letzten eingehenden Anrufs - - last_number_incoming # ro str Nummer des letzten eingehenden Anrufes - - last_called_number_incoming # ro str Angerufene Nummer des letzten eingehenden Anrufs - - is_call_outgoing # ro bool Ausgehender Anruf erkannt - - last_caller_outgoing # ro str Letzter angerufener Kontakt - - last_call_date_outgoing # ro str Zeitpunkt des letzten ausgehenden Anrufs - - call_event_outgoing # ro str Status des letzten ausgehenden Anrufs - - last_number_outgoing # ro str Nummer des letzten ausgehenden Anrufes - - last_called_number_outgoing # ro str Letzte verwendete Telefonnummer fr ausgehenden Anruf - - call_duration_incoming # ro num Dauer des eingehenden Anrufs - - call_duration_outgoing # ro num Dauer des ausgehenden Anrufs + - call_direction # ro str + - call_event # ro str + - monitor_trigger # ro bool + - is_call_incoming # ro bool + - last_caller_incoming # ro str + - last_call_date_incoming # ro str + - call_event_incoming # ro str + - last_number_incoming # ro str + - last_called_number_incoming # ro str + - is_call_outgoing # ro bool + - 
last_caller_outgoing # ro str + - last_call_date_outgoing # ro str + - call_event_outgoing # ro str + - last_number_outgoing # ro str + - last_called_number_outgoing # ro str + - call_duration_incoming # ro num + - call_duration_outgoing # ro num # tr064-tam Attributes - - tam # rw bool TAM an/aus - - tam_name # ro str Name des TAM - - tam_new_message_number # ro num Anzahl der alten Nachrichten - - tam_old_message_number # ro num Anzahl der neuen Nachrichten - - tam_total_message_number # ro num Gesamtanzahl der Nachrichten + - tam # rw bool + - tam_name # ro str + - tam_new_message_number # ro num + - tam_old_message_number # ro num + - tam_total_message_number # ro num # tr064-wan Attributes - - wan_connection_status # ro str WAN Verbindungsstatus - - wan_connection_error # ro str WAN Verbindungsfehler - - wan_is_connected # ro bool WAN Verbindung aktiv - - wan_uptime # ro str WAN Verbindungszeit - - wan_ip # ro str WAN IP Adresse - - wan_upstream # ro num WAN Upstream Datenmenge - - wan_downstream # ro num WAN Downstream Datenmenge - - wan_total_packets_sent # ro num WAN Verbindung-Anzahl insgesamt versendeter Pakete - - wan_total_packets_received # ro num WAN Verbindung-Anzahl insgesamt empfangener Pakete - - wan_current_packets_sent # ro num WAN Verbindung-Anzahl aktuell versendeter Pakete - - wan_current_packets_received # ro num WAN Verbindung-Anzahl aktuell empfangener Pakete - - wan_total_bytes_sent # ro num WAN Verbindung-Anzahl insgesamt versendeter Bytes - - wan_total_bytes_received # ro num WAN Verbindung-Anzahl insgesamt empfangener Bytes - - wan_current_bytes_sent # ro num WAN Verbindung-Anzahl aktuelle Bitrate Senden - - wan_current_bytes_received # ro num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen - - wan_link # ro bool WAN Link + - wan_connection_status # ro str + - wan_connection_error # ro str + - wan_is_connected # ro bool + - wan_uptime # ro str + - wan_ip # ro str + - wan_upstream # ro num + - wan_downstream # ro num + - wan_total_packets_sent # ro num + - wan_total_packets_received # ro num + - wan_current_packets_sent # ro num + - wan_current_packets_received # ro num + - wan_total_bytes_sent # ro num + - wan_total_bytes_received # ro num + - wan_current_bytes_sent # ro num + - wan_current_bytes_received # ro num + - wan_link # ro bool # tr064-wlan_config Attributes - - wlanconfig # rw bool WLAN An/Aus - - wlanconfig_ssid # ro str WLAN SSID - - wlan_guest_time_remaining # ro num Verbleibende Zeit, bis zum automatischen Abschalten des Gste-WLAN - - wlan_associates # ro num Anzahl der verbundenen Gerte im jeweiligen WLAN - - wps_active # rw bool Schaltet WPS fr das entsprechende WlAN an / aus - - wps_status # ro str WPS Status des entsprechenden WlAN - - wps_mode # ro str WPS Modus des entsprechenden WlAN + - wlanconfig # rw bool + - wlanconfig_ssid # ro str + - wlan_guest_time_remaining # ro num + - wlan_associates # ro num + - wps_active # rw bool + - wps_status # ro str + - wps_mode # ro str # tr064-wlan Attributes - - wlan_total_associates # ro num Anzahl der verbundenen Gerte im WLAN + - wlan_total_associates # ro num # tr064-host Attributes - - hosts_count # ro num Anzahl der Hosts - - hosts_info # ro dict Informationen ber die Hosts - - mesh_topology # ro dict Topologie des Mesh - - number_of_hosts # ro bool Verbindungsstatus (Muss Child von "network_device" sein - - hosts_url # ro bool Verbindungsstatus (Muss Child von "network_device" sein - - mesh_url # ro bool Verbindungsstatus (Muss Child von "network_device" sein - - network_device # ro bool 
Verbindungsstatus // Defines Network device via MAC-Adresse - - device_ip # ro str Gerte-IP (Muss Child von "network_device" sein - - device_connection_type # ro str Verbindungstyp (Muss Child von "network_device" sein - - device_hostname # ro str Gertename (Muss Child von "network_device" sein - - connection_status # ro bool Verbindungsstatus (Muss Child von "network_device" sein - - is_host_active # ro bool Verbindungsstatus (Muss Child von "network_device" sein - - host_info # ro bool Verbindungsstatus (Muss Child von "network_device" sein + - hosts_count # ro num + - hosts_info # ro dict + - mesh_topology # ro dict + - number_of_hosts # ro bool + - hosts_url # ro bool + - mesh_url # ro bool + - network_device # ro bool + - device_ip # ro str + - device_connection_type # ro str + - device_hostname # ro str + - connection_status # ro bool + - is_host_active # ro bool + - host_info # ro bool # tr064-deflection Attributes - - number_of_deflections # ro num Anzahl der eingestellten Rufumleitungen - - deflections_details # ro dict Details zu allen Rufumleitung (als dict) - - deflection_details # ro dict Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item - - deflection_enable # rw bool Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_type # ro str Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_number # ro str Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_to_number # ro str Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_mode # ro str Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_outgoing # ro str Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item - - deflection_phonebook_id # ro str Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - number_of_deflections # ro num + - deflections_details # ro dict + - deflection_details # ro dict + - deflection_enable # rw bool + - deflection_type # ro str + - deflection_number # ro str + - deflection_to_number # ro str + - deflection_mode # ro str + - deflection_outgoing # ro str + - deflection_phonebook_id # ro str # tr064-homeauto Attributes - - aha_device # rw bool Steckdose schalten; siehe "switch_state" - - hkr_device # ro str Status des HKR (OPEN; CLOSED; TEMP) - - set_temperature # ro num siehe "target_temperature" - - temperature # ro num siehe "current_temperature" - - set_temperature_reduced # ro num siehe "temperature_reduced" - - set_temperature_comfort # ro num siehe "temperature_comfort" - - firmware_version # ro str siehe "fw_version" + - aha_device # rw bool + - hkr_device # ro str + - set_temperature # ro num + - temperature # ro num + - set_temperature_reduced # ro num + - set_temperature_comfort # ro num + - firmware_version # ro str # aha Attributes # aha-device Attributes - - device_id # ro str Gerte -ID - - manufacturer # ro str Hersteller - - product_name # ro str Produktname - - fw_version # ro str Firmware Version - - connected # ro bool Verbindungsstatus - - device_name # ro str Gertename - - tx_busy # ro bool Verbindung aktiv 
- - device_functions # ro list Im Gert vorhandene Funktionen + - device_id # ro str + - manufacturer # ro str + - product_name # ro str + - fw_version # ro str + - connected # ro bool + - device_name # ro str + - tx_busy # ro bool + - device_functions # ro list # aha-hkr Attributes - - set_target_temperature # wo num Soll-Temperatur Setzen - - target_temperature # rw num Soll-Temperatur (Status und Setzen) - - current_temperature # ro num Ist-Temperatur - - temperature_reduced # ro num Eingestellte reduzierte Temperatur - - temperature_comfort # ro num Eingestellte Komfort-Temperatur - - temperature_offset # ro num Eingestellter Temperatur-Offset - - set_window_open # wo bool "Window Open" Funktionen Setzen - - window_open # rw bool "Window Open" Funktion (Status und Setzen) - - windowopenactiveendtime # ro num Zeitliches Ende der "Window Open" Funktion - - set_hkr_boost # wo bool "Boost" Funktion Setzen - - hkr_boost # rw bool "Boost" Funktion (Status und Setzen) - - boost_active # ro bool Status der "Boost" Funktion - - boostactiveendtime # ro num Zeitliches Ende der "Boost" Funktion - - summer_active # ro bool Status der "Sommer" Funktion - - holiday_active # ro bool Status der "Holiday" Funktion - - battery_low # ro bool "Battery low" Status - - battery_level # ro num Batterie-Status in % - - lock # ro bool Tastensperre ber UI/API aktiv - - device_lock # ro bool Tastensperre direkt am Gert ein - - errorcode # ro num Fehlercodes die der HKR liefert + - set_target_temperature # wo num + - target_temperature # rw num + - current_temperature # ro num + - temperature_reduced # ro num + - temperature_comfort # ro num + - temperature_offset # ro num + - set_window_open # wo bool + - window_open # rw bool + - windowopenactiveendtime # ro num + - set_hkr_boost # wo bool + - hkr_boost # rw bool + - boost_active # ro bool + - boostactiveendtime # ro num + - summer_active # ro bool + - holiday_active # ro bool + - battery_low # ro bool + - battery_level # ro num + - lock # ro bool + - device_lock # ro bool + - errorcode # ro num # aha-simpleonoff Attributes - - set_simpleonoff # wo bool Gert/Aktor/Lampe an-/ausschalten - - simpleonoff # rw bool Gert/Aktor/Lampe (Status und Setzen) + - set_simpleonoff # wo bool + - simpleonoff # rw bool # aha-level Attributes - - set_level # wo num Level/Niveau von 0 bis 255 Setzen - - level # rw num Level/Niveau von 0 bis 255 (Setzen & Status) - - set_levelpercentage # wo num Level/Niveau von 0% bis 100% Setzen - - levelpercentage # rw num Level/Niveau von 0% bis 100% (Setzen & Status) + - set_level # wo num + - level # rw num + - set_levelpercentage # wo num + - levelpercentage # rw num # aha-color Attributes - - set_hue # wo num Hue Setzen - - hue # rw num Hue (Status und Setzen) - - set_saturation # wo num Saturation Setzen - - saturation # rw num Saturation (Status und Setzen) - - set_colortemperature # wo num Farbtemperatur Setzen - - colortemperature # rw num Farbtemperatur (Status und Setzen) - - unmapped_hue # rw num Hue (Status und Setzen) - - unmapped_saturation # rw num Saturation (Status und Setzen) - - color_mode # ro num Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - supported_color_mode # ro num Untersttzer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - - fullcolorsupport # ro bool Lampe untersttzt setunmappedcolor - - mapped # ro bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt + - set_hue # wo num + - hue # rw num + - set_saturation # wo num + - saturation # rw num + - 
set_colortemperature # wo num + - colortemperature # rw num + - unmapped_hue # rw num + - unmapped_saturation # rw num + - color_mode # ro num + - supported_color_mode # ro num + - fullcolorsupport # ro bool + - mapped # ro bool # aha-switch Attributes - - switch_state # rw bool Schaltzustand Steckdose (Status und Setzen) - - switch_mode # ro str Zeitschaltung oder manuell schalten - - switch_toggle # wo bool Schaltzustand umschalten (toggle) + - switch_state # rw bool + - switch_mode # ro str + - switch_toggle # wo bool # aha-powermeter Attributes - - power # ro num Leistung in W (Aktualisierung alle 2 min) - - energy # ro num absoluter Verbrauch seit Inbetriebnahme in Wh - - voltage # ro num Spannung in V (Aktualisierung alle 2 min) + - power # ro num + - energy # ro num + - voltage # ro num # aha-humidity Attributes - - humidity # ro num Relative Luftfeuchtigkeit in % (FD440) + - humidity # ro num # aha-alarm Attributes - - alert_state # ro bool letzter bermittelter Alarmzustand + - alert_state # ro bool # aha-blind Attributes - - blind_mode # ro str automatische Zeitschaltung oder manuell fahren - - endpositionsset # ro bool ist die Endlage fr das Rollo konfiguriert + - blind_mode # ro str + - endpositionsset # ro bool + valid_list_description: + # NOTE: valid_list_description is automatically created by using item_attributes_master.py + # tr064 Attributes + # tr064-fritz_device Attributes + - Laufzeit des Fritzdevice in Sekunden + - Serialnummer des Fritzdevice + - Software Version + - Hardware Version + - Hersteller + - Produktklasse + - Hersteller OUI + - Modelname + - Modelbeschreibung + - Geräte Log + - Security Port + - Startet das Gerät neu + # tr064-myfritz Attributes + - MyFritz Status (an/aus) + # tr064-call_monitor Attributes + - Richtung des letzten Anrufes + - Status des letzten Anrufes + - Monitortrigger + - Eingehender Anruf erkannt + - Letzter Anrufer + - Zeitpunkt des letzten eingehenden Anrufs + - Status des letzten eingehenden Anrufs + - Nummer des letzten eingehenden Anrufes + - Angerufene Nummer des letzten eingehenden Anrufs + - Ausgehender Anruf erkannt + - Letzter angerufener Kontakt + - Zeitpunkt des letzten ausgehenden Anrufs + - Status des letzten ausgehenden Anrufs + - Nummer des letzten ausgehenden Anrufes + - Letzte verwendete Telefonnummer für ausgehenden Anruf + - Dauer des eingehenden Anrufs + - Dauer des ausgehenden Anrufs + # tr064-tam Attributes + - TAM an/aus + - Name des TAM + - Anzahl der alten Nachrichten + - Anzahl der neuen Nachrichten + - Gesamtanzahl der Nachrichten + # tr064-wan Attributes + - WAN Verbindungsstatus + - WAN Verbindungsfehler + - WAN Verbindung aktiv + - WAN Verbindungszeit + - WAN IP Adresse + - WAN Upstream Datenmenge + - WAN Downstream Datenmenge + - WAN Verbindung-Anzahl insgesamt versendeter Pakete + - WAN Verbindung-Anzahl insgesamt empfangener Pakete + - WAN Verbindung-Anzahl aktuell versendeter Pakete + - WAN Verbindung-Anzahl aktuell empfangener Pakete + - WAN Verbindung-Anzahl insgesamt versendeter Bytes + - WAN Verbindung-Anzahl insgesamt empfangener Bytes + - WAN Verbindung-Anzahl aktuelle Bitrate Senden + - WAN Verbindung-Anzahl aktuelle Bitrate Empfangen + - WAN Link + # tr064-wlan_config Attributes + - WLAN An/Aus + - WLAN SSID + - Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN + - Anzahl der verbundenen Geräte im jeweiligen WLAN + - Schaltet WPS für das entsprechende WlAN an / aus + - WPS Status des entsprechenden WlAN + - WPS Modus des entsprechenden WlAN + # tr064-wlan Attributes + - 
Anzahl der verbundenen Geräte im WLAN + # tr064-host Attributes + - Anzahl der Hosts + - Informationen über die Hosts + - Topologie des Mesh + - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus // Defines Network device via MAC-Adresse + - Geräte-IP (Muss Child von "network_device" sein + - Verbindungstyp (Muss Child von "network_device" sein + - Gerätename (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein + # tr064-deflection Attributes + - Anzahl der eingestellten Rufumleitungen + - Details zu allen Rufumleitung (als dict) + - Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item + - Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + - Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item + # tr064-homeauto Attributes + - Steckdose schalten; siehe "switch_state" + - Status des HKR (OPEN; CLOSED; TEMP) + - siehe "target_temperature" + - siehe "current_temperature" + - siehe "temperature_reduced" + - siehe "temperature_comfort" + - siehe "fw_version" + # aha Attributes + # aha-device Attributes + - Geräte -ID + - Hersteller + - Produktname + - Firmware Version + - Verbindungsstatus + - Gerätename + - Verbindung aktiv + - Im Gerät vorhandene Funktionen + # aha-hkr Attributes + - Soll-Temperatur Setzen + - Soll-Temperatur (Status und Setzen) + - Ist-Temperatur + - Eingestellte reduzierte Temperatur + - Eingestellte Komfort-Temperatur + - Eingestellter Temperatur-Offset + - '"Window Open" Funktionen Setzen' + - '"Window Open" Funktion (Status und Setzen)' + - Zeitliches Ende der "Window Open" Funktion + - '"Boost" Funktion Setzen' + - '"Boost" Funktion (Status und Setzen)' + - Status der "Boost" Funktion + - Zeitliches Ende der "Boost" Funktion + - Status der "Sommer" Funktion + - Status der "Holiday" Funktion + - '"Battery low" Status' + - Batterie-Status in % + - Tastensperre über UI/API aktiv + - Tastensperre direkt am Gerät ein + - Fehlercodes die der HKR liefert + # aha-simpleonoff Attributes + - Gerät/Aktor/Lampe an-/ausschalten + - Gerät/Aktor/Lampe (Status und Setzen) + # aha-level Attributes + - Level/Niveau von 0 bis 255 Setzen + - Level/Niveau von 0 bis 255 (Setzen & Status) + - Level/Niveau von 0% bis 100% Setzen + - Level/Niveau von 0% bis 100% (Setzen & Status) + # aha-color Attributes + - Hue Setzen + - Hue (Status und Setzen) + - Saturation Setzen + - Saturation (Status und Setzen) + - Farbtemperatur Setzen + - Farbtemperatur (Status und Setzen) + - Hue (Status und Setzen) + - Saturation 
(Status und Setzen) + - Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - Lampe unterstützt setunmappedcolor + - von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt + # aha-switch Attributes + - Schaltzustand Steckdose (Status und Setzen) + - Zeitschaltung oder manuell schalten + - Schaltzustand umschalten (toggle) + # aha-powermeter Attributes + - Leistung in W (Aktualisierung alle 2 min) + - absoluter Verbrauch seit Inbetriebnahme in Wh + - Spannung in V (Aktualisierung alle 2 min) + # aha-humidity Attributes + - Relative Luftfeuchtigkeit in % (FD440) + # aha-alarm Attributes + - letzter übermittelter Alarmzustand + # aha-blind Attributes + - automatische Zeitschaltung oder manuell fahren + - ist die Endlage für das Rollo konfiguriert avm_incoming_allowed: type: str mandatory: false @@ -306,22 +472,22 @@ item_attributes: type: mac mandatory: false description: - de: (optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory! + de: (optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory! en: (optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items! avm_ain: type: str mandatory: false description: - de: (optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory! + de: (optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory! en: (optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items! avm_tam_index: type: int mandatory: false description: - de: (optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt. - en: (optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines. + de: (optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt. + en: (optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines. valid_min: 1 valid_max: 5 @@ -329,7 +495,7 @@ item_attributes: type: int mandatory: false description: - de: (optional) Index für die Rufumleitung, normalerweise für die erste eine "1". + de: (optional) Index für die Rufumleitung, normalerweise für die erste eine "1". en: (optional) Index deflection, normally a "1" for the first one. valid_min: 1 valid_max: 32 @@ -337,7 +503,7 @@ item_attributes: avm_read_after_write: type: int description: - de: (optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird. + de: (optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird. en: (optional) Configures delay in seconds to issue a read command after write command avm_data_cycle: @@ -811,7 +977,7 @@ plugin_functions: get_call_origin: type: str description: - de: Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist. + de: Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist. en: Gets the phone name, currently set as 'call origin'. 
parameters: # This function has no parameters @@ -819,16 +985,15 @@ plugin_functions: get_calllist: type: list(dict(str)) description: - de: Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', - 'Device', 'Port', 'Date',' Duration' (einige optional)). - en: Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', - 'Device', 'Port', 'Date', 'Duration' (some optional)). + de: Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige + optional)). + en: Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date', 'Duration' (some optional)). parameters: filter_incoming: type: str default: '' description: - de: Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören. + de: Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören. en: Filter to filter calls to a specific destination phone number. phonebook_id: type: int @@ -840,14 +1005,13 @@ plugin_functions: get_contact_name_by_phone_number: type: str description: - de: Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer - zurückgeliefert. + de: Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert. en: Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned. parameters: phone_number: type: str description: - de: Vollständige Telefonnummer + de: Vollständige Telefonnummer en: Complete phone number phonebook_id: type: int @@ -859,7 +1023,7 @@ plugin_functions: get_device_log_from_lua: type: list(list(str)) description: - de: Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log. + de: Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log. en: Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log. parameters: # This function has no parameters @@ -867,7 +1031,7 @@ plugin_functions: get_device_log_from_tr064: type: list(str) description: - de: Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle. + de: Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle. en: Gets the log entries on the device via the TR-064 interface. parameters: # This function has no parameters @@ -893,13 +1057,13 @@ plugin_functions: only_active: type: bool description: - de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. + de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. en: True, if only active hosts shall be returned. get_phone_name: type: str description: - de: Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. + de: Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. en: Get the phone name at a specific index. The returend value can be used as phone_name for set_call_origin. 
parameters: index: @@ -911,13 +1075,13 @@ plugin_functions: get_phone_numbers_by_name: type: dict(dict(str)) description: - de: Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern. + de: Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern. en: Searches the phonebook for a contact by a given name and returns the corresponding phone numbers. parameters: name: type: str description: - de: Anteiliger oder vollständiger Name des Kontakts. + de: Anteiliger oder vollständiger Name des Kontakts. en: Partial or full name of the contact. phonebook_id: type: int @@ -929,7 +1093,7 @@ plugin_functions: is_host_active: type: bool description: - de: Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden. + de: Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden. en: Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection. parameters: mac_address: @@ -941,13 +1105,13 @@ plugin_functions: reboot: type: void description: - de: Startet das Gerät neu. + de: Startet das Gerät neu. en: Reboots the device. reconnect: type: void description: - de: Verbindet das Gerät neu mit dem WAN (Wide Area Network). + de: Verbindet das Gerät neu mit dem WAN (Wide Area Network). en: Reconnects the device to the WAN (Wide Area Network). set_call_origin: @@ -959,19 +1123,19 @@ plugin_functions: phone_name: type: mac description: - de: Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät. + de: Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät. en: Full phone identifier, could be e.g. two asterisk followed by '610' for an internal device. start_call: type: void description: - de: Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern). + de: Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern). en: Starts a call for a given phone number (internal or external). parameters: phone_number: type: str description: - de: Vollständige Telefonnummer, die angerufen werden soll. + de: Vollständige Telefonnummer, die angerufen werden soll. en: Full phone number to call wol: @@ -989,7 +1153,7 @@ plugin_functions: get_number_of_deflections: type: bool description: - de: Liefert die Anzahl der Rufumleitungen zurück. + de: Liefert die Anzahl der Rufumleitungen zurück. en: Returns Number of set deflections. parameters: # This function has no parameters @@ -997,7 +1161,7 @@ plugin_functions: get_deflection: type: bool description: - de: Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0) + de: Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0) en: Returns details of deflection with given deflection_id (default id = 0) parameters: deflection_id: @@ -1009,7 +1173,7 @@ plugin_functions: get_deflections: type: bool description: - de: Liefert die Details aller Rufumleitungen zurück. + de: Liefert die Details aller Rufumleitungen zurück. en: Returns details of all deflections. 
parameters: # This function has no parameters From 36d6ed70774dd3ab21b34d77e8940db48d0cf5bb Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 9 May 2023 18:27:10 +0200 Subject: [PATCH 098/775] AVM Plugin: - Bugfix of update of valid_list of avm_data_type in plugin.yaml by - Create valid_list_description --- avm/item_attributes_master.py | 31 +++++---- avm/plugin.yaml | 119 +++++++++++++++++----------------- 2 files changed, 77 insertions(+), 73 deletions(-) diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index 427d6572e..cff7d2cf9 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -69,8 +69,8 @@ 'manufacturer': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller'}, 'product_class': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Produktklasse'}, 'manufacturer_oui': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller OUI'}, - 'model_name': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Modelname'}, - 'description': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Modelbeschreibung'}, + 'model_name': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Modellname'}, + 'description': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Modellbeschreibung'}, 'device_log': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Geräte Log'}, 'security_port': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Security Port'}, 'reboot': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Startet das Gerät neu'}, @@ -167,31 +167,31 @@ 'temperature_reduced': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellte reduzierte Temperatur'}, 'temperature_comfort': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellte Komfort-Temperatur'}, 'temperature_offset': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellter Temperatur-Offset'}, - 'set_window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Window Open" Funktionen Setzen'}, - 
'window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Window Open" Funktion (Status und Setzen)'}, + 'set_window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Window-Open-Funktion (Setzen)'}, + 'window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Window-Open-Funktion (Status und Setzen)'}, 'windowopenactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der "Window Open" Funktion'}, - 'set_hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion Setzen'}, - 'hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion (Status und Setzen)'}, + 'set_hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Boost-Funktion (Setzen)'}, + 'hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Boost-Funktion (Status und Setzen)'}, 'boost_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': True, 'supported_by_repeater': False, 'description': 'Status der "Boost" Funktion'}, - 'boostactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der "Boost" Funktion'}, + 'boostactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der Boost Funktion'}, 'summer_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status der "Sommer" Funktion'}, 'holiday_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status der "Holiday" Funktion'}, - 'battery_low': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Battery low" Status'}, + 'battery_low': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Battery-low Status'}, 'battery_level': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Batterie-Status in %'}, 'lock': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Tastensperre über UI/API aktiv'}, 
'device_lock': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Tastensperre direkt am Gerät ein'}, 'errorcode': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Fehlercodes die der HKR liefert'}, 'set_simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe an-/ausschalten'}, 'simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe (Status und Setzen)'}, - 'set_level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 Setzen'}, + 'set_level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 (Setzen)'}, 'level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 (Setzen & Status)'}, - 'set_levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% Setzen'}, + 'set_levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% (Setzen)'}, 'levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% (Setzen & Status)'}, - 'set_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue Setzen'}, + 'set_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Setzen)'}, 'hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, - 'set_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation Setzen'}, + 'set_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Setzen)'}, 'saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, - 'set_colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur 
Setzen'}, + 'set_colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Setzen)'}, 'colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Status und Setzen)'}, 'unmapped_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, 'unmapped_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, @@ -297,6 +297,8 @@ def create_plugin_yaml_avm_data_type_valids(ifaces: list = ATTRIBUTES_LIST): valid_list_desc_str = f"""{valid_list_desc_str}\n\ - '{AVM_DATA_TYPES[iface][avm_data_type]['description']:<}'""" + valid_list_desc_str = f"""{valid_list_desc_str}\n\r""" + return valid_list_str, valid_list_desc_str def update_plugin_yaml_avm_data_type(): @@ -306,10 +308,11 @@ def update_plugin_yaml_avm_data_type(): yaml.indent(mapping=4, sequence=4, offset=4) yaml.width = 200 yaml.allow_unicode = True + yaml.preserve_quotes = False valid_list_str, valid_list_description_str = create_plugin_yaml_avm_data_type_valids() - with open(FILENAME_PLUGIN, 'r') as f: + with open(FILENAME_PLUGIN, 'r', encoding="utf-8") as f: data = yaml.load(f) if data.get('item_attributes', {}).get('avm_data_type'): diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 4ba886316..8c52deefb 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -1,7 +1,7 @@ # Metadata for the plugin plugin: # Global plugin attributes - type: interface # plugin type (gateway, interface, protocol, system, web) + type: interface # plugin type (gateway, interface, protocol, system, web) description: de: Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, etc. en: Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets. @@ -27,15 +27,15 @@ parameters: type: str default: '' description: - de: (optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen - und Kennwort` notwendig - en: (optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Firtz!OS 7 the Fritz!Box should be configured for login with username and password + de: (optional) Nutzername für den Login. Kann für manche Features benötigt werden! (Speziell für Fritz!OS 7 ist die Konfiguration der Fritz!Box auf `Anmeldung mit FRITZ!Box-Benutzernamen und + Kennwort` notwendig + en: (optional) Login information (user). Can be needed to use some features of the AVM device. (Specially for Fritz!OS 7 the Fritz!Box should be configured for login with username and password password: type: str default: '' hide: true description: - de: (optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen. + de: (optional) Passwort für den Login. Wird in der Regel immer benötigt und aus Sicherheitsgründen empfohlen. en: (optional) Password for login. 
Is normally always needed and recommended due to security reasons host: type: str @@ -47,7 +47,7 @@ parameters: type: int default: 49443 description: - de: (optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http + de: (optional) Port des FritzDevice, normalerweise 49443 für https oder 49000 für http en: (optional) Port of the FritzDevice, typically 49443 for https or 49000 for http cycle: type: int @@ -65,28 +65,28 @@ parameters: type: bool default: false description: - de: (optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False. + de: (optional) Schaltet die Zertifikate-Prüfung an oder aus. Normalerweise False. en: (optional) Turns certificate verification on or off. Typically False call_monitor: type: bool default: false description: - de: '(optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via #96*5* aktiviert - sein.' - en: '(optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be activated before by a connected telephone via calling - #96*5*' + de: (optional) Aktiviert oder deaktiviert den MonitoringService, welcher auf den Call Monitor des FritzDevice verbindet. Der Call Monitor muss über ein verbundenes Telefon via '#96*5*' aktiviert + sein. + en: (optional) Activates or deactivates the MonitoringService, which connects to the FritzDevice`s call monitor. The call monitor has to be activated before by a connected telephone via calling + ''#96*5*' call_monitor_incoming_filter: type: str default: '' description: - de: (optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring - bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet. + de: (optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring bspw. + "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet. en: (optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react. avm_home_automation: type: bool default: false description: - de: (optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface. + de: (optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface. en: (optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface log_entry_count: type: int @@ -98,7 +98,7 @@ parameters: type: bool default: false description: - de: (optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt. + de: (optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt. en: (optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle item_attributes: @@ -109,7 +109,8 @@ item_attributes: description: de: AVM Datentyp des jeweiligen Items. en: AVM Data Type of the respective item. 
- valid_list: # NOTE: valid_list is automatically created by using item_attributes_master.py + valid_list: + # NOTE: valid_list is automatically created by using item_attributes_master.py # tr064 Attributes # tr064-fritz_device Attributes - uptime # ro num @@ -288,8 +289,8 @@ item_attributes: - Hersteller - Produktklasse - Hersteller OUI - - Modelname - - Modelbeschreibung + - Modellname + - Modellbeschreibung - Geräte Log - Security Port - Startet das Gerät neu @@ -396,16 +397,16 @@ item_attributes: - Eingestellte reduzierte Temperatur - Eingestellte Komfort-Temperatur - Eingestellter Temperatur-Offset - - '"Window Open" Funktionen Setzen' - - '"Window Open" Funktion (Status und Setzen)' + - Window-Open-Funktion (Setzen) + - Window-Open-Funktion (Status und Setzen) - Zeitliches Ende der "Window Open" Funktion - - '"Boost" Funktion Setzen' - - '"Boost" Funktion (Status und Setzen)' + - Boost-Funktion (Setzen) + - Boost-Funktion (Status und Setzen) - Status der "Boost" Funktion - - Zeitliches Ende der "Boost" Funktion + - Zeitliches Ende der Boost Funktion - Status der "Sommer" Funktion - Status der "Holiday" Funktion - - '"Battery low" Status' + - Battery-low Status - Batterie-Status in % - Tastensperre über UI/API aktiv - Tastensperre direkt am Gerät ein @@ -414,16 +415,16 @@ item_attributes: - Gerät/Aktor/Lampe an-/ausschalten - Gerät/Aktor/Lampe (Status und Setzen) # aha-level Attributes - - Level/Niveau von 0 bis 255 Setzen + - Level/Niveau von 0 bis 255 (Setzen) - Level/Niveau von 0 bis 255 (Setzen & Status) - - Level/Niveau von 0% bis 100% Setzen + - Level/Niveau von 0% bis 100% (Setzen) - Level/Niveau von 0% bis 100% (Setzen & Status) # aha-color Attributes - - Hue Setzen + - Hue (Setzen) - Hue (Status und Setzen) - - Saturation Setzen + - Saturation (Setzen) - Saturation (Status und Setzen) - - Farbtemperatur Setzen + - Farbtemperatur (Setzen) - Farbtemperatur (Status und Setzen) - Hue (Status und Setzen) - Saturation (Status und Setzen) @@ -463,8 +464,8 @@ item_attributes: avm_wlan_index: type: int description: - de: '(optional) Definition des Wlans ueber index: (1: 2.4Ghz, 2: 5Ghz, 3: Gaeste).' - en: '(optional) Definition of WiFi via index: (1: 2.4GHz, 2: 5GHz, 3: Guest)' + de: '(optional) Definition des Wlans über index (1: 2.4Ghz, 2: 5Ghz, 3: Gäste)' + en: '(optional) Definition of WiFi via index (1: 2.4GHz, 2: 5GHz, 3: Guest)' valid_min: 1 valid_max: 3 @@ -472,22 +473,22 @@ item_attributes: type: mac mandatory: false description: - de: (optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory! + de: (optional) Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory! en: (optional) Definition of the MAC address for items of avm_data_type `network_device`. Only mandatory for these items! avm_ain: type: str mandatory: false description: - de: (optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory! + de: (optional) Definition der AktorIdentifikationsNummer (AIN) Items vom avm_data_types für `AHA-Interface`. Nur für diese Items mandatory! en: (optional) Definition of the ActorIdentificationNumber (AIN) for items of avm_data_types `AHA-Interface`. Only mandatory for these items! avm_tam_index: type: int mandatory: false description: - de: (optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt. 
- en: (optional) Index für the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines. + de: (optional) Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät unterstützt. + en: (optional) Index for the answering machine, normally a "1" for the first one. Supported are up to 5 answering machines. valid_min: 1 valid_max: 5 @@ -495,7 +496,7 @@ item_attributes: type: int mandatory: false description: - de: (optional) Index für die Rufumleitung, normalerweise für die erste eine "1". + de: (optional) Index für die Rufumleitung, normalerweise für die erste eine "1". en: (optional) Index deflection, normally a "1" for the first one. valid_min: 1 valid_max: 32 @@ -503,14 +504,14 @@ item_attributes: avm_read_after_write: type: int description: - de: (optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird. + de: (optional) Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando gesendet wird. en: (optional) Configures delay in seconds to issue a read command after write command avm_data_cycle: type: int mandatory: false description: - de: Poll-Zyklus des AVM Datentypes des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen + de: Poll-Zyklus des AVM Datentyps des jeweiligen Items. 0-Nur beim Initialisieren Lesen; 10+ - Zyklisch Lesen en: Poll cycle of AVM Data Type of the respective item. 0-Just read at init; 10+ - cyclic reading item_structs: @@ -977,7 +978,7 @@ plugin_functions: get_call_origin: type: str description: - de: Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist. + de: Gib den Namen des Telefons zurück, das aktuell als 'call origin' gesetzt ist. en: Gets the phone name, currently set as 'call origin'. parameters: # This function has no parameters @@ -985,7 +986,7 @@ plugin_functions: get_calllist: type: list(dict(str)) description: - de: Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige + de: Ermittelt ein Array mit dicts aller Einträge der Anrufliste (Attribute 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date',' Duration' (einige optional)). en: Returns an array of dicts with all calllist entries (attributes 'Id', 'Type', 'Caller', 'Called', 'CalledNumber', 'Name', 'Numbertype', 'Device', 'Port', 'Date', 'Duration' (some optional)). parameters: @@ -993,7 +994,7 @@ plugin_functions: type: str default: '' description: - de: Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören. + de: Filter, um nur die Anrufe zu erhalten, die zu einer bestimmten angerufenen Nummer gehören. en: Filter to filter calls to a specific destination phone number. phonebook_id: type: int @@ -1005,13 +1006,13 @@ plugin_functions: get_contact_name_by_phone_number: type: str description: - de: Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert. + de: Durchsucht das Telefonbuch mit einer (vollständigen) Telefonnummer nach Kontakten. Falls kein Name gefunden wird, wird die Telefonnummer zurückgeliefert. en: Searches the phonebook for a contact by a given (complete) phone number. In case no name is found, the phone number is returned. 
parameters: phone_number: type: str description: - de: Vollständige Telefonnummer + de: Vollständige Telefonnummer en: Complete phone number phonebook_id: type: int @@ -1023,7 +1024,7 @@ plugin_functions: get_device_log_from_lua: type: list(list(str)) description: - de: Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log. + de: Ermittelt die Logeinträge auf dem Gerät über die LUA Schnittstelle /query.lua?mq_log=logger:status/log. en: Gets the log entries on the device via the LUA interface /query.lua?mq_log=logger:status/log. parameters: # This function has no parameters @@ -1031,7 +1032,7 @@ plugin_functions: get_device_log_from_tr064: type: list(str) description: - de: Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle. + de: Ermittelt die Logeinträge auf dem Gerät über die TR-064 Schnittstelle. en: Gets the log entries on the device via the TR-064 interface. parameters: # This function has no parameters @@ -1057,14 +1058,14 @@ plugin_functions: only_active: type: bool description: - de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. + de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. en: True, if only active hosts shall be returned. get_phone_name: type: str description: - de: Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. - en: Get the phone name at a specific index. The returend value can be used as phone_name for set_call_origin. + de: Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. + en: Get the phone name at a specific index. The returned value can be used as phone_name for set_call_origin. parameters: index: type: int @@ -1075,13 +1076,13 @@ plugin_functions: get_phone_numbers_by_name: type: dict(dict(str)) description: - de: Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern. + de: Durchsucht das Telefonbuch mit einem Namen nach nach Kontakten und liefert die zugehörigen Telefonnummern. en: Searches the phonebook for a contact by a given name and returns the corresponding phone numbers. parameters: name: type: str description: - de: Anteiliger oder vollständiger Name des Kontakts. + de: Anteiliger oder vollständiger Name des Kontakts. en: Partial or full name of the contact. phonebook_id: type: int @@ -1093,7 +1094,7 @@ plugin_functions: is_host_active: type: bool description: - de: Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden. + de: Prüft, ob eine MAC Adresse auf dem Gerät aktiv ist. Das kann bspw. für die Umsetzung einer Präsenzerkennung genutzt werden. en: Checks if a MAC address is active on the FritzDevice, e.g. the status can be used for simple presence detection. parameters: mac_address: @@ -1105,13 +1106,13 @@ plugin_functions: reboot: type: void description: - de: Startet das Gerät neu. + de: Startet das Gerät neu. en: Reboots the device. reconnect: type: void description: - de: Verbindet das Gerät neu mit dem WAN (Wide Area Network). + de: Verbindet das Gerät neu mit dem WAN (Wide Area Network). en: Reconnects the device to the WAN (Wide Area Network). set_call_origin: @@ -1123,19 +1124,19 @@ plugin_functions: phone_name: type: mac description: - de: Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät. 
+ de: Identifikator des Telefons, dass als 'call origin' gesetzt werden soll. bspw. zwei Sterne gefolgt von '610' für ein internes Gerät. en: Full phone identifier, could be e.g. two asterisk followed by '610' for an internal device. start_call: type: void description: - de: Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern). + de: Startet einen Anruf an eine übergebene Telefonnummer (intern oder extern). en: Starts a call for a given phone number (internal or external). parameters: phone_number: type: str description: - de: Vollständige Telefonnummer, die angerufen werden soll. + de: Vollständige Telefonnummer, die angerufen werden soll. en: Full phone number to call wol: @@ -1153,7 +1154,7 @@ plugin_functions: get_number_of_deflections: type: bool description: - de: Liefert die Anzahl der Rufumleitungen zurück. + de: Liefert die Anzahl der Rufumleitungen zurück. en: Returns Number of set deflections. parameters: # This function has no parameters @@ -1161,7 +1162,7 @@ plugin_functions: get_deflection: type: bool description: - de: Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0) + de: Liefert die Details der Rufumleitung der angegebenen ID zurück (Default-ID = 0) en: Returns details of deflection with given deflection_id (default id = 0) parameters: deflection_id: @@ -1173,7 +1174,7 @@ plugin_functions: get_deflections: type: bool description: - de: Liefert die Details aller Rufumleitungen zurück. + de: Liefert die Details aller Rufumleitungen zurück. en: Returns details of all deflections. parameters: # This function has no parameters From b19161a177e5ce7ce5302826ac1bd9f162b18f1f Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 9 May 2023 18:34:43 +0200 Subject: [PATCH 099/775] AVM Plugin: - Bugfix of update of valid_list of avm_data_type in plugin.yaml by - Create valid_list_description --- avm/item_attributes_master.py | 18 +++++++++--------- avm/plugin.yaml | 26 +++++++++++++------------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index cff7d2cf9..e83694670 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -124,16 +124,16 @@ 'hosts_count': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Anzahl der Hosts'}, 'hosts_info': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Informationen über die Hosts'}, 'mesh_topology': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Topologie des Mesh'}, - 'number_of_hosts': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, - 'hosts_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, - 'mesh_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, - 
'network_device': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus // Defines Network device via MAC-Adresse'}, - 'device_ip': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Geräte-IP (Muss Child von "network_device" sein'}, - 'device_connection_type': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungstyp (Muss Child von "network_device" sein'}, + 'number_of_hosts': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Anzahl der verbundenen Hosts (Muss Child von "network_device" sein)'}, + 'hosts_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'URL zu Hosts (Muss Child von "network_device" sein)'}, + 'mesh_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'URL zum Mesh (Muss Child von "network_device" sein)'}, + 'network_device': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus des Gerätes // Defines Network device via MAC-Adresse'}, + 'device_ip': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Geräte-IP (Muss Child von "network_device" sein)'}, + 'device_connection_type': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungstyp (Muss Child von "network_device" sein)'}, 'device_hostname': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Gerätename (Muss Child von "network_device" sein'}, - 'connection_status': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, - 'is_host_active': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, - 'host_info': {'interface': 'tr064', 'group': 'host', 'sub_group': 'host', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'connection_status': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein)'}, + 'is_host_active': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Host aktiv? 
(Muss Child von "network_device" sein)'}, + 'host_info': {'interface': 'tr064', 'group': 'host', 'sub_group': 'host', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Informationen zum Host (Muss Child von "network_device" sein)'}, 'number_of_deflections': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'gen', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Anzahl der eingestellten Rufumleitungen'}, 'deflections_details': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'gen', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Details zu allen Rufumleitung (als dict)'}, 'deflection_details': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item'}, diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 8c52deefb..4e3ffcd17 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -182,16 +182,16 @@ item_attributes: - hosts_count # ro num - hosts_info # ro dict - mesh_topology # ro dict - - number_of_hosts # ro bool - - hosts_url # ro bool - - mesh_url # ro bool + - number_of_hosts # ro num + - hosts_url # ro str + - mesh_url # ro str - network_device # ro bool - device_ip # ro str - device_connection_type # ro str - device_hostname # ro str - connection_status # ro bool - is_host_active # ro bool - - host_info # ro bool + - host_info # ro str # tr064-deflection Attributes - number_of_deflections # ro num - deflections_details # ro dict @@ -351,16 +351,16 @@ item_attributes: - Anzahl der Hosts - Informationen über die Hosts - Topologie des Mesh - - Verbindungsstatus (Muss Child von "network_device" sein - - Verbindungsstatus (Muss Child von "network_device" sein - - Verbindungsstatus (Muss Child von "network_device" sein - - Verbindungsstatus // Defines Network device via MAC-Adresse - - Geräte-IP (Muss Child von "network_device" sein - - Verbindungstyp (Muss Child von "network_device" sein + - Anzahl der verbundenen Hosts (Muss Child von "network_device" sein) + - URL zu Hosts (Muss Child von "network_device" sein) + - URL zum Mesh (Muss Child von "network_device" sein) + - Verbindungsstatus des Gerätes // Defines Network device via MAC-Adresse + - Geräte-IP (Muss Child von "network_device" sein) + - Verbindungstyp (Muss Child von "network_device" sein) - Gerätename (Muss Child von "network_device" sein - - Verbindungsstatus (Muss Child von "network_device" sein - - Verbindungsstatus (Muss Child von "network_device" sein - - Verbindungsstatus (Muss Child von "network_device" sein + - Verbindungsstatus (Muss Child von "network_device" sein) + - Host aktiv? 
(Muss Child von "network_device" sein) + - Informationen zum Host (Muss Child von "network_device" sein) # tr064-deflection Attributes - Anzahl der eingestellten Rufumleitungen - Details zu allen Rufumleitung (als dict) From 4e502a263b641cadc6e625c4ffda3516bd12b482 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 9 May 2023 20:14:58 +0200 Subject: [PATCH 100/775] AVM Plugin: - Add docu - some tweaking --- avm/item_attributes_master.py | 8 ++++---- avm/user_doc.rst | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index e83694670..de5a67c2b 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -25,7 +25,7 @@ FILENAME_PLUGIN = 'plugin.yaml' -HEADER = """\ +FILE_HEADER = """\ # !/usr/bin/env python # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # @@ -58,8 +58,8 @@ """ +# Note: change / add information of avm_data_type within the following dict according to the given scheme # 'avm_data_type': {'interface': 'tr064', 'group': '', 'sub_group': None, 'access': '', 'type': '', 'deprecated': False, 'supported_by_repeater': False, 'description': ''}, - AVM_DATA_TYPES = { 'tr064': { 'uptime': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Laufzeit des Fritzdevice in Sekunden'}, @@ -259,7 +259,7 @@ def export_item_attributs_py(): # create file and write header f = open(FILENAME_ATTRIBUTES, "w") - f.write(HEADER) + f.write(FILE_HEADER) f.close() # write avm_data_types @@ -334,4 +334,4 @@ def update_plugin_yaml_avm_data_type(): # Notes: # - HOST_ATTRIBUTES: host index needed # - HOSTS_ATTRIBUTES: no index needed -# - HOST_ATTRIBUTES_CHILD: avm_mac needed \ No newline at end of file +# - HOST_ATTRIBUTES_CHILD: avm_mac needed diff --git a/avm/user_doc.rst b/avm/user_doc.rst index 2e80842e2..a43c137ff 100644 --- a/avm/user_doc.rst +++ b/avm/user_doc.rst @@ -400,3 +400,19 @@ Beschreibung der Plugin-API .. image:: user_doc/assets/webif_tab6.jpg :class: screenshot + + +Vorgehen bei Funktionserweiterung des Plugins bzw. Ergänzung weiterer Werte für Itemattribut `avm_data_type` +------------------------------------------------------------------------------------------------------------ + +Augrund der Vielzahl der möglichen Werte des Itemattribut `avm_data_type` wurde die Erstellung/Update des entsprechenden Teils der +`plugin.yam` sowie die Erstellung der Datei `item_attributes.py`, die vom Plugin verwendet wird, automatisiert. + +Die Masterinformationen Itemattribut `avm_data_type` sowie die Skipte zum Erstellen/Update der beiden Dateien sind in der +Datei `item_attributes_master.py` enthalten. + +.. important:: + + Korrekturen, Erweiterungen etc. des Itemattributs `avm_data_type` sollten nur in der Datei `item_attributes_master.py` + in Dict der Variable `AVM_DATA_TYPES` vorgenommen werden. Das Ausführen der Datei `item_attributes_master.py` (main) erstellt die `item_attributes.py` und aktualisiert + `valid_list` und `valid_list_description` von `avm_data_type` in `plugin.yaml`. 
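For illustration only, such an extension could look like the following sketch. The attribute name `presence_detection_demo` and its field values are invented for this example, and it is assumed that executing `item_attributes_master.py` directly runs `export_item_attributs_py()` and `update_plugin_yaml_avm_data_type()` as described above::

    # 1) add the new entry inside AVM_DATA_TYPES['tr064'] in avm/item_attributes_master.py,
    #    following the scheme comment above the dict (this entry is purely illustrative):
    'presence_detection_demo': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child',
                                'access': 'ro', 'type': 'bool', 'deprecated': False,
                                'supported_by_repeater': True,
                                'description': 'Anwesenheitserkennung (nur als Beispiel)'},

    # 2) regenerate the derived files by running the module from the avm/ directory:
    #    python3 item_attributes_master.py
    #    -> rewrites item_attributes.py and refreshes valid_list /
    #       valid_list_description of avm_data_type in plugin.yaml

Editing only the master dict and then regenerating keeps `item_attributes.py` and `plugin.yaml` consistent without touching either file by hand.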
From b47e9302458fb686435ab902bf5d1c6032f17bfa Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Tue, 9 May 2023 22:55:00 +0200 Subject: [PATCH 101/775] stateengine plugin: keep variables after state evaluation so they can be used in enter Conditions. --- stateengine/StateEngineItem.py | 2 +- stateengine/user_doc/10_funktionen_variablen.rst | 7 +------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py index f5a5e5706..b883e1f40 100755 --- a/stateengine/StateEngineItem.py +++ b/stateengine/StateEngineItem.py @@ -776,7 +776,7 @@ def __update_check_can_enter(self, state): return state.can_enter() except Exception as ex: self.__logger.warning("Problem with currentstate {0}. Error: {1}", state.id, ex) - finally: + # The variables where originally reset in a finally: statement. No idea why... ;) self.__variables["current.state_id"] = "" self.__variables["current.state_name"] = "" self.__variables["current.conditionset_id"] = "" diff --git a/stateengine/user_doc/10_funktionen_variablen.rst b/stateengine/user_doc/10_funktionen_variablen.rst index d22c93a2c..e2b71a57b 100755 --- a/stateengine/user_doc/10_funktionen_variablen.rst +++ b/stateengine/user_doc/10_funktionen_variablen.rst @@ -213,16 +213,11 @@ auf das passende Unteritem in licht1.automatik.settings. **current.state_id:** *Die Id des Status, der gerade geprüft wird* -Diese Variable wird leer, sobald die Statusevaluierung beendet wurde, noch bevor die Aktionen des -zuletzt eingenommenen Zustands ausgeführt werden. Sie kann daher nur in der Evaluierung, nicht aber -in on_enter(_or_stay) genutzt werden. Hierfür wird stattdessen ``se_eval.get_relative_itemvalue('..state_id')`` genutzt. +Diese Variable wird während der Statusevaluierung befüllt. Die Werte bleiben bis zum nächsten Statuswechsel gleich. **current.state_name:** *Der Name des Status, der gerade geprüft wird* -Wie die state_id Variable wird diese nur während der Statusevaluierung entsprechend befüllt und sofort beim Eintritt -in einen neuen Zustand geleert (noch vor dem Durchführen der Aktionen). - Das angeführte Beispiel zeigt, wie eine Bedingung mit einem Wert abgeglichen werden kann, der in einem passenden Settingitem hinterlegt ist. 
Konkret würde beim Evaluieren vom Zustand_Eins mit dem Namen "sueden" die maximale From 27ddd3af544cfb2ec1f2c2cb7509837a6b8b3261 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 11 May 2023 09:41:35 +0200 Subject: [PATCH 102/775] stateengine plugin: minor code and log changes/fixes --- stateengine/StateEngineItem.py | 4 ++-- stateengine/StateEngineState.py | 3 +-- stateengine/StateEngineStruct.py | 6 +++--- stateengine/StateEngineValue.py | 2 +- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py index b883e1f40..24989a35f 100755 --- a/stateengine/StateEngineItem.py +++ b/stateengine/StateEngineItem.py @@ -1075,9 +1075,9 @@ def __write_to_log(self): _previousstate_conditionset_id = self.return_item_by_attribute("se_previousstate_conditionset_item_id") _previousstate_conditionset_name = self.return_item_by_attribute("se_previousstate_conditionset_item_name") if _previousstate_conditionset_id is not None: - self.__logger.info("Item 'Previouscondition Id': {0}", _previousstate_conditionset_id.property.path) + self.__logger.info("Item 'Previousstate condition Id': {0}", _previousstate_conditionset_id.property.path) if _previousstate_conditionset_name is not None: - self.__logger.info("Item 'Previouscondition Name': {0}", _previousstate_conditionset_name.property.path) + self.__logger.info("Item 'Previousstate condition Name': {0}", _previousstate_conditionset_name.property.path) # log states for state in self.__states: diff --git a/stateengine/StateEngineState.py b/stateengine/StateEngineState.py index 4095103c7..99ffbedfd 100755 --- a/stateengine/StateEngineState.py +++ b/stateengine/StateEngineState.py @@ -267,8 +267,7 @@ def update_name(self, item_state, recursion_depth=0): # Read configuration from item and populate data in class # item_state: item to read from # recursion_depth: current recursion_depth (recursion is canceled after five levels) - # item_stateengine: StateEngine-Item defining items for conditions - # abitem_object: Related SeItem instance for later determination of current age and current delay + # se_use: If se_use Attribute is used or not def __fill(self, item_state, recursion_depth, se_use=None): if recursion_depth > 5: self._log_error("{0}/{1}: too many levels of 'use'", self.id, item_state.property.path) diff --git a/stateengine/StateEngineStruct.py b/stateengine/StateEngineStruct.py index 76c83a227..0da5a4a89 100755 --- a/stateengine/StateEngineStruct.py +++ b/stateengine/StateEngineStruct.py @@ -72,7 +72,7 @@ def __init__(self, abitem, struct_path, global_struct): self.convert() def __repr__(self): - return "SeStruct {}".format(self.struct_path, self._conf) + return "SeStruct {}".format(self.struct_path) @staticmethod # Usage: dict_get(mydict, 'some.deeply.nested.value', 'my default') @@ -113,7 +113,7 @@ def __init__(self, abitem, struct_path, global_struct): #self._log_debug("Struct path {} for {}", self.struct_path, __class__.__name__) def __repr__(self): - return "SeStructMain {}".format(self.struct_path, self._conf) + return "SeStructMain {}".format(self.struct_path) def create_parent(self): try: @@ -171,7 +171,7 @@ def __init__(self, abitem, struct_path, global_struct): #self._log_debug("Struct path {} for {}", self.struct_path, __class__.__name__) def __repr__(self): - return "SeStructChild {}".format(self.struct_path, self._conf) + return "SeStructChild {}".format(self.struct_path) def get(self): try: diff --git a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py 
index b2bc7c69b..5ca3fac5e 100755 --- a/stateengine/StateEngineValue.py +++ b/stateengine/StateEngineValue.py @@ -415,7 +415,7 @@ def write_to_logger(self): if i is not None: self._log_debug("{0} from variable: {1}", self.__name, i) else: - self._log_debug("{0} from item: {1}", self.__name, self.__varname) + self._log_debug("{0} from variable: {1}", self.__name, self.__varname) # Get Text (similar to logger text) # prefix: Prefix for text From 750872097b09fd80ccd0bcf5510a619571b8f2cf Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 11 May 2023 09:42:55 +0200 Subject: [PATCH 103/775] stateengine plugin: fix major problem with internal structs --- stateengine/StateEngineValue.py | 1 + 1 file changed, 1 insertion(+) diff --git a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py index 5ca3fac5e..0dfe64255 100755 --- a/stateengine/StateEngineValue.py +++ b/stateengine/StateEngineValue.py @@ -137,6 +137,7 @@ def __resetvalue(self): # value: string indicating value or source of value # name: name of object ("time" is being handled differently) def set(self, value, name="", reset=True, item=None): + value = copy.deepcopy(value) if reset: self.__resetvalue() if isinstance(value, list): From 609efed95243fe431f7ffa13aaa51ae6845ed229 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 11 May 2023 09:43:55 +0200 Subject: [PATCH 104/775] stateengine plugin: temporarily set state_name and state_id on init --- stateengine/StateEngineState.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/stateengine/StateEngineState.py b/stateengine/StateEngineState.py index 99ffbedfd..de0e7ee09 100755 --- a/stateengine/StateEngineState.py +++ b/stateengine/StateEngineState.py @@ -116,6 +116,8 @@ def write_to_log(self): self._log_info("State {0}:", self.id) self._log_increase_indent() self.update_name(self.__item) + self._abitem.set_variable("current.state_name", self.name) + self._abitem.set_variable("current.state_id", self.id) self.__text.write_to_logger() self._log_info("Updating Web Interface...") self._log_increase_indent() @@ -165,7 +167,8 @@ def write_to_log(self): self.__actions_leave.write_to_logger() self._log_decrease_indent() self._abitem.update_webif([self.id, 'actions_leave'], self.__actions_leave.dict_actions) - + self._abitem.set_variable("current.state_name", "") + self._abitem.set_variable("current.state_id", "") self._log_decrease_indent() # run actions when entering the state From 96a4a289926d76c948f88c87bbcddea7b9b30e59 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 11 May 2023 09:44:29 +0200 Subject: [PATCH 105/775] stateengine plugin: adjust docu accordingly regarding current.state_name variable --- stateengine/user_doc/10_funktionen_variablen.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/stateengine/user_doc/10_funktionen_variablen.rst b/stateengine/user_doc/10_funktionen_variablen.rst index e2b71a57b..d22c93a2c 100755 --- a/stateengine/user_doc/10_funktionen_variablen.rst +++ b/stateengine/user_doc/10_funktionen_variablen.rst @@ -213,11 +213,16 @@ auf das passende Unteritem in licht1.automatik.settings. **current.state_id:** *Die Id des Status, der gerade geprüft wird* -Diese Variable wird während der Statusevaluierung befüllt. Die Werte bleiben bis zum nächsten Statuswechsel gleich. +Diese Variable wird leer, sobald die Statusevaluierung beendet wurde, noch bevor die Aktionen des +zuletzt eingenommenen Zustands ausgeführt werden. Sie kann daher nur in der Evaluierung, nicht aber +in on_enter(_or_stay) genutzt werden. 
Hierfür wird stattdessen ``se_eval.get_relative_itemvalue('..state_id')`` genutzt. **current.state_name:** *Der Name des Status, der gerade geprüft wird* +Wie die state_id Variable wird diese nur während der Statusevaluierung entsprechend befüllt und sofort beim Eintritt +in einen neuen Zustand geleert (noch vor dem Durchführen der Aktionen). + Das angeführte Beispiel zeigt, wie eine Bedingung mit einem Wert abgeglichen werden kann, der in einem passenden Settingitem hinterlegt ist. Konkret würde beim Evaluieren vom Zustand_Eins mit dem Namen "sueden" die maximale From 0106c694f56460872bc5b3e27a3ff9c9ae6a4532 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 11 May 2023 09:59:31 +0200 Subject: [PATCH 106/775] stateengine plugin: bump version to 1.9.6 --- stateengine/__init__.py | 2 +- stateengine/plugin.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/stateengine/__init__.py b/stateengine/__init__.py index e15e676da..ce2382c9d 100755 --- a/stateengine/__init__.py +++ b/stateengine/__init__.py @@ -39,7 +39,7 @@ class StateEngine(SmartPlugin): - PLUGIN_VERSION = '1.9.5' + PLUGIN_VERSION = '1.9.6' # Constructor # noinspection PyUnusedLocal,PyMissingConstructor diff --git a/stateengine/plugin.yaml b/stateengine/plugin.yaml index e59f0f985..d16e87f4e 100755 --- a/stateengine/plugin.yaml +++ b/stateengine/plugin.yaml @@ -39,7 +39,7 @@ plugin: state: ready support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1303071-stateengine-plugin-support - version: 1.9.5 + version: 1.9.6 sh_minversion: 1.6 multi_instance: False classname: StateEngine From e121704613e371dc766f51bb66c3db1f7e86c695 Mon Sep 17 00:00:00 2001 From: ivande Date: Sun, 14 May 2023 22:14:24 +0200 Subject: [PATCH 107/775] V2.0.0 Additions in locale; evaluate if debugging --- telegram/__init__.py | 155 ++++++++++++++-------------- telegram/_pv_1_7_1/locale.yaml | 9 +- telegram/locale.yaml | 37 +++++-- telegram/plugin.yaml | 12 +-- telegram/webif/templates/index.html | 6 +- 5 files changed, 121 insertions(+), 98 deletions(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index 7adbf3bf3..d693169cb 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -66,7 +66,7 @@ class Telegram(SmartPlugin): - PLUGIN_VERSION = "1.8.0" + PLUGIN_VERSION = "2.0.0" _items = [] # all items using attribute ``telegram_message`` _items_info = {} # dict used whith the info-command: key = attribute_value, val= item_list telegram_info @@ -88,9 +88,8 @@ def __init__(self, sh): super().__init__() if not self._init_complete: return - self.debug_enabled = self.logger.isEnabledFor(logging.DEBUG) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"init {__name__}") self._init_complete = False @@ -118,7 +117,7 @@ def __init__(self, sh): self._application = Application.builder().token(self._token).build() - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("adding command handlers to application") self._application.add_error_handler(self.eHandler) @@ -139,7 +138,7 @@ def __init__(self, sh): self.logger.error("Unable to start Webinterface") self._init_complete = False else: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("Init complete") self._init_complete = True @@ -158,7 +157,7 @@ def run(self): """ This is called when the plugins thread is about to run """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("Run method called") self.logics = 
Logics.get_instance() # Returns the instance of the Logics class, to be used to access the logics-api @@ -166,21 +165,21 @@ def run(self): self.alive = True self._loop.run_until_complete(self.run_coros()) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Run method ended") def stop(self): """ This is called when the plugins thread is about to stop """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("stop telegram plugin") try: if self._bye_msg: cids = [key for key, value in self._chat_ids_item().items() if value == 1] self.msg_broadcast(self._bye_msg, chat_id=cids) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("sent bye message") except Exception as e: self.logger.error(f"could not send bye message [{e}]") @@ -192,7 +191,7 @@ def stop(self): self.disconnect() if self._loop.is_running(): - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("stop telegram _loop.is_running") while self._loop.is_running(): asyncio.sleep(0.1) @@ -200,7 +199,7 @@ def stop(self): except Exception as e: self.logger.error(f"An error occurred while stopping the plugin [{e}]") - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("stop telegram plugin finished") async def run_coros(self): @@ -215,7 +214,7 @@ async def connect(self): """ Connects """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("connect method called") try: await self._application.initialize() @@ -224,13 +223,13 @@ async def connect(self): q = await self._updater.start_polling(timeout=self._long_polling_timeout) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"started polling the updater, Queue is {q}") self._bot = self._updater.bot self.logger.info(f"Telegram bot is listening: {await self._updater.bot.getMe()}") if self._welcome_msg: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"sent welcome message {self._welcome_msg}") cids = [key for key, value in self._chat_ids_item().items() if value == 1] await self.async_msg_broadcast(self._welcome_msg, chat_id=cids) @@ -239,7 +238,7 @@ async def connect(self): # catch Unauthorized errors due to an invalid token self.logger.error(f"Unable to start up Telegram conversation. Maybe an invalid token? 
{e}") return False - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("connect method end") async def sendQueue(self): @@ -249,7 +248,7 @@ async def sendQueue(self): dict txt: {"msgType":"Text", "msg":msg, "chat_id":chat_id, "reply_markup":reply_markup, "parse_mode":parse_mode } dict photo: {"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":chat_id, "caption":caption, "local_prepare":local_prepare} """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"sendQueue called - queue: [{self._queue}]") while self.alive: # infinite loop until self.alive = False try: @@ -259,13 +258,13 @@ async def sendQueue(self): except Exception as e: self.logger.debug(f"messageQueue Exception [{e}]") else: # message to be sent in the queue - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"message queue {message}") if message["msgType"] == "Text": await self.async_msg_broadcast(message["msg"], message["chat_id"], message["reply_markup"], message["parse_mode"]) if message["msgType"] == "Photo": await self.async_photo_broadcast(message["photofile_or_url"], message["caption"], message["chat_id"], message["local_prepare"]) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("sendQueue method end") async def disconnect(self): @@ -278,7 +277,7 @@ async def disconnect(self): await self._application.stop() await self._application.shutdown() - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"disconnect end") def parse_item(self, item): @@ -293,7 +292,7 @@ def parse_item(self, item): self._chat_ids_item = item if self.has_iattr(item.conf, ITEM_ATTR_MESSAGE): - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"parse item: {item}") self._items.append(item) return self.update_item @@ -305,15 +304,15 @@ def parse_item(self, item): if self.has_iattr(item.conf, ITEM_ATTR_INFO): key = self.get_iattr_value(item.conf, ITEM_ATTR_INFO) if self.is_valid_command(key): - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"parse item: {item} with command: {key}") if key in self._items_info: self._items_info[key].append(item) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Append a new item '{item}' to command '{key}'") else: self._items_info[key] = [item] # dem dict neue Liste hinzufuegen - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Register new command '{key}', add item '{item}' and register a handler") # add a handler for each info-attribute self._application.add_handler(CommandHandler(key, self.cHandler_info_attr)) @@ -322,7 +321,7 @@ def parse_item(self, item): self.logger.error(f"Command '{key}' chosen for item '{item}' is invalid for telegram botfather") if self.has_iattr(item.conf, ITEM_ATTR_TEXT): - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"parse item: {item.id()}") value = self.get_iattr_value(item.conf, ITEM_ATTR_TEXT) if value in ['true', 'True', '1']: @@ -355,16 +354,16 @@ def parse_item(self, item): if 'max' in k: max = v - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"parse control-item: {item} with command: {key}") dicCtl = {'name': key, 'type': changeType, 'item': item, 'question': question, 'timeout': timeout, 'min': min, 'max': max} if key not in 
self._items_control: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Append a new control-item '{item}' to command '{key}'") self._items_control[key] = dicCtl # add to dict - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Register new command '{key}', add item '{item}' and register a handler") # add a handler for each control-attribute self._application.add_handler(CommandHandler(key, self.cHandler_control_attr)) @@ -450,21 +449,21 @@ def update_item(self, item, caller=None, source=None, dest=None): cond = self.get_iattr_value(item.conf, ITEM_ATTR_CONDITION).lower() if cond == "on_change": if item.property.value != item.property.last_value and item.property.last_update <= item.property.last_change: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"condition {cond} met: {item.property.value}!={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") else: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"condition {cond} not met: {item.property.value}=={item.property.last_value}, last_update_age {item.property.last_update}, last_change_age {item.property.last_change}") return elif cond == "on_update": # this is standard behaviour pass else: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"ignoring unknown condition {cond}") # sending the message - # if self.debug_enabled: + # if self.logger.isEnabledFor(logging.DEBUG): # self.logger.debug(f"send Message: {msg_txt} to Chat_ID {msg_chat_id_txt}") self.msg_broadcast(msg_txt, msg_chat_id) @@ -477,26 +476,26 @@ async def async_msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_ :param reply_markup: :param parse_mode: """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"async msg_broadcast called") for cid in self.get_chat_id_list(chat_id): try: response = await self._bot.send_message(chat_id=cid, text=msg, reply_markup=reply_markup, parse_mode=parse_mode) if response: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Message sent:[{msg}] to Chat_ID:[{cid}] Bot:[{self._bot.bot}] response:[{response}]") else: self.logger.error(f"could not broadcast to chat id [{cid}] response: {response}") except TelegramError as e: self.logger.error(f"could not broadcast to chat id [{cid}] due to error {e}") except Exception as e: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): if self.alive: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"msg_broadcast called") q_msg= {"msgType":"Text", "msg":msg, "chat_id":chat_id, "reply_markup":reply_markup, "parse_mode":parse_mode } try: @@ -537,7 +536,7 @@ def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_pr :param chat_id: a chat id or a list of chat ids to identificate the chat(s) """ if self.alive: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"photo_broadcast called") q_msg= {"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":chat_id, "caption":caption, "local_prepare":local_prepare } try: @@ -650,26 +649,26 @@ async def mHandler(self, update: Update, 
context: ContextTypes.DEFAULT_TYPE) -> """ write the content (text) of the message in an SH-item """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"write the content (text) of the message in an SH-item for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_write_access_right(update.message.chat.id): try: if self._waitAnswer is None: # keine Antwort erwartet (control-Item/question) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"update.message.from_user.name={update.message.from_user.name}") text = update.message.from_user.name + ": " text += str(update.message.chat.id) + ": " # add the message.chat.id text += update.message.text # add the message.text for item in self._items_text_message: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"write item: {item.id()} value: {text}") item(text, caller=self.get_fullname()) # write text to SH-item else: # Antwort von control-Item/question wird erwartet text = update.message.text dicCtl = self._waitAnswer # _waitAnswer enthält dict mit weiteren Parametern valid = True # für Prüfung des Wertebereiches bei num - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"update.message.from_user.name={update.message.from_user.name} answer={text} name={dicCtl['name']}") if text == 'On': if dicCtl['type'] == 'onoff': @@ -697,19 +696,19 @@ async def mHandler(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> self._bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard":self.create_control_reply_markup()}) elif dicCtl['type'] == 'num': if type(text) == int or float: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: answer is num ") item = dicCtl['item'] newValue = text if dicCtl['min'] is not None: if float(newValue) < float(dicCtl['min']): valid = False - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: value:{newValue} to low:{dicCtl['min']}") if dicCtl['max'] is not None: if float(newValue) > float(dicCtl['max']): valid = False - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: value:{newValue} to high:{dicCtl['max']}") if valid: msg = f"{dicCtl['name']} \n change from:{item()} to:{newValue}" @@ -725,14 +724,14 @@ async def mHandler(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Control/Change item-values:"), reply_markup={"keyboard": self.create_control_reply_markup()}) self._waitAnswer = None except Exception as e: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Exception '{e}' occurred, traceback '{traceback.format_exc()}' Please inform plugin maintainer!") async def cHandler_time(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /time: return server time """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/time: return server time for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_access_right(update.message.chat.id): await context.bot.send_message(chat_id=update.message.chat.id, text=str(datetime.datetime.now())) @@ -741,7 +740,7 @@ async def cHandler_help(self, update: 
Update, context: ContextTypes.DEFAULT_TYPE """ /help: show available commands as keyboard """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/help: show available commands as keyboard for update={update}, chat_id={update.message.chat.id} and context={dir(context)}") if self.has_access_right(update.message.chat.id): await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("choose"), reply_markup={"keyboard": [["/hide","/start"], ["/time","/list"], ["/lo","/info"], ["/control", "/tr "]]}) @@ -750,7 +749,7 @@ async def cHandler_hide(self, update: Update, context: ContextTypes.DEFAULT_TYPE """ /hide: hide keyboard """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/hide: hide keyboard for bot={context.bot} and chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): hide_keyboard = {'hide_keyboard': True} @@ -760,7 +759,7 @@ async def cHandler_list(self, update: Update, context: ContextTypes.DEFAULT_TYPE """ /list: show registered items and value """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/list: show registered items and value for chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): await context.bot.send_message(chat_id=update.message.chat.id, text=self.list_items()) @@ -770,34 +769,34 @@ async def cHandler_info(self, update: Update, context: ContextTypes.DEFAULT_TYPE """ /info: show item-menu with registered items with specific attribute """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/info: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") if self.has_access_right(update.message.chat.id): if len(self._items_info) > 0: await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("Infos from the items:"), reply_markup={"keyboard": self.create_info_reply_markup()}) else: - await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_info!"), reply_markup={"keyboard": self.create_info_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("no items have attribute telegram_info!"), reply_markup={"keyboard": self.create_info_reply_markup()}) async def cHandler_start(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /start: show a welcome together with asking to add chat id to trusted chat ids """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/start: show a welcome together with asking to add chat id to trusted chat ids for chat_id={update.message.chat.id}") text = "" if self._chat_ids_item: ids = self._chat_ids_item() - text = self.translate(f"Your chat id is: {update.message.chat.id}") - if self.debug_enabled: + text = self.translate(f"Your chat id is:") + f" {update.message.chat.id}" + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f'update.message.chat.id={update.message.chat.id} with type={type(update.message.chat.id)}') self.logger.debug(f'ids dict={ids}') if update.message.chat.id in ids: if ids[update.message.chat.id]: - text += ", you have write access" + text += ", " + self.translate("you have write access") else: - text += ", you have read access" + text += ", " + self.translate("you have read access") else: - text = text+self.translate(", please add it to the list of 
trusted chat ids to get access") + text = text + ", " + self.translate("please add it to the list of trusted chat ids to get access") else: self.logger.warning('No chat_ids defined') @@ -808,23 +807,23 @@ async def cHandler_info_attr(self, update: Update, context: ContextTypes.DEFAULT /command show registered items and value with specific attribute/key where ``command`` is the value from an item with ``telegram_info`` attribute """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("Enter cHandler_info_attr") if self.has_access_right(update.message.chat.id): - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Gathering items to fulfill command {update.message.text}") c_key = update.message.text.replace("/", "", 1) if c_key in self._items_info: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"info-command: {c_key}") #self.list_items_info(update.message.chat.id, c_key) await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.list_items_info(c_key)) else: await context.bot.sendMessage(chat_id=update.message.chat.id, text=self.translate("unknown command %s") % c_key) else: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Chat with id {update.message.chat.id} has no right to use command {update.message.text}") - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("Leave cHandler_info_attr") async def cHandler_lo(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: @@ -855,7 +854,7 @@ async def cHandler_tr(self, update: Update, context: ContextTypes.DEFAULT_TYPE) if self.has_access_right(update.message.chat.id): logicname = context.args[0] try: - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"trigger_logic: {context.args}") self.logics.trigger_logic(logicname, by=self.get_shortname()) # Trigger a logic except Exception as e: @@ -867,7 +866,7 @@ async def cHandler_control(self, update: Update, context: ContextTypes.DEFAULT_T """ /control: Change values of items with specific attribute """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"/control: show item-menu with registered items with specific attribute for chat_id={update.message.chat.id}") if self.has_write_access_right(update.message.chat.id): if len(self._items_control) > 0: @@ -875,19 +874,19 @@ async def cHandler_control(self, update: Update, context: ContextTypes.DEFAULT_T await context.bot.send_message(chat_id=update.message.chat.id, text=self.list_items_control()) #self.list_items_control(update.message.chat.id) else: - await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("No items have attribute telegram_control!"), reply_markup={"keyboard": self.create_control_reply_markup()}) + await context.bot.send_message(chat_id=update.message.chat.id, text=self.translate("no items have attribute telegram_control!"), reply_markup={"keyboard": self.create_control_reply_markup()}) async def cHandler_control_attr(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None: """ /xx change value from registered items """ - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("Enter cHandler_control_attr") if self.has_write_access_right(update.message.chat.id): c_key = update.message.text.replace("/", "", 1) if c_key in self._items_control: dicCtl = self._items_control[c_key] 
#{'type':type,'item':item} - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-command: name:{c_key} dictCtl:{dicCtl}") await self.change_item(update=update, context=context, name=c_key, dicCtl=dicCtl) else: @@ -945,13 +944,13 @@ def create_control_reply_markup(self): for key, value in sorted(self._items_control.items()): button_list.append("/"+key) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"button_list: {button_list}") header = ["/help"] - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"header: {header}") keyboard = self.build_menu(button_list, n_cols=3, header_buttons=header) - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"keyboard: {keyboard}") return keyboard @@ -993,12 +992,12 @@ async def change_item(self, update, context, name, dicCtl): timeout = dicCtl['timeout'] text = "" if changeType == 'toggle': - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: type:toggle") if question != '': nd = (datetime.datetime.now()+ datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question @@ -1016,12 +1015,12 @@ async def change_item(self, update, context, name, dicCtl): text = f"{name}: {item()}\n" await context.bot.sendMessage(chat_id=chat_id, text=text) if changeType == 'on': - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: type:on") if question != '': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question @@ -1032,12 +1031,12 @@ async def change_item(self, update, context, name, dicCtl): text = f"{name}: {item()}\n" self._bot.sendMessage(chat_id=chat_id, text=text) if changeType == 'off': - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: type:off") if question != '': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) text = question @@ -1050,7 +1049,7 @@ async def change_item(self, update, context, name, dicCtl): if changeType == 'onoff': nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 
'context': context}, next=nd) if question == '': @@ -1062,7 +1061,7 @@ async def change_item(self, update, context, name, dicCtl): text = self.translate("insert a value") nd = (datetime.datetime.now() + datetime.timedelta(seconds=timeout)).replace(tzinfo=self._sh.tzinfo()) self._waitAnswer = dicCtl - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"control-item: add scheduler for answer-timout") self.scheduler_add('telegram_change_item_timeout', self.telegram_change_item_timeout, value={'update': update, 'context': context}, next=nd) await context.bot.sendMessage(chat_id=chat_id, text=text) @@ -1077,7 +1076,7 @@ async def telegram_change_item_timeout(self, **kwargs): update = kwargs['update'] if 'context' in kwargs: context = kwargs['context'] - if self.debug_enabled: + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Answer control_item timeout update:{update} context:{context}") if self._waitAnswer is not None: self._waitAnswer = None diff --git a/telegram/_pv_1_7_1/locale.yaml b/telegram/_pv_1_7_1/locale.yaml index 63f423cd9..955b5d62d 100644 --- a/telegram/_pv_1_7_1/locale.yaml +++ b/telegram/_pv_1_7_1/locale.yaml @@ -2,10 +2,15 @@ plugin_translations: # Translations for the plugin specially for the web interface 'choose': {'de': 'Auswählen', 'en': '=', 'fr': 'Choisissez parmi'} 'I will hide the keyboard': {'de': 'Ich blende die Bedientasten aus', 'en': '=', 'fr': 'Je cache le clavier'} - 'Info from the items:': {'de': 'Infos von den Items:', 'en': '=', 'fr': 'Info sur les Items:'} + 'Info from the items:': {'de': 'Infos von den Items:', 'en': '=', 'fr': 'Info sur les Items:'} 'unknown command %s': {'de': 'Unbekanntes Kommando %s:', 'en': '=', 'fr': 'commande inconnue %s:'} 'no items found with the attribute %s': {'de': 'Keine Items mit Attribut %s gefunden', 'en': '=', 'fr': 'Ne pas trouvée une Item avec %s'} - 'insert a value': {'de': 'Wert eingeben', 'en': '=', 'fr': 'insérer une valeur'} + 'insert a value': {'de': 'Wert eingeben', 'en': '=', 'fr': 'insérer une valeur'} + + 'Long Polling Timeout': {'de': '=', 'en': '=', 'fr': "Délai d'attente du long polling"} + 'Willkommensnachricht': {'de': '=', 'en': 'Welcome message', 'fr': 'essage de bienvenue'} + 'Ende Nachricht': {'de': '=', 'en': 'plugin stop message', 'fr': 'Message de fin'} + ' Sekunden': {'de': '=', 'en': ' seconds', 'fr': ' secondes'} # Alternative format for translations of longer texts: # 'Hier kommt der Inhalt des Webinterfaces hin.': diff --git a/telegram/locale.yaml b/telegram/locale.yaml index 63f423cd9..7bcc5d734 100755 --- a/telegram/locale.yaml +++ b/telegram/locale.yaml @@ -1,15 +1,34 @@ plugin_translations: - # Translations for the plugin specially for the web interface + # Translations for the plugin 'choose': {'de': 'Auswählen', 'en': '=', 'fr': 'Choisissez parmi'} 'I will hide the keyboard': {'de': 'Ich blende die Bedientasten aus', 'en': '=', 'fr': 'Je cache le clavier'} 'Info from the items:': {'de': 'Infos von den Items:', 'en': '=', 'fr': 'Info sur les Items:'} 'unknown command %s': {'de': 'Unbekanntes Kommando %s:', 'en': '=', 'fr': 'commande inconnue %s:'} 'no items found with the attribute %s': {'de': 'Keine Items mit Attribut %s gefunden', 'en': '=', 'fr': 'Ne pas trouvée une Item avec %s'} - 'insert a value': {'de': 'Wert eingeben', 'en': '=', 'fr': 'insérer une valeur'} - - # Alternative format for translations of longer texts: -# 'Hier kommt der Inhalt des Webinterfaces hin.': -# de: '=' -# en: 'Here goes the content of the web interface.' 
-# fr: 'Le contenu de l'interface web vient ici.' - + 'Control/Change item-values:': {'de': 'Item-Werte schalten/ändern:', 'en': '=', 'fr': 'Contrôle/Modification des valeurs des Item:'} + 'Your chat id is:': {'de': 'Deine Chat-ID lautet:', 'en': '=', 'fr': 'Votre identifiant de chat est :'} + 'you have write access': {'de': 'Du hast Schreibzugriff', 'en': '=', 'fr': 'Vous avez accès en écriture'} + 'you have read access': {'de': 'Du hast Lesezugriff', 'en': '=', 'fr': 'Vous avez accès en lecture'} + 'unknown control-command': {'de': 'Unbekanntes Control-Kommando', 'en': '=', 'fr': 'commande de contrôle inconnue'} + 'insert a value': {'de': 'Wert eingeben', 'en': '=', 'fr': 'Insérer une valeur'} + 'please add it to the list of trusted chat ids to get access': + de: 'Bitte füge es zur Liste der vertrauenswürdigen Chat-IDs hinzu, um Zugriff zu erhalten' + en: '=' + fr: "Veuillez l'ajouter à la liste des identifiants de chat de confiance pour obtenir l'accès" + 'no items have attribute telegram_control!': + de: 'Keine Items haben das Attribut "telegram_control"!' + en: '=' + fr: "Aucun Items n'a l'attribut 'telegram_control'!" + 'no items have attribute telegram_info!': + de: 'Keine Items haben das Attribut "telegram_info"!' + en: '=' + fr: "Aucun Items n'a l'attribut 'telegram_info'!" + 'no item found with the attribute': + de: 'Kein Item mit dem Attribut gefunden:' + en: '=' + fr: "Aucun Item trouvé avec l'attribut" + # Translations for the plugin web interface + 'Long Polling Timeout': {'de': '=', 'en': '=', 'fr': "Délai d'attente du long polling"} + 'Welcome message': {'de': 'Willkommensnachricht', 'en': '=', 'fr': 'essage de bienvenue'} + 'plugin stop message': {'de': 'Ende Nachricht', 'en': '=', 'fr': 'Message de fin'} + 'seconds': {'de': 'Sekunden', 'en': '=', 'fr': 'secondes'} diff --git a/telegram/plugin.yaml b/telegram/plugin.yaml index 6feb27fef..9ff8b0eb2 100755 --- a/telegram/plugin.yaml +++ b/telegram/plugin.yaml @@ -12,8 +12,8 @@ plugin: documentation: http://smarthomeng.de/user/plugins/telegram/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1548691-support-thread-für-das-telegram-plugin - version: 1.8.0 # Plugin version - sh_minversion: 1.8 # minimum shNG version to use this plugin + version: 2.0.0 # Plugin version + sh_minversion: 1.9.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.6 # minimum Python version to use for this plugin # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) @@ -48,15 +48,15 @@ parameters: type: str default: 'SmarthomeNG Telegram Plugin is up and running' description: - de: 'Willkommensnachricht, wird an alle vertrauenswürdigen Clients beim Start des Plugins gesendet. Ist der Wert leer, wird keine Willkommensnachricht gesendet.' - en: 'Welcome message, will be sent to all trusted chat clients at plugin start. An empty value indicates that no welcome message is sent.' + de: 'Willkommensnachricht, wird an auswählbare vertrauenswürdigen Clients beim Start des Plugins gesendet. Ist der Wert leer, wird keine Willkommensnachricht gesendet.' + en: 'Welcome message, will be sent to selectable trusted chat clients at plugin start. An empty value indicates that no welcome message is sent.' bye_msg: type: str default: 'SmartHomeNG Telegram Plugin stops' description: - de: 'Endenachricht, wird an alle vertrauenswürdigen Clients beim Stop des Plugins gesendet. 
Ist der Wert leer, wird keine Endenachricht gesendet.' - en: 'Bye message, will be sent to all trusted chat clients at plugin stop. An empty value indicates that no by message is sent.' + de: 'Endenachricht, wird an auswählbare vertrauenswürdigen Clients beim Stop des Plugins gesendet. Ist der Wert leer, wird keine Endenachricht gesendet.' + en: 'Bye message, will be sent to selectable trusted chat clients at plugin stop. An empty value indicates that no by message is sent.' no_access_msg: type: str diff --git a/telegram/webif/templates/index.html b/telegram/webif/templates/index.html index 94df37107..04b66d672 100755 --- a/telegram/webif/templates/index.html +++ b/telegram/webif/templates/index.html @@ -42,16 +42,16 @@ {{ _('Long Polling Timeout') }} - {{ p._long_polling_timeout ~ _(' Sekunden')}} + {{ p._long_polling_timeout ~ ' ' ~ _('seconds')}} - {{ _('Willkommensnachricht') }} + {{ _('Welcome message') }} {{ p._welcome_msg }} - {{ _('Ende Nachricht') }} + {{ _('plugin stop message') }} {{ p._bye_msg }} From 71bd3fe818d653349ee1ca3d89b7e69821aa147f Mon Sep 17 00:00:00 2001 From: ivande Date: Sun, 14 May 2023 22:42:20 +0200 Subject: [PATCH 108/775] py_minversion removed. --- telegram/plugin.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/telegram/plugin.yaml b/telegram/plugin.yaml index 9ff8b0eb2..5562334a0 100755 --- a/telegram/plugin.yaml +++ b/telegram/plugin.yaml @@ -14,9 +14,6 @@ plugin: version: 2.0.0 # Plugin version sh_minversion: 1.9.5 # minimum shNG version to use this plugin - # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) - py_minversion: 3.6 # minimum Python version to use for this plugin - # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance restartable: True classname: Telegram # class containing the plugin From 2d04076122f40f59d776b685931fe3ecf391c64c Mon Sep 17 00:00:00 2001 From: ivande Date: Tue, 16 May 2023 09:20:23 +0200 Subject: [PATCH 109/775] retry sending in case of error (with parameters) --- telegram/__init__.py | 75 ++++++++++++++++++++++++++++++++++++++------ telegram/plugin.yaml | 21 +++++++++++-- 2 files changed, 84 insertions(+), 12 deletions(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index d693169cb..d873c649a 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -111,6 +111,8 @@ def __init__(self, sh): self._no_write_access_msg = self.get_parameter_value('no_write_access_msg') self._long_polling_timeout = self.get_parameter_value('long_polling_timeout') self._pretty_thread_names = self.get_parameter_value('pretty_thread_names') + self._resend_delay = self.get_parameter_value('resend_delay') + self._resend_attemps = self.get_parameter_value('resend_attemps') self._bot = None self._queue = Queue() @@ -232,7 +234,7 @@ async def connect(self): if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"sent welcome message {self._welcome_msg}") cids = [key for key, value in self._chat_ids_item().items() if value == 1] - await self.async_msg_broadcast(self._welcome_msg, chat_id=cids) + self.msg_broadcast(self._welcome_msg, chat_id=cids) except TelegramError as e: # catch Unauthorized errors due to an invalid token @@ -258,12 +260,44 @@ async def sendQueue(self): except Exception as e: self.logger.debug(f"messageQueue Exception [{e}]") else: # message to be sent in the queue - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(f"message queue {message}") - if message["msgType"] == 
"Text": - await self.async_msg_broadcast(message["msg"], message["chat_id"], message["reply_markup"], message["parse_mode"]) - if message["msgType"] == "Photo": - await self.async_photo_broadcast(message["photofile_or_url"], message["caption"], message["chat_id"], message["local_prepare"]) + resendDelay = 0 + resendAttemps = 0 + if "resendDelay" in message: + resendDelay = message["resendDelay"] + if "resendAttemps" in message: + resendAttemps = message["resendAttemps"] + + if resendDelay <= 0: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"message queue {message}") + if message["msgType"] == "Text": + result = await self.async_msg_broadcast(message["msg"], message["chat_id"], message["reply_markup"], message["parse_mode"]) + elif message["msgType"] == "Photo": + result = await self.async_photo_broadcast(message["photofile_or_url"], message["caption"], message["chat_id"], message["local_prepare"]) + + # An error occurred while sending - result: list containing the dic of the failed send attempt + if result: + for res in result: + resendAttemps+=1 + if resendAttemps > self._resend_attemps: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"don't initiate any further send attempts for: {res}") + break + else: + resendDelay = self._resend_delay + + # Including the sendDelay and sendAttempts in the queue message for the next send attempt. + res["resendDelay"] = resendDelay + res["resendAttemps"] = resendAttemps + + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"new send attempt by placing it in the queue. sendAttemps:{resendAttemps} sendDelay:{resendDelay} [{res}]") + self._queue.put(res) # new send attempt by replacing the message in the queue + else: + message["resendDelay"] = resendDelay - 1 + await asyncio.sleep(1) + self._queue.put(message) # new send attempt by replacing the message in the queue + if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("sendQueue method end") @@ -476,6 +510,7 @@ async def async_msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_ :param reply_markup: :param parse_mode: """ + sendResult = [] if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"async msg_broadcast called") @@ -486,12 +521,20 @@ async def async_msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_ if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Message sent:[{msg}] to Chat_ID:[{cid}] Bot:[{self._bot.bot}] response:[{response}]") else: + sendResult.append({"msgType":"Text", "msg":msg, "chat_id":cid, "reply_markup":reply_markup, "parse_mode":parse_mode }) self.logger.error(f"could not broadcast to chat id [{cid}] response: {response}") except TelegramError as e: + sendResult.append({"msgType":"Text", "msg":msg, "chat_id":cid, "reply_markup":reply_markup, "parse_mode":parse_mode }) self.logger.error(f"could not broadcast to chat id [{cid}] due to error {e}") except Exception as e: + sendResult.append({"msgType":"Text", "msg":msg, "chat_id":cid, "reply_markup":reply_markup, "parse_mode":parse_mode }) if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"Exception '{e}' occurred, please inform plugin maintainer!") + if not sendResult: + return None + else: + return sendResult + def msg_broadcast(self, msg, chat_id=None, reply_markup=None, parse_mode=None): if self.alive: @@ -512,19 +555,31 @@ async def async_photo_broadcast(self, photofile_or_url, caption=None, chat_id=No :param caption: caption of image to send :param chat_id: a chat id or a list of chat ids to identificate 
the chat(s) """ + sendResult = [] for cid in self.get_chat_id_list(chat_id): try: if photofile_or_url.startswith("http"): if local_prepare: photo_raw = requests.get(photofile_or_url) photo_data = BytesIO(photo_raw.content) - await self._bot.send_photo(chat_id=cid, photo=photo_data, caption=caption) + response = await self._bot.send_photo(chat_id=cid, photo=photo_data, caption=caption) else: - await self._bot.send_photo(chat_id=cid, photo=photofile_or_url, caption=caption) + response = await self._bot.send_photo(chat_id=cid, photo=photofile_or_url, caption=caption) else: - await self._bot.send_photo(chat_id=cid, photo=open(str(photofile_or_url), 'rb'), caption=caption) + response = await self._bot.send_photo(chat_id=cid, photo=open(str(photofile_or_url), 'rb'), caption=caption) + if response: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(f"Photo sent to Chat_ID:[{cid}] Bot:[{self._bot.bot}] response:[{response}]") + else: + sendResult.append({"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":cid, "caption":caption, "local_prepare":local_prepare }) + self.logger.error(f"could not broadcast to chat id [{cid}] response: {response}") except Exception as e: + sendResult.append({"msgType":"Photo", "photofile_or_url":photofile_or_url, "chat_id":cid, "caption":caption, "local_prepare":local_prepare }) self.logger.error(f"Error '{e}' could not send image {photofile_or_url} to chat id {cid}") + if not sendResult: + return None + else: + return sendResult def photo_broadcast(self, photofile_or_url, caption=None, chat_id=None, local_prepare=True): """ diff --git a/telegram/plugin.yaml b/telegram/plugin.yaml index 5562334a0..18bb738dc 100755 --- a/telegram/plugin.yaml +++ b/telegram/plugin.yaml @@ -14,6 +14,9 @@ plugin: version: 2.0.0 # Plugin version sh_minversion: 1.9.5 # minimum shNG version to use this plugin + # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) + # py_minversion: 3.6 # minimum Python version to use for this plugin + # py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance restartable: True classname: Telegram # class containing the plugin @@ -38,8 +41,22 @@ parameters: # 2 minutes should be long enough to avoid problems with too many connections to telegram server default: 120 description: - de: 'Zeit nachdem eine neue Verbindung zum Telegram Server aufgebaut um Updates zu holen' - en: 'Time after a new link to telegram server will be established to get updates' + de: 'Zeit (Sekunden) nachdem eine neue Verbindung zum Telegram Server aufgebaut um Updates zu holen' + en: 'Time (seconds) after a new link to telegram server will be established to get updates' + + resend_delay: + type: num + default: 60 + description: + de: 'Zeit (Sekunden) nachdem eine neuer Sendeversuch unternommen wird' + en: 'Time (seconds) after which a new sending attempt is made' + + resend_attemps: + type: num + default: 10 + description: + de: 'Nach dieser Anzahl an Sendeversuchen erfolgt keine Sendeversuch mehr' + en: 'After this number of sending attempts, no further sending attempts will be made' welcome_msg: type: str From 55d98c5290acfcae227cb68c27d7d5b1024ebb3b Mon Sep 17 00:00:00 2001 From: ivande Date: Tue, 16 May 2023 09:56:22 +0200 Subject: [PATCH 110/775] attempt to catch the exception in the updater --- telegram/__init__.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/telegram/__init__.py b/telegram/__init__.py index 
d873c649a..f6df66617 100755 --- a/telegram/__init__.py +++ b/telegram/__init__.py @@ -223,7 +223,7 @@ async def connect(self): await self._application.start() self._updater = self._application.updater - q = await self._updater.start_polling(timeout=self._long_polling_timeout) + q = await self._updater.start_polling(timeout=self._long_polling_timeout, error_callback=self.error_handler) if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"started polling the updater, Queue is {q}") @@ -243,6 +243,15 @@ async def connect(self): if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug("connect method end") + def error_handler(self, update, context): + """ + Just logs an error in case of a problem + """ + try: + self.logger.warning(f'Update {update} caused error {context.error}') + except Exception: + pass + async def sendQueue(self): """ Waiting for messages to be sent in the queue and sending them to Telegram. From d0c510cb30dbd7dd2deef22677c9f2fd989773d1 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 17 May 2023 10:22:58 +0200 Subject: [PATCH 111/775] Pioneer Plugin: Add and tweak some more commands --- pioneer/commands.py | 133 +++++++++++++++++++++++++++++++++++++++++-- pioneer/datatypes.py | 68 ++++++++++++++++++++++ 2 files changed, 196 insertions(+), 5 deletions(-) diff --git a/pioneer/commands.py b/pioneer/commands.py index dc1e35289..72be9ddfa 100755 --- a/pioneer/commands.py +++ b/pioneer/commands.py @@ -3,10 +3,10 @@ # commands for dev pioneer models = { - 'ALL': ['general.pqls', 'general.dimmer', 'general.sleep', 'general.display', 'general.error', 'general.multizone', 'tuner', 'zone1', 'zone2.control', 'hdzone'], - 'SC-LX87': ['general.amp', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], - 'SC-LX77': ['general.amp', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], - 'SC-LX57': ['general.amp', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'ALL': ['general.pqls', 'general.setup.surroundposition', 'general.setup.speakersystem', 'general.setup.xcurve', 'general.setup.xover', 'general.setup.hdmi', 'general.setup.name', 'general.setup.language', 'general.dimmer', 'general.sleep', 'general.display', 'general.error', 'general.multizone', 'tuner', 'zone1', 'zone2.control', 'hdzone'], + 'SC-LX87': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'SC-LX77': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'SC-LX57': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], 'SC-2023': ['zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], 'SC-1223': ['zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control'], 'VSX-1123': [], @@ -21,7 +21,25 @@ 'dimmer': {'read': True, 'write': True, 'write_cmd': '{RAW_VALUE}SAA', 'cmd_settings': {'force_min': 0, 'force_max': 3}, 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'SAA(\d)', 'item_attrs': {'attributes': {'remark': '0 = very bright, 1 = bright, 2 = dark, 3 = off'}}}, 'sleep': {'read': True, 'write': True, 'read_cmd': '?SAB', 'write_cmd': '{VALUE}SAB', 'item_type': 'num', 'dev_datatype': 'PioSleep', 'reply_pattern': r'SAB(\d{3})', 'item_attrs': {'attributes': {'remark': '0 = off, 30 = 30 minutes, 60 = 60 minutes, 90 = 90 
minutes'}}}, 'amp': {'read': True, 'write': True, 'read_cmd': '?SAC', 'write_cmd': '{VALUE}SAC', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SAC{LOOKUP}', 'lookup': 'AMP', 'item_attrs': {'attributes': {'remark': '0 = AMP, 1 = THR'}, 'lookup_item': True}}, - 'multizone': {'read': False, 'write': True, 'write_cmd': 'ZZ', 'item_type': 'str', 'dev_datatype': 'str'} + 'multizone': {'read': False, 'write': True, 'write_cmd': 'ZZ', 'item_type': 'str', 'dev_datatype': 'str'}, + 'settings': { + 'language': {'read': True, 'write': True, 'read_cmd': '?SSE', 'write_cmd': '{RAW_VALUE:02}SSE', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSE{LOOKUP}', 'lookup': 'LANGUAGE', 'item_attrs': {'initial': True}}, + 'name': {'read': True, 'write': True, 'read_cmd': '?SSO', 'write_cmd': '{VALUE}SSO', 'item_type': 'str', 'dev_datatype': 'PioName', 'reply_pattern': r'SSO(?:\d{2})(.*)', 'item_attrs': {'initial': True}}, + 'speakersystem': {'read': True, 'write': True, 'read_cmd': '?SSF', 'write_cmd': '{RAW_VALUE:02}SSF', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSF{LOOKUP}', 'lookup': 'SPEAKERSYSTEM', 'item_attrs': {'initial': True}}, + 'surroundposition': {'read': True, 'write': True, 'read_cmd': '?SSP', 'write_cmd': '{RAW_VALUE:01}SSP', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSP{LOOKUP}', 'lookup': 'SURROUNDPOSITION', 'item_attrs': {'initial': True}}, + 'xover': {'read': True, 'write': True, 'read_cmd': '?SSQ', 'write_cmd': '{RAW_VALUE:01}SSQ', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSQ{LOOKUP}', 'lookup': 'XOVER', 'item_attrs': {'initial': True}}, + 'xcurve': {'read': True, 'write': True, 'read_cmd': '?SST', 'write_cmd': '{RAW_VALUE:01}SST', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SST{LOOKUP}', 'lookup': 'XCURVE', 'item_attrs': {'initial': True}}, + 'loudness': {'read': True, 'write': True, 'read_cmd': '?SSU', 'write_cmd': '{RAW_VALUE:01}SSU', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'SSU(\d{1})', 'item_attrs': {'initial': True}}, + 'initialvolume': {'read': True, 'write': True, 'read_cmd': '?SUC', 'write_cmd': '{VALUE}SUC', 'item_type': 'num', 'dev_datatype': 'PioInitVol', 'reply_pattern': r'SUC(\d{3})', 'item_attrs': {'initial': True}}, + 'mutelevel': {'read': True, 'write': True, 'read_cmd': '?SUE', 'write_cmd': '{RAW_VALUE:01}SUE', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'SUE{LOOKUP}', 'lookup': 'MUTELEVEL', 'item_attrs': {'initial': True}}, + 'hdmi': { + 'control': {'read': True, 'write': True, 'read_cmd': '?STQ', 'write_cmd': '{RAW_VALUE:01}STQ', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STQ(\d{1})', 'item_attrs': {'initial': True}}, + 'controlmode': {'read': True, 'write': True, 'read_cmd': '?STR', 'write_cmd': '{RAW_VALUE:01}STR', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STR(\d{1})', 'item_attrs': {'initial': True}}, + 'arc': {'read': True, 'write': True, 'read_cmd': '?STT', 'write_cmd': '{RAW_VALUE:01}STT', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STT(\d{1})', 'item_attrs': {'initial': True}}, + 'standbythrough': {'read': True, 'write': True, 'read_cmd': '?STU', 'write_cmd': '{RAW_VALUE:02}STU', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'STU{LOOKUP})', 'lookup': 'STANDBYTHROUGH', 'item_attrs': {'initial': True}} + } + + } }, 'tuner': { 'tunerpreset': {'read': True, 'write': True, 'read_cmd': '?PR', 'item_type': 'num', 'write_cmd': '{RAW_VALUE}PR', 
'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'PR([A-Ga-g]\d{2})'}, @@ -43,6 +61,7 @@ 'inputdown': {'read': False, 'write': True, 'write_cmd': 'FD', 'item_type': 'bool', 'dev_datatype': 'raw'} }, 'settings': { + 'standby': {'read': True, 'write': True, 'read_cmd': '?STY', 'write_cmd': '{VALUE}STY', 'item_type': 'num', 'dev_datatype': 'PioStandby', 'reply_pattern': r'STY(\d{4})', 'item_attrs': {'attributes': {'remark': '0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes'}, 'initial': True}}, 'sound': { 'channel_level': { 'front_left': {'read': True, 'write': True, 'read_cmd': '?L__CLV', 'item_type': 'num', 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'write_cmd': 'L__{VALUE}CLV', 'dev_datatype': 'PioChannelVol', 'reply_pattern': r'CLVL__(\d{2})'}, @@ -90,6 +109,7 @@ 'inputdown': {'read': False, 'write': True, 'write_cmd': 'ZSFD', 'item_type': 'bool', 'dev_datatype': 'raw'} }, 'settings': { + 'standby': {'read': True, 'write': True, 'read_cmd': '?STZ', 'write_cmd': '{VALUE}STZ', 'item_type': 'num', 'dev_datatype': 'PioStandby2', 'reply_pattern': r'STZ(\d{4})', 'item_attrs': {'attributes': {'remark': '0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours'}, 'initial': True}}, 'sound': { 'channel_level': { 'front_left': {'read': True, 'write': True, 'read_cmd': '?ZGEL___', 'item_type': 'num', 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'write_cmd': 'L__{VALUE}ZGE', 'dev_datatype': 'PioChannelVol', 'reply_pattern': r'ZGEL__(\d{2})'}, @@ -115,6 +135,7 @@ 'inputdown': {'read': False, 'write': True, 'write_cmd': 'ZTFD', 'item_type': 'bool', 'dev_datatype': 'raw'} }, 'settings': { + 'standby': {'read': True, 'write': True, 'read_cmd': '?SUA', 'write_cmd': '{VALUE}SUA', 'item_type': 'num', 'dev_datatype': 'PioStandby2', 'reply_pattern': r'SUA(\d{4})', 'item_attrs': {'attributes': {'remark': '0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours'}, 'initial': True}}, 'sound': { 'channel_level': { 'front_left': {'read': True, 'write': True, 'read_cmd': '?ZHEL___', 'item_type': 'num', 'cmd_settings': {'force_min': -12.0, 'valid_max': 12.0}, 'write_cmd': 'L__{VALUE}ZHE', 'dev_datatype': 'PioChannelVol', 'reply_pattern': r'ZHEL__(\d{2})'}, @@ -127,6 +148,9 @@ 'control': { 'power': {'read': True, 'write': True, 'read_cmd': '?ZEP', 'write_cmd': 'ZE{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': r'ZEP(\d{1})'}, 'input': {'read': True, 'write': True, 'read_cmd': '?ZEA', 'write_cmd': '{VALUE}ZEA', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'ZEA{LOOKUP}', 'lookup': 'INPUTHD', 'item_attrs': {'lookup_item': True}} + }, + 'settings': { + 'standby': {'read': True, 'write': True, 'read_cmd': '?SUB', 'write_cmd': '{VALUE}SUB', 'item_type': 'num', 'dev_datatype': 'PioStandby2', 'reply_pattern': r'SUB(\d{4})', 'item_attrs': {'attributes': {'remark': '0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours'}, 'initial': True}}, } } } @@ -151,6 +175,52 @@ '1': 'THRU', '9': 'HDMI AUDIO (cyclic)' }, + 'MUTELEVEL': { + '0': 'Full', + '1': '-40db', + '2': '-20db' + }, + 'XOVER': { + '0': '50Hz', + '1': '80Hz', + '2': '100Hz', + '3': '150Hz', + '4': '200Hz' + }, + 'XCURVE': { + '0': 'OFF', + '1': '-0.5db', + '2': '-1.0db', + '3': '-1,5db', + '4': '-2.0db', + '5': '-2.5db', + '6': '-3.0db' + }, + 'STANDBYTHROUGH': { + '00': 'OFF', + '01': 'LAST', + '02': 'BD', + '03': 'HDMI 1', + '04': 'HDMI 2', + '05': 'HDMI 3', + '06': 'HDMI 4', + '07': 'HDMI 5', + '08': 'HDMI 6', + 
'09': 'HDMI 7', + '10': 'HDMI 8' + }, + 'LANGUAGE': { + '00': 'English', + '01': 'French', + '03': 'German', + '04': 'Italian', + '05': 'Spanish', + '06': 'Dutch', + '07': 'Russian', + '08': 'Chinese1', + '09': 'Chinese2', + '10': 'Japanese' + }, 'AMP': { '00': 'AMP ON', '01': 'AMP Front OFF', @@ -159,6 +229,17 @@ '98': 'DOWN (cyclic)', '99': 'UP (cyclic)' }, + 'SPEAKERSYSTEM': { + '00': 'Normal(SB/FH)', + '01': 'Normal(SB/FW)', + '02': 'Speaker B', + '03': 'Front Bi-Amp', + '04': 'Zone2' + }, + 'SURROUNDPOSITION': { + '0': 'ON SIDE', + '1': 'IN REAR' + }, 'TONE': { '00': 6, '01': 5, @@ -450,6 +531,20 @@ '10': 'VIDEO 1 (VIDEO)', '35': 'HDMI 8' }, + 'SPEAKERSYSTEM': { + '10': '9.1ch FH/FW', + '11': '7.1ch + Speaker B', + '12': '7.1ch Front Bi-Amp', + '13': '7.1ch + Zone2', + '14': '7.1ch FH/FW + Zone2', + '15': '5.1ch Bi-Amp + Zone2', + '16': '5.1ch + Zone 2+3', + '17': '5.1ch + SP-B Bi-Amp', + '18': '5.1ch F+Surr Bi-Amp', + '19': '5.1ch F+C Bi-Amp', + '20': '5.1ch C+Surr Bi-Amp', + '21': 'Multi-ZONE Music' + }, 'LISTENINGMODE': { '0050': 'THX (cyclic)', '0051': 'PROLOGIC + THX CINEMA', @@ -517,6 +612,20 @@ '10': 'VIDEO 1 (VIDEO)', '35': 'HDMI 8' }, + 'SPEAKERSYSTEM': { + '10': '9.1ch FH/FW', + '11': '7.1ch + Speaker B', + '12': '7.1ch Front Bi-Amp', + '13': '7.1ch + Zone2', + '14': '7.1ch FH/FW + Zone2', + '15': '5.1ch Bi-Amp + Zone2', + '16': '5.1ch + Zone 2+3', + '17': '5.1ch + SP-B Bi-Amp', + '18': '5.1ch F+Surr Bi-Amp', + '19': '5.1ch F+C Bi-Amp', + '20': '5.1ch C+Surr Bi-Amp', + '21': 'Multi-ZONE Music' + }, 'LISTENINGMODE': { '0050': 'THX (cyclic)', '0051': 'PROLOGIC + THX CINEMA', @@ -576,6 +685,20 @@ '10': 'VIDEO 1 (VIDEO)', '35': 'HDMI 8' }, + 'SPEAKERSYSTEM': { + '10': '9.1ch FH/FW', + '11': '7.1ch + Speaker B', + '12': '7.1ch Front Bi-Amp', + '13': '7.1ch + Zone2', + '14': '7.1ch FH/FW + Zone2', + '15': '5.1ch Bi-Amp + Zone2', + '16': '5.1ch + Zone 2+3', + '17': '5.1ch + SP-B Bi-Amp', + '18': '5.1ch F+Surr Bi-Amp', + '19': '5.1ch F+C Bi-Amp', + '20': '5.1ch C+Surr Bi-Amp', + '21': 'Multi-ZONE Music' + }, 'LISTENINGMODE': { '0050': 'THX (cyclic)', '0051': 'PROLOGIC + THX CINEMA', diff --git a/pioneer/datatypes.py b/pioneer/datatypes.py index 02bf79e48..ee4679470 100755 --- a/pioneer/datatypes.py +++ b/pioneer/datatypes.py @@ -28,6 +28,71 @@ def get_send_data(self, data, **kwargs): def get_shng_data(self, data, type=None, **kwargs): return int(data) +class DT_PioStandby(DT.Datatype): + def get_send_data(self, data, **kwargs): + if data == 0: + return "0000" + elif data <= 15: + return "0150" + elif data <= 30: + return "0300" + else: + return "0600" + + def get_shng_data(self, data, type=None, **kwargs): + return int(data) + +class DT_PioStandby2(DT.Datatype): + def get_send_data(self, data, **kwargs): + if data == 0: + return "0000" + elif data == 0.5: + return "0300" + elif data <= 1: + return "0011" + elif data <= 3: + return "0031" + elif data <= 6: + return "0061" + else: + return "0091" + + def get_shng_data(self, data, type=None, **kwargs): + if data == "0000": + return 0 + elif data == "0300": + return 0.5 + elif data == "0011": + return 1 + elif data == "0031": + return 3 + elif data == "0061": + return 6 + elif data == "0091": + return 9 + +class DT_PioInitVol(DT.Datatype): + def get_send_data(self, data, **kwargs): + if int(data) == 999: + _returnvalue = "999" + elif int(data) < -80: + _returnvalue = "000" + elif float(data) >= 0: + _returnvalue = f"{int(((x - 0) / 12) * (185 - 161) + 161):03}" + elif float(data) < 0: + _returnvalue = f"{int(161 - ((x - 0) / -80) * 
160):03}" + return _returnvalue + + def get_shng_data(self, data, type=None, **kwargs): + if data == "999": + _returnvalue = 999 + elif data == "000": + _returnvalue = -81 + elif int(data) >= 161: + _returnvalue = ((data - 161) / (185 - 161)) * 12 + elif int(data) < 161: + _returnvalue = ((data - 161) / 160) * 80 + return _returnvalue class DT_PioChannelVol(DT.Datatype): def get_send_data(self, data, **kwargs): @@ -36,6 +101,9 @@ def get_send_data(self, data, **kwargs): def get_shng_data(self, data, type=None, **kwargs): return (int(data) - 50) / 2 +class DT_PioName(DT.Datatype): + def get_send_data(self, data, **kwargs): + return f"{len(data):02}{data}" class DT_onoff(DT.Datatype): def get_send_data(self, data, **kwargs): From 3903ca835c26084fd250091677fc195b956e1e1b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 17 May 2023 10:23:25 +0200 Subject: [PATCH 112/775] Pioneer Plugin: Bump v to 1.0.2, add option to create structs; update structs --- pioneer/__init__.py | 20 +- pioneer/plugin.yaml | 497 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 514 insertions(+), 3 deletions(-) diff --git a/pioneer/__init__.py b/pioneer/__init__.py index 270605e13..b24003f9d 100755 --- a/pioneer/__init__.py +++ b/pioneer/__init__.py @@ -22,11 +22,24 @@ ######################################################################### import builtins +import os +import sys + +if __name__ == '__main__': + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NET_TCP_CLI, CONN_SER_ASYNC, CONN_NULL) -from lib.model.smartdeviceplugin import SmartDevicePlugin +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone # from .webif import WebInterface @@ -36,7 +49,7 @@ class pioneer(SmartDevicePlugin): """ Device class for Pioneer AV function. 
""" - PLUGIN_VERSION = '1.0.1' + PLUGIN_VERSION = '1.0.2' def _set_device_defaults(self): # set our own preferences concerning connections @@ -57,3 +70,6 @@ def _transform_send_data(self, data=None, **kwargs): data['limit_response'] = self._parameters[PLUGIN_ATTR_CONN_TERMINATOR] data['payload'] = f'{data.get("payload", "")}{data["limit_response"].decode("unicode-escape")}' return data + +if __name__ == '__main__': + s = Standalone(pioneer, sys.argv[0]) diff --git a/pioneer/plugin.yaml b/pioneer/plugin.yaml index ba844f0f6..8b3171471 100755 --- a/pioneer/plugin.yaml +++ b/pioneer/plugin.yaml @@ -6,7 +6,7 @@ plugin: tester: Morg state: develop keywords: iot device av pioneer sdp - version: 1.0.1 + version: 1.0.2 sh_minversion: 1.9.5 py_minversion: 3.7 multi_instance: false @@ -301,6 +301,154 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + surroundposition: + type: str + pioneer_command: general.settings.surroundposition + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + xover: + type: str + pioneer_command: general.settings.xover + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + loudness: + type: bool + pioneer_command: general.settings.loudness + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + initialvolume: + type: num + pioneer_command: general.settings.initialvolume + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + mutelevel: + type: num + pioneer_command: general.settings.mutelevel + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + - general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + - general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + - general.settings.hdmi + 
pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - general + - general.settings + - general.settings.hdmi + pioneer_read_initial: true + tuner: read: @@ -438,6 +586,17 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - zone1 + - zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -832,6 +991,17 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: zone2.settings + standby: + type: num + pioneer_command: zone2.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - zone2 + - zone2.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -1002,6 +1172,17 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: zone3.settings + standby: + type: num + pioneer_command: zone3.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - zone3 + - zone3.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -1074,6 +1255,24 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - hdzone + - hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + ALL: read: @@ -1277,6 +1476,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: ALL.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.zone1 + - ALL.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -1725,6 +1936,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: ALL.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.hdzone + - ALL.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + SC-LX87: read: @@ -1942,6 +2172,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX87.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.zone1 + - SC-LX87.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -2538,6 +2780,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX87.zone3.settings + standby: + type: num + pioneer_command: zone3.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.zone3 + - SC-LX87.zone3.settings + pioneer_read_initial: true + remark: 0 = 
OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -2614,6 +2868,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX87.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.hdzone + - SC-LX87.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + SC-LX77: read: @@ -2831,6 +3104,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX77.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.zone1 + - SC-LX77.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -3427,6 +3712,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX77.zone3.settings + standby: + type: num + pioneer_command: zone3.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.zone3 + - SC-LX77.zone3.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -3503,6 +3800,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX77.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.hdzone + - SC-LX77.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + SC-LX57: read: @@ -3720,6 +4036,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX57.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.zone1 + - SC-LX57.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -4316,6 +4644,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-LX57.zone3.settings + standby: + type: num + pioneer_command: zone3.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.zone3 + - SC-LX57.zone3.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -4392,6 +4732,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX57.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.hdzone + - SC-LX57.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + SC-2023: read: @@ -4595,6 +4954,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-2023.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - 
SC-2023 + - SC-2023.zone1 + - SC-2023.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -5191,6 +5562,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-2023.zone3.settings + standby: + type: num + pioneer_command: zone3.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.zone3 + - SC-2023.zone3.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + sound: read: @@ -5267,6 +5650,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-2023.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.hdzone + - SC-2023.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + SC-1223: read: @@ -5470,6 +5872,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: SC-1223.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.zone1 + - SC-1223.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -6008,6 +6422,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-1223.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.hdzone + - SC-1223.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + VSX-1123: read: @@ -6211,6 +6644,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: VSX-1123.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.zone1 + - VSX-1123.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -6659,6 +7104,25 @@ item_structs: type: list pioneer_lookup: INPUTHD#list + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: VSX-1123.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.hdzone + - VSX-1123.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours + VSX-923: read: @@ -6862,6 +7326,18 @@ item_structs: enforce_updates: true pioneer_read_group_trigger: VSX-923.zone1.settings + standby: + type: num + pioneer_command: zone1.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.zone1 + - VSX-923.zone1.settings + pioneer_read_initial: true + remark: 0 = OFF, 15 = 15 minutes, 30 = 30 minutes, 60 = 60 minutes + sound: read: @@ -7309,5 +7785,24 @@ item_structs: lookup: type: list pioneer_lookup: INPUTHD#list + + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: 
VSX-923.hdzone.settings + + standby: + type: num + pioneer_command: hdzone.settings.standby + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.hdzone + - VSX-923.hdzone.settings + pioneer_read_initial: true + remark: 0 = OFF, 0.5 = 30 minutes, 1 = 1 hour, 3 = 3 hours, 6 = 6 hours, 9 = 9 hours plugin_functions: NONE logic_parameters: NONE From ab08ef1da89d8e3a6017dad4c0d497e64d08581e Mon Sep 17 00:00:00 2001 From: msinn Date: Fri, 19 May 2023 12:21:13 +0200 Subject: [PATCH 113/775] hue2: Adjusted requirements for zeroconf package --- hue2/requirements.txt | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/hue2/requirements.txt b/hue2/requirements.txt index 7618d59c7..90328ae79 100755 --- a/hue2/requirements.txt +++ b/hue2/requirements.txt @@ -3,4 +3,18 @@ qhue # zeroconf below v0.27, because newer versions need intensive testing and actual version has dropped support for Python 3.6 #zeroconf<=0.26.3 #Zeroconf >= 0.28 for testing (to resolve conflict with appletv plugin) -zeroconf<=0.28.3 +#zeroconf<=0.28.3 + +#zeroconf>0.28.3,<=0.31 # funktioniert anscheinend +# gibt folgenden Console output: +# ?gleiche? Version von zeroconf: consolidated = , further = >0.28.3, used by ["plugin 'hue2'"] + +#zeroconf>=0.32,<0.33 # 0.32.1 funktioniert anscheinend +#zeroconf>=0.33,<0.38 # 0.37 funktioniert anscheinend +#zeroconf>=0.38,<0.39 # 0.38.7 funktioniert anscheinend (nicht ganz -> Warnung im Log: ) +# 2023-05-19 11:50:15 WARNING lib.smarthome The following threads have not been terminated properly by their plugins (please report to the plugin's author): +# 2023-05-19 11:50:15 WARNING lib.smarthome -Thread: zeroconf-ServiceBrowser-_hue._tcp-29820, still alive +# 2023-05-19 11:50:15 WARNING lib.smarthome -Thread: zeroconf-ServiceBrowser-_hue._tcp-29871, still alive +# 2023-05-19 11:50:15 WARNING lib.smarthome -Thread: zeroconf-ServiceBrowser-_hue._tcp-29916, still alive + +zeroconf>=0.39,<0.52.0 From a837fa2ba39db2b51077541349fc28dadbec61fc Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 21 May 2023 09:18:00 +0200 Subject: [PATCH 114/775] Tasmota Plugin: - revert direct use of _plg_item_dict --- tasmota/webif/templates/index.html | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tasmota/webif/templates/index.html b/tasmota/webif/templates/index.html index 106b309ee..17f5e42f1 100644 --- a/tasmota/webif/templates/index.html +++ b/tasmota/webif/templates/index.html @@ -696,7 +696,6 @@ {% block bodytab6 %} - + +
    +
    - - + + - {% for device in p.tasmota_devices %} + {% for item in p.get_item_list() %} - - + + {% endfor %} - - - -
    {{ _('Tasmota Device') }}{{ _('Tasmota Device Details') }}{{ _('Tasmota Items') }}{{ _('Tasmota Item Config') }}
    {{ device }}{{ p.tasmota_devices[device] }}{{ item }}{{ p.get_item_config(item) }}
    {{ '_plg_item_dict' }}{{ p._plg_item_dict }}
    +
    +
    +
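The tasmota web interface change above replaces the raw dump of the private `_plg_item_dict` with the public SmartPlugin item API that the template now calls on the plugin instance: `p.get_item_list()` to iterate the registered items and `p.get_item_config(item)` to show each item's configuration. As a minimal sketch (not part of the patch itself; the method name `log_item_configs` is purely illustrative), a plugin could use the same pair of calls on its own side instead of reading the private dict:

    # Sketch only: walk the items registered with this plugin via the public
    # SmartPlugin API instead of accessing self._plg_item_dict directly.
    def log_item_configs(self):
        for item in self.get_item_list():          # items previously registered with the plugin
            config = self.get_item_config(item)    # per-item config data dict
            self.logger.debug(f"{item.property.path}: {config}")

Using the same accessors in the web interface keeps the rendered item table independent of how the plugin stores its item data internally, which is the point of the revert in this commit.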
    From aefa6ab99f1ca44186607623f66fb98593c9a23b Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Mon, 22 May 2023 12:20:05 +0200 Subject: [PATCH 115/775] DB-ADDON Plugin: - Remove use of _plg_item_dict in WebIF - automate generation of file item_attributes.py and update of plugin.yaml per script in item_attributes_master.py - update docu - bump to 1.1.2 --- db_addon/__init__.py | 16 +- db_addon/item_attributes.py | 43 ++ db_addon/item_attributes_master.py | 138 +++- db_addon/plugin.yaml | 1075 ++++++++++++++------------- db_addon/user_doc.rst | 20 +- db_addon/webif/templates/index.html | 4 - 6 files changed, 716 insertions(+), 580 deletions(-) create mode 100644 db_addon/item_attributes.py diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 33658a30e..625e2221a 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -40,6 +40,7 @@ from lib.shtime import Shtime from lib.plugin import Plugins from .webif import WebInterface +from .item_attributes import * import lib.db DAY = 'day' @@ -53,7 +54,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.1.1' + PLUGIN_VERSION = '1.1.2' # ToDo: cache temperatureseries raw data def __init__(self, sh): @@ -2875,19 +2876,6 @@ def to_int_float(arg): ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] -ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] -ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] -ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] 
-ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] -ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] -ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] -ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] -ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] -ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 
'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] -ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] -ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] """ diff --git a/db_addon/item_attributes.py b/db_addon/item_attributes.py new file mode 100644 index 000000000..860c0435d --- /dev/null +++ b/db_addon/item_attributes.py @@ -0,0 +1,43 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# DatabaseAddOn for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# +# +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# +# +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] +ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] +ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] +ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] +ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 
'minmax_jahr_minus1_avg'] +ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] +ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] +ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] +ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] +ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] +ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] +ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py index 9d010ce70..75285fb84 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -19,8 +19,14 @@ # 
along with this plugin. If not, see . # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +import ruamel.yaml + +FILENAME_ATTRIBUTES = 'item_attributes.py' + +FILENAME_PLUGIN = 'plugin.yaml' + ITEM_ATTRIBUTS = { - 'DB_ADDON_FCTS': { + 'db_addon_fct': { 'verbrauch_heute': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, 'verbrauch_woche': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, 'verbrauch_monat': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, @@ -123,8 +129,8 @@ 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, - 'general_oldest_value': {'cat': 'gen', 'item_type': 'num ', 'calc': False, 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, - 'general_oldest_log': {'cat': 'gen', 'item_type': 'list', 'calc': False, 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_value': {'cat': 'gen', 'item_type': 'num', 'calc': 'no', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_log': {'cat': 'gen', 'item_type': 'list', 'calc': 'no', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, 'kaeltesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, 'waermesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, 'gruenlandtempsumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, @@ -132,16 +138,48 @@ 'wachstumsgradtage': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, 'db_request': {'cat': 'complex', 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, }, - 'DB_ADDON_INFO': { - 'db_version': 
{'cat': 'info', 'item_type': 'str', 'calc': False, 'params': False, 'description': 'Version der verbundenen Datenbank'}, + 'db_addon_info': { + 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': 'no', 'params': False, 'description': 'Version der verbundenen Datenbank'}, }, - 'DB_ADDON_ADMIN': { - 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, - 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, - 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, + 'db_addon_admin': { + 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, + 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, + 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, }, } +FILE_HEADER = """\ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# DatabaseAddOn for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# +# +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# +# +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +""" def get_attrs(sub_dict: dict = {}) -> list: attributes = [] @@ -151,9 +189,8 @@ def get_attrs(sub_dict: dict = {}) -> list: attributes.append(db_addon_fct) return attributes - -def export_db_addon_data(): - ATTRS = {} +def export_item_attributs_py(): + ATTRS = dict() ATTRS['ALL_ONCHANGE_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'onchange'}) ATTRS['ALL_DAILY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'daily'}) ATTRS['ALL_WEEKLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'weekly'}) @@ -168,27 +205,70 @@ def export_db_addon_data(): ATTRS['ALL_GEN_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'gen'}) ATTRS['ALL_COMPLEX_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'complex'}) + # create file and write header + f = open(FILENAME_ATTRIBUTES, "w") + f.write(FILE_HEADER) + f.close() + + # write avm_data_types for attr, alist in ATTRS.items(): - print(f'{attr} = {alist!r}') + with open(FILENAME_ATTRIBUTES, "a") as f: + print (f'{attr} = {alist!r}', file=f) + print('item_attributs.py successfully created!') -def export_for_plugin_yaml(): - for entry in ITEM_ATTRIBUTS: - print(f'{entry}:') - print('valid_list:') - for func in ITEM_ATTRIBUTS[entry]: - print(f" - '{func}'") +def create_plugin_yaml_item_attribute_valids(): + """Create valid_list of db_addon_fct based on master dict""" + + valid_list_str = """ # NOTE: valid_list is automatically created by using item_attributes_master.py""" + valid_list_desc_str = """ # NOTE: valid_list_description is automatically created by using item_attributes_master.py""" + valid_list_item_type = """ # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py""" + valid_list_calculation = """ # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py""" + + for db_addon_fct in ITEM_ATTRIBUTS[attribute]: + valid_list_str = f"""{valid_list_str}\n\ + - {db_addon_fct!r:<40}""" + + valid_list_desc_str = f"""{valid_list_desc_str}\n\ + - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['description']:<}'""" + + valid_list_item_type = f"""{valid_list_item_type}\n\ + - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['item_type']:<}'""" + + valid_list_calculation = f"""{valid_list_calculation}\n\ + - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['calc']:<}'""" + + valid_list_calculation = f"""{valid_list_calculation}\n\r""" + + return valid_list_str, valid_list_desc_str, valid_list_item_type, valid_list_calculation + +def update_plugin_yaml_avm_data_type(): + """Update ´'valid_list', 'valid_list_description', 'valid_list_item_type' and 'valid_list_calculation' of item attributes in plugin.yaml""" + + yaml = ruamel.yaml.YAML() + yaml.indent(mapping=4, sequence=4, offset=4) + yaml.width = 200 + yaml.allow_unicode = True + yaml.preserve_quotes = False + + valid_list_str, valid_list_desc_str, valid_list_item_type_str, valid_list_calc_str = create_plugin_yaml_item_attribute_valids() + + with open(FILENAME_PLUGIN, 'r', encoding="utf-8") as f: + data = yaml.load(f) - for title in ['description', 'item_type', 'calc']: - 
print(f'valid_list_{entry}:') - for func in ITEM_ATTRIBUTS[entry]: - print(f" - '{ITEM_ATTRIBUTS[entry][func][title]}'") - print() + if data.get('item_attributes', {}).get(attribute): + data['item_attributes'][attribute]['valid_list'] = yaml.load(valid_list_str) + data['item_attributes'][attribute]['valid_list_description'] = yaml.load(valid_list_desc_str) + data['item_attributes'][attribute]['valid_list_item_type'] = yaml.load(valid_list_item_type_str) + data['item_attributes'][attribute]['valid_list_calculation'] = yaml.load(valid_list_calc_str) + with open(FILENAME_PLUGIN, 'w', encoding="utf-8") as f: + yaml.dump(data, f) + print(f"Successfully updated Attribut '{attribute}' in plugin.yaml!") + else: + print(f"Attribut '{attribute}' not defined in plugin.yaml") if __name__ == '__main__': - export_db_addon_data() - print() - print('--------------------------------------------------------------') - print() - export_for_plugin_yaml() + export_item_attributs_py() + for attribute in ITEM_ATTRIBUTS: + update_plugin_yaml_avm_data_type() \ No newline at end of file diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 4e3c7101f..27999d06b 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -3,15 +3,15 @@ plugin: # Global plugin attributes type: system # plugin type (gateway, interface, protocol, system, web) description: - de: 'Add-On für das database Plugin zur Datenauswertung' - en: 'Add-On for the database plugin for data evaluation' + de: Add-On für das database Plugin zur Datenauswertung + en: Add-On for the database plugin for data evaluation maintainer: sisamiwe tester: bmx, onkelandy # Who tests this plugin? state: ready # change to ready when done with development # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.1.1 # Plugin version (must match the version specified in __init__.py) + version: 1.1.2 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -23,26 +23,24 @@ plugin: parameters: database_plugin_config: type: str - default: 'database' + default: database description: - de: "Konfiguration des Plugin 'Database', für die das Plugin 'DatabaseAddOn' verwendet wird" - en: "Config of Plugin 'Database, for which the Plugin 'DatabaseAddOn' should be active" + de: Konfiguration des Plugin 'Database', für die das Plugin 'DatabaseAddOn' verwendet wird + en: Config of Plugin 'Database, for which the Plugin 'DatabaseAddOn' should be active startup_run_delay: type: int default: 60 description: - de: 'Zeitlicher Abstand in Sekunden, mit der die Berechnungen bei Startup ausgeführt werden sollen' - en: 'Delay in seconds, after which the startup calculations will be run' + de: Zeitlicher Abstand in Sekunden, mit der die Berechnungen bei Startup ausgeführt werden sollen + en: Delay in seconds, after which the startup calculations will be run ignore_0: type: list default: [] description: - de: "Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. 
- Beispieleintrag: temp | hum" - en: "At items having a entry of that list in path, val_num=0 will be ignored for database queries. - Example: temp | hum" + de: 'Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt, werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. Beispieleintrag: temp | hum' + en: 'At items having a entry of that list in path, val_num=0 will be ignored for database queries. Example: temp | hum' value_filter: type: dict @@ -52,524 +50,537 @@ parameters: optimize_value_filter: type: bool - default: True + default: true description: - de: "Optimierung der gesetzen als Plugin-Parameter oder/und Item-Attribute gesetzten Wertefilter." - en: "Optimize value filters set as plugin parameter or/and item attribute" + de: Optimierung der gesetzen als Plugin-Parameter oder/und Item-Attribute gesetzten Wertefilter. + en: Optimize value filters set as plugin parameter or/and item attribute use_oldest_entry: type: bool - default: False + default: false description: - de: "True: Verwendung des ältesten Eintrags des Items in der Datenbank, falls der Start des Abfragezeitraums zeitlich vor diesem Eintrag liegt - False: Abbruch der Datenbankabfrage" - en: "True: Use of oldest entry of item in database, if start of query is prior to oldest entry - False: Cancel query" + de: 'True: Verwendung des ältesten Eintrags des Items in der Datenbank, falls der Start des Abfragezeitraums zeitlich vor diesem Eintrag liegt False: Abbruch der Datenbankabfrage' + en: 'True: Use of oldest entry of item in database, if start of query is prior to oldest entry False: Cancel query' item_attributes: db_addon_fct: type: str description: - de: 'Auswertefunktion des DB-Addon Plugins' - en: 'Evaluation Function of DB-Addon Plugins' + de: Auswertefunktion des DB-Addon Plugins + en: Evaluation Function of DB-Addon Plugins valid_list: - - 'verbrauch_heute' - - 'verbrauch_woche' - - 'verbrauch_monat' - - 'verbrauch_jahr' - - 'verbrauch_heute_minus1' - - 'verbrauch_heute_minus2' - - 'verbrauch_heute_minus3' - - 'verbrauch_heute_minus4' - - 'verbrauch_heute_minus5' - - 'verbrauch_heute_minus6' - - 'verbrauch_heute_minus7' - - 'verbrauch_woche_minus1' - - 'verbrauch_woche_minus2' - - 'verbrauch_woche_minus3' - - 'verbrauch_woche_minus4' - - 'verbrauch_monat_minus1' - - 'verbrauch_monat_minus2' - - 'verbrauch_monat_minus3' - - 'verbrauch_monat_minus4' - - 'verbrauch_monat_minus12' - - 'verbrauch_jahr_minus1' - - 'verbrauch_jahr_minus2' - - 'verbrauch_rolling_12m_heute_minus1' - - 'verbrauch_rolling_12m_woche_minus1' - - 'verbrauch_rolling_12m_monat_minus1' - - 'verbrauch_rolling_12m_jahr_minus1' - - 'verbrauch_jahreszeitraum_minus1' - - 'verbrauch_jahreszeitraum_minus2' - - 'verbrauch_jahreszeitraum_minus3' - - 'zaehlerstand_heute_minus1' - - 'zaehlerstand_heute_minus2' - - 'zaehlerstand_heute_minus3' - - 'zaehlerstand_woche_minus1' - - 'zaehlerstand_woche_minus2' - - 'zaehlerstand_woche_minus3' - - 'zaehlerstand_monat_minus1' - - 'zaehlerstand_monat_minus2' - - 'zaehlerstand_monat_minus3' - - 'zaehlerstand_jahr_minus1' - - 'zaehlerstand_jahr_minus2' - - 'zaehlerstand_jahr_minus3' - - 'minmax_last_24h_min' - - 'minmax_last_24h_max' - - 'minmax_last_24h_avg' - - 'minmax_last_7d_min' - - 'minmax_last_7d_max' - - 'minmax_last_7d_avg' - - 'minmax_heute_min' - - 'minmax_heute_max' - - 'minmax_heute_minus1_min' - - 'minmax_heute_minus1_max' - - 'minmax_heute_minus1_avg' - - 'minmax_heute_minus2_min' - - 'minmax_heute_minus2_max' - - 'minmax_heute_minus2_avg' - - 'minmax_heute_minus3_min' - - 
'minmax_heute_minus3_max' - - 'minmax_heute_minus3_avg' - - 'minmax_woche_min' - - 'minmax_woche_max' - - 'minmax_woche_minus1_min' - - 'minmax_woche_minus1_max' - - 'minmax_woche_minus1_avg' - - 'minmax_woche_minus2_min' - - 'minmax_woche_minus2_max' - - 'minmax_woche_minus2_avg' - - 'minmax_monat_min' - - 'minmax_monat_max' - - 'minmax_monat_minus1_min' - - 'minmax_monat_minus1_max' - - 'minmax_monat_minus1_avg' - - 'minmax_monat_minus2_min' - - 'minmax_monat_minus2_max' - - 'minmax_monat_minus2_avg' - - 'minmax_jahr_min' - - 'minmax_jahr_max' - - 'minmax_jahr_minus1_min' - - 'minmax_jahr_minus1_max' - - 'minmax_jahr_minus1_avg' - - 'tagesmitteltemperatur_heute' - - 'tagesmitteltemperatur_heute_minus1' - - 'tagesmitteltemperatur_heute_minus2' - - 'tagesmitteltemperatur_heute_minus3' - - 'serie_minmax_monat_min_15m' - - 'serie_minmax_monat_max_15m' - - 'serie_minmax_monat_avg_15m' - - 'serie_minmax_woche_min_30w' - - 'serie_minmax_woche_max_30w' - - 'serie_minmax_woche_avg_30w' - - 'serie_minmax_tag_min_30d' - - 'serie_minmax_tag_max_30d' - - 'serie_minmax_tag_avg_30d' - - 'serie_verbrauch_tag_30d' - - 'serie_verbrauch_woche_30w' - - 'serie_verbrauch_monat_18m' - - 'serie_zaehlerstand_tag_30d' - - 'serie_zaehlerstand_woche_30w' - - 'serie_zaehlerstand_monat_18m' - - 'serie_waermesumme_monat_24m' - - 'serie_kaeltesumme_monat_24m' - - 'serie_tagesmittelwert_stunde_0d' - - 'serie_tagesmittelwert_tag_stunde_30d' - - 'general_oldest_value' - - 'general_oldest_log' - - 'kaeltesumme' - - 'waermesumme' - - 'gruenlandtempsumme' - - 'tagesmitteltemperatur' - - 'wachstumsgradtage' - - 'db_request' + # NOTE: valid_list is automatically created by using item_attributes_master.py + - verbrauch_heute + - verbrauch_woche + - verbrauch_monat + - verbrauch_jahr + - verbrauch_heute_minus1 + - verbrauch_heute_minus2 + - verbrauch_heute_minus3 + - verbrauch_heute_minus4 + - verbrauch_heute_minus5 + - verbrauch_heute_minus6 + - verbrauch_heute_minus7 + - verbrauch_woche_minus1 + - verbrauch_woche_minus2 + - verbrauch_woche_minus3 + - verbrauch_woche_minus4 + - verbrauch_monat_minus1 + - verbrauch_monat_minus2 + - verbrauch_monat_minus3 + - verbrauch_monat_minus4 + - verbrauch_monat_minus12 + - verbrauch_jahr_minus1 + - verbrauch_jahr_minus2 + - verbrauch_rolling_12m_heute_minus1 + - verbrauch_rolling_12m_woche_minus1 + - verbrauch_rolling_12m_monat_minus1 + - verbrauch_rolling_12m_jahr_minus1 + - verbrauch_jahreszeitraum_minus1 + - verbrauch_jahreszeitraum_minus2 + - verbrauch_jahreszeitraum_minus3 + - zaehlerstand_heute_minus1 + - zaehlerstand_heute_minus2 + - zaehlerstand_heute_minus3 + - zaehlerstand_woche_minus1 + - zaehlerstand_woche_minus2 + - zaehlerstand_woche_minus3 + - zaehlerstand_monat_minus1 + - zaehlerstand_monat_minus2 + - zaehlerstand_monat_minus3 + - zaehlerstand_jahr_minus1 + - zaehlerstand_jahr_minus2 + - zaehlerstand_jahr_minus3 + - minmax_last_24h_min + - minmax_last_24h_max + - minmax_last_24h_avg + - minmax_last_7d_min + - minmax_last_7d_max + - minmax_last_7d_avg + - minmax_heute_min + - minmax_heute_max + - minmax_heute_minus1_min + - minmax_heute_minus1_max + - minmax_heute_minus1_avg + - minmax_heute_minus2_min + - minmax_heute_minus2_max + - minmax_heute_minus2_avg + - minmax_heute_minus3_min + - minmax_heute_minus3_max + - minmax_heute_minus3_avg + - minmax_woche_min + - minmax_woche_max + - minmax_woche_minus1_min + - minmax_woche_minus1_max + - minmax_woche_minus1_avg + - minmax_woche_minus2_min + - minmax_woche_minus2_max + - minmax_woche_minus2_avg + - minmax_monat_min + - 
minmax_monat_max + - minmax_monat_minus1_min + - minmax_monat_minus1_max + - minmax_monat_minus1_avg + - minmax_monat_minus2_min + - minmax_monat_minus2_max + - minmax_monat_minus2_avg + - minmax_jahr_min + - minmax_jahr_max + - minmax_jahr_minus1_min + - minmax_jahr_minus1_max + - minmax_jahr_minus1_avg + - tagesmitteltemperatur_heute + - tagesmitteltemperatur_heute_minus1 + - tagesmitteltemperatur_heute_minus2 + - tagesmitteltemperatur_heute_minus3 + - serie_minmax_monat_min_15m + - serie_minmax_monat_max_15m + - serie_minmax_monat_avg_15m + - serie_minmax_woche_min_30w + - serie_minmax_woche_max_30w + - serie_minmax_woche_avg_30w + - serie_minmax_tag_min_30d + - serie_minmax_tag_max_30d + - serie_minmax_tag_avg_30d + - serie_verbrauch_tag_30d + - serie_verbrauch_woche_30w + - serie_verbrauch_monat_18m + - serie_zaehlerstand_tag_30d + - serie_zaehlerstand_woche_30w + - serie_zaehlerstand_monat_18m + - serie_waermesumme_monat_24m + - serie_kaeltesumme_monat_24m + - serie_tagesmittelwert_stunde_0d + - serie_tagesmittelwert_tag_stunde_30d + - general_oldest_value + - general_oldest_log + - kaeltesumme + - waermesumme + - gruenlandtempsumme + - tagesmitteltemperatur + - wachstumsgradtage + - db_request valid_list_description: - - 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)' - - 'Verbrauch in der aktuellen Woche' - - 'Verbrauch im aktuellen Monat' - - 'Verbrauch im aktuellen Jahr' - - 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)' - - 'Verbrauch vorgestern (heute -2 Tage)' - - 'Verbrauch heute -3 Tage' - - 'Verbrauch heute -4 Tage' - - 'Verbrauch heute -5 Tage' - - 'Verbrauch heute -6 Tage' - - 'Verbrauch heute -7 Tage' - - 'Verbrauch Vorwoche (aktuelle Woche -1)' - - 'Verbrauch aktuelle Woche -2 Wochen' - - 'Verbrauch aktuelle Woche -3 Wochen' - - 'Verbrauch aktuelle Woche -4 Wochen' - - 'Verbrauch Vormonat (aktueller Monat -1)' - - 'Verbrauch aktueller Monat -2 Monate' - - 'Verbrauch aktueller Monat -3 Monate' - - 'Verbrauch aktueller Monat -4 Monate' - - 'Verbrauch aktueller Monat -12 Monate' - - 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Verbrauch aktuelles Jahr -2 Jahre' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren' - - 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren' - - 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)' - - 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)' - - 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)' - - 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)' - - 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)' - - 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen' - - 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)' - - 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)' - - 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate' - - 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)' - - 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)' - - 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre' - - 'minimaler Wert der letzten 24h' - - 'maximaler Wert der letzten 24h' - - 'durchschnittlicher Wert der letzten 24h' - - 'minimaler Wert der letzten 7 Tage' - - 'maximaler Wert der letzten 7 Tage' - - 'durchschnittlicher Wert der letzten 7 Tage' - - 'Minimalwert seit Tagesbeginn' - - 'Maximalwert seit Tagesbeginn' - - 'Minimalwert gestern (heute -1 Tag)' - - 'Maximalwert gestern (heute -1 Tag)' - - 'Durchschnittswert gestern (heute -1 Tag)' - - 'Minimalwert vorgestern (heute -2 Tage)' - - 'Maximalwert vorgestern (heute -2 Tage)' - - 'Durchschnittswert vorgestern (heute -2 Tage)' - - 'Minimalwert heute vor 3 Tagen' - - 'Maximalwert heute vor 3 Tagen' - - 'Durchschnittswert heute vor 3 Tagen' - - 'Minimalwert seit Wochenbeginn' - - 'Maximalwert seit Wochenbeginn' - - 'Minimalwert Vorwoche (aktuelle Woche -1)' - - 'Maximalwert Vorwoche (aktuelle Woche -1)' - - 'Durchschnittswert Vorwoche (aktuelle Woche -1)' - - 'Minimalwert aktuelle Woche -2 Wochen' - - 'Maximalwert aktuelle Woche -2 Wochen' - - 'Durchschnittswert aktuelle Woche -2 Wochen' - - 'Minimalwert seit Monatsbeginn' - - 'Maximalwert seit Monatsbeginn' - - 'Minimalwert Vormonat (aktueller Monat -1)' - - 'Maximalwert Vormonat (aktueller Monat -1)' - - 'Durchschnittswert Vormonat (aktueller Monat -1)' - - 'Minimalwert aktueller Monat -2 Monate' - - 'Maximalwert aktueller Monat -2 Monate' - - 'Durchschnittswert aktueller Monat -2 Monate' - - 'Minimalwert seit Jahresbeginn' - - 'Maximalwert seit Jahresbeginn' - - 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)' - - 'Tagesmitteltemperatur heute' - - 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)' - - 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)' - - 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)' - - 'monatlicher Minimalwert der letzten 15 Monate (gleitend)' - - 'monatlicher Maximalwert der letzten 15 Monate (gleitend)' - - 'monatlicher Mittelwert der letzten 15 Monate (gleitend)' - - 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)' - - 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)' - - 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)' - - 'täglicher Minimalwert der letzten 30 Tage (gleitend)' - - 'täglicher Maximalwert der letzten 30 Tage (gleitend)' - - 'täglicher Mittelwert der letzten 30 Tage (gleitend)' - - 'Verbrauch pro Tag der letzten 30 Tage' - - 'Verbrauch pro Woche der letzten 30 Wochen' - - 'Verbrauch pro Monat der letzten 18 Monate' - - 'Zählerstand am Tagesende der letzten 30 Tage' - - 
'Zählerstand am Wochenende der letzten 30 Wochen' - - 'Zählerstand am Monatsende der letzten 18 Monate' - - 'monatliche Wärmesumme der letzten 24 Monate' - - 'monatliche Kältesumme der letzten 24 Monate' - - 'Stundenmittelwert für den aktuellen Tag' - - 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde' - - 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut' - - 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut' - - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str)' - - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str, threshold=optional: int)' + # NOTE: valid_list_description is automatically created by using item_attributes_master.py + - Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages) + - Verbrauch in der aktuellen Woche + - Verbrauch im aktuellen Monat + - Verbrauch im aktuellen Jahr + - Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach) + - Verbrauch vorgestern (heute -2 Tage) + - Verbrauch heute -3 Tage + - Verbrauch heute -4 Tage + - Verbrauch heute -5 Tage + - Verbrauch heute -6 Tage + - Verbrauch heute -7 Tage + - Verbrauch Vorwoche (aktuelle Woche -1) + - Verbrauch aktuelle Woche -2 Wochen + - Verbrauch aktuelle Woche -3 Wochen + - Verbrauch aktuelle Woche -4 Wochen + - Verbrauch Vormonat (aktueller Monat -1) + - Verbrauch aktueller Monat -2 Monate + - Verbrauch aktueller Monat -3 Monate + - Verbrauch aktueller Monat -4 Monate + - Verbrauch aktueller Monat -12 Monate + - Verbrauch Vorjahr (aktuelles Jahr -1 Jahr) + - Verbrauch aktuelles Jahr -2 Jahre + - Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages + - Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche + - Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats + - Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres + - Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres + - Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren + - Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren + - Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag) + - Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag) + - Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag) + - Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche) + - Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen) + - Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen + - Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat) + - Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate) + - Zählerstand / Wert am Ende des aktuellen Monats -3 Monate + - Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr) + - Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre) + - Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre + - minimaler Wert der letzten 24h + - maximaler Wert der letzten 24h + - durchschnittlicher Wert der letzten 24h + - minimaler Wert der letzten 7 Tage + - maximaler Wert der letzten 7 Tage + - durchschnittlicher Wert der letzten 7 Tage + - Minimalwert seit Tagesbeginn + - Maximalwert seit Tagesbeginn + - Minimalwert gestern (heute -1 Tag) + - Maximalwert gestern (heute -1 Tag) + - Durchschnittswert gestern (heute -1 Tag) + - Minimalwert vorgestern (heute -2 Tage) + - Maximalwert vorgestern (heute -2 Tage) + - Durchschnittswert vorgestern (heute -2 Tage) + - Minimalwert heute vor 3 Tagen + - Maximalwert heute vor 3 Tagen + - Durchschnittswert heute vor 3 Tagen + - Minimalwert seit Wochenbeginn + - Maximalwert seit Wochenbeginn + - Minimalwert Vorwoche (aktuelle Woche -1) + - Maximalwert Vorwoche (aktuelle Woche -1) + - Durchschnittswert Vorwoche (aktuelle Woche -1) + - Minimalwert aktuelle Woche -2 Wochen + - Maximalwert aktuelle Woche -2 Wochen + - Durchschnittswert aktuelle Woche -2 Wochen + - Minimalwert seit Monatsbeginn + - Maximalwert seit Monatsbeginn + - Minimalwert Vormonat (aktueller Monat -1) + - Maximalwert Vormonat (aktueller Monat -1) + - Durchschnittswert Vormonat (aktueller Monat -1) + - Minimalwert aktueller Monat -2 Monate + - Maximalwert aktueller Monat -2 Monate + - Durchschnittswert aktueller Monat -2 Monate + - Minimalwert seit Jahresbeginn + - Maximalwert seit Jahresbeginn + - Minimalwert Vorjahr (aktuelles Jahr -1 Jahr) + - Maximalwert Vorjahr (aktuelles Jahr -1 Jahr) + - Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr) + - Tagesmitteltemperatur heute + - Tagesmitteltemperatur des letzten Tages (heute -1 Tag) + - Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag) + - Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag) + - monatlicher Minimalwert der letzten 15 Monate (gleitend) + - monatlicher Maximalwert der letzten 15 Monate (gleitend) + - monatlicher Mittelwert der letzten 15 Monate (gleitend) + - wöchentlicher Minimalwert der letzten 30 Wochen (gleitend) + - wöchentlicher Maximalwert der letzten 30 Wochen (gleitend) + - wöchentlicher Mittelwert der letzten 30 Wochen (gleitend) + - täglicher Minimalwert der letzten 30 Tage (gleitend) + - täglicher Maximalwert der letzten 30 Tage (gleitend) + - täglicher Mittelwert der letzten 30 Tage (gleitend) + - Verbrauch pro Tag der letzten 30 Tage + - Verbrauch pro Woche der letzten 30 Wochen + - Verbrauch pro Monat der letzten 18 Monate + - Zählerstand am Tagesende der letzten 30 Tage + - Zählerstand am Wochenende der letzten 30 Wochen + - Zählerstand am Monatsende der letzten 18 Monate + - monatliche Wärmesumme der letzten 24 
Monate + - monatliche Kältesumme der letzten 24 Monate + - Stundenmittelwert für den aktuellen Tag + - Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde + - Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut + - Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut + - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)' + - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)' - 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)' - - 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)' - - 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (year=Jahr: int, method=0/1: int, threshold=Schwellentemperatur: int)' + - Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer) + - Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur) - 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)' valid_list_item_type: - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'num' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'list' - - 'num ' - - 'list' - - 'num' - - 'num' - - 'num' - - 'list' - - 'num' - - 'list' + # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - num + - 
num + - num + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - list + - num + - list + - num + - num + - num + - list + - num + - list valid_list_calculation: - - 'onchange' - - 'onchange' - - 'onchange' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'yearly' - - 'yearly' - - 'daily' - - 'weekly' - - 'monthly' - - 'yearly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'weekly' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'yearly' - - 'yearly' - - 'yearly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'onchange' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'onchange' - - 'onchange' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'onchange' - - 'onchange' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'onchange' - - 'onchange' - - 'yearly' - - 'yearly' - - 'yearly' - - 'onchange' - - 'daily' - - 'daily' - - 'daily' - - 'monthly' - - 'monthly' - - 'monthly' - - 'weekly' - - 'weekly' - - 'weekly' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'weekly' - - 'monthly' - - 'daily' - - 'weekly' - - 'monthly' - - 'monthly' - - 'monthly' - - 'daily' - - 'daily' - - 'False' - - 'False' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'daily' - - 'group' - + # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py + - onchange + - onchange + - onchange + - onchange + - daily + - daily + - daily + - daily + - daily + - daily + - daily + - weekly + - weekly + - weekly + - weekly + - monthly + - monthly + - monthly + - monthly + - monthly + - yearly + - yearly + - daily + - weekly + - monthly + - yearly + - daily + - daily + - daily + - daily + - daily + - daily + - weekly + - weekly + - weekly + - monthly + - monthly + - monthly + - yearly + - yearly + - yearly + - daily + - daily + - daily + - daily + - daily + - daily + - onchange + - onchange + - daily + - daily + - daily + - daily + - daily + - daily + - daily + - daily + - daily + - onchange + - onchange + - weekly + - weekly + - weekly + - weekly + - weekly + - weekly + - onchange + - onchange + - monthly + - monthly + - monthly + - monthly + - monthly + - monthly + - onchange + - onchange + - yearly + - yearly + - yearly + - onchange + - daily + - daily + - daily + - monthly + - monthly + - monthly + - weekly + - weekly + - weekly + - daily + - daily + - daily + - daily + - weekly + - monthly + - daily + - weekly + - monthly + - monthly + - monthly + - daily + - daily + - no + - no + - daily + - daily + - daily + - daily + - daily + - group db_addon_info: type: str description: - de: 'Info-Funktion des DB-Addon Plugins' - en: 'Info-Function of DB-Addon Plugins' + de: Info-Funktion des DB-Addon Plugins + en: Info-Function of DB-Addon Plugins valid_list: - - 'db_version' + # NOTE: valid_list is automatically created by using item_attributes_master.py + - db_version valid_list_description: - - 'Version der verbundenen Datenbank' + # NOTE: valid_list_description is automatically created by using item_attributes_master.py + - Version der verbundenen Datenbank valid_list_item_type: - - 
'str' - + # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py + - str + valid_list_calculation: + # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py + - no db_addon_admin: type: str description: - de: 'Admin-Funktion des DB-Addon Plugins' - en: 'Admin-Function of DB-Addon Plugins' + de: Admin-Funktion des DB-Addon Plugins + en: Admin-Function of DB-Addon Plugins valid_list: - - 'suspend' - - 'recalc_all' - - 'clean_cache_values' + # NOTE: valid_list is automatically created by using item_attributes_master.py + - suspend + - recalc_all + - clean_cache_values valid_list_description: - - 'unterbricht die Aktivitäten des Plugin -> bool' - - 'Startet einen Neuberechnungslauf aller on-demand items -> bool' - - 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte -> bool' + # NOTE: valid_list_description is automatically created by using item_attributes_master.py + - Unterbricht die Aktivitäten des Plugin + - Startet einen Neuberechnungslauf aller on-demand Items + - Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte valid_list_item_type: - - 'bool' - - 'bool' - - 'bool' - + # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py + - bool + - bool + - bool + valid_list_calculation: + # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py + - no + - no + - no db_addon_params: type: str description: - de: "Parameter für eine Auswertefunktion des DB-Addon Plugins im Format 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" - en: "Parameters of a DB-Addon Plugin evaluation function. Need to have format of 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument'" + de: Parameter für eine Auswertefunktion des DB-Addon Plugins im Format 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument' + en: Parameters of a DB-Addon Plugin evaluation function. Need to have format of 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument' db_addon_startup: type: bool description: - de: 'Ausführen der Berechnung bei Plugin Start (mit zeitlichem Abstand, wie in den Plugin Parametern definiert)' - en: 'Run function in startup of plugin (with delay, set in plugin parameters)' + de: Ausführen der Berechnung bei Plugin Start (mit zeitlichem Abstand, wie in den Plugin Parametern definiert) + en: Run function in startup of plugin (with delay, set in plugin parameters) db_addon_ignore_value: type: num description: - de: 'Wert der bei Abfrage bzw. Auswertung der Datenbank für diese Item ignoriert werden soll' - en: 'Value which will be ignored at database query' + de: Wert der bei Abfrage bzw. Auswertung der Datenbank für diese Item ignoriert werden soll + en: Value which will be ignored at database query db_addon_ignore_value_list: type: list(str) description: de: "Liste von Vergleichsoperatoren, die bei Abfrage bzw. Auswertung der Datenbank für dieses Item berücksichtigt werden sollen. 
Bsp: ['> 0', '< 35']" - en: "List of comparison operators which will be used at database query" + en: List of comparison operators which will be used at database query db_addon_database_item: type: str @@ -1022,8 +1033,8 @@ plugin_functions: fetch_log: type: list description: - de: 'Liefert für das angegebene Item und die Parameter das Abfrageergebnis zurück' - en: 'Return the database request result for the given item and parameters' + de: Liefert für das angegebene Item und die Parameter das Abfrageergebnis zurück + en: Return the database request result for the given item and parameters # mit dieser Funktion ist es möglich, eine Liste der "func" Werte pro "group" / "group2" eines "item" von "start""timespan" bis "end""timespan" oder von "start""timespan" bis "count" ausgegeben zu lassen # bspw. minimale Tagestemperatur vom Item "outdoor.temp" der letzten 10 Tage startend von gestern davor --> func=min, item=outdoor.temp, timespan=day, start=1, count=10, group=day # bspw. maximal Tagestemperatur vom Item "outdoor.temp" von jetzt bis 2 Monate davor --> func=max, item=outdoor.temp, timeframe=month, start=0, end=2, group=day @@ -1031,82 +1042,82 @@ plugin_functions: func: type: str description: - de: "zu verwendende Abfragefunktion" - en: "database function to be used" - mandatory: True + de: zu verwendende Abfragefunktion + en: database function to be used + mandatory: true valid_list: - - min # Minimalwerte - - max # Maximalwerte - - sum # Summe - - on - - integrate - - sum_max - - sum_avg - - sum_min_neg - - diff_max + - min # Minimalwerte + - max # Maximalwerte + - sum # Summe + - on + - integrate + - sum_max + - sum_avg + - sum_min_neg + - diff_max item: type: foo description: - de: "Das Item-Objekt oder die Item_ID der DB" - en: "An item object" - mandatory: True + de: Das Item-Objekt oder die Item_ID der DB + en: An item object + mandatory: true timeframe: type: str description: - de: "Zeitinkrement für die DB-Abfrage" - en: "time increment for db-request" - mandatory: True + de: Zeitinkrement für die DB-Abfrage + en: time increment for db-request + mandatory: true valid_list: - - day - - week - - month - - year + - day + - week + - month + - year start: type: int description: - de: "Zeitlicher Beginn der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" - en: "start point in time for db-request; x time increments from now into the past" + de: 'Zeitlicher Beginn der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit' + en: start point in time for db-request; x time increments from now into the past end: type: int description: - de: "Zeitliches Ende der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit" - en: "end point in time for db-request; x time increments from now into the past" + de: 'Zeitliches Ende der DB-Abfrage: x Zeitinkrementen von jetzt in die Vergangenheit' + en: end point in time for db-request; x time increments from now into the past count: type: int description: - de: "Anzahl der Zeitinkremente, vom Start in die Vergangenheit abzufragen sind. Alternative zu 'end'" - en: "number of time increments from start point in time into the past. can be used alternativly to 'end'" + de: Anzahl der Zeitinkremente, vom Start in die Vergangenheit abzufragen sind. Alternative zu 'end' + en: number of time increments from start point in time into the past. 
can be used alternatively to 'end'
         group:
             type: str
             description:
-                de: "erste Gruppierung der DB-Abfrage"
-                en: "first grouping for the db-request"
+                de: erste Gruppierung der DB-Abfrage
+                en: first grouping for the db-request
             valid_list:
-              - day
-              - week
-              - month
-              - year
+                - day
+                - week
+                - month
+                - year
         group2:
             type: str
             description:
-                de: "zweite Gruppierung der DB-Abfrage"
-                en: "second grouping for the db-request"
+                de: zweite Gruppierung der DB-Abfrage
+                en: second grouping for the db-request
             valid_list:
-              - day
-              - week
-              - month
-              - year
+                - day
+                - week
+                - month
+                - year
     db_version:
         type: str
         description:
-            de: 'Liefer die verwendete Version der Datenbank'
-            en: 'Return the database version'
+            de: Liefert die verwendete Version der Datenbank
+            en: Return the database version
     suspend:
         type: bool
         description:
-            de: 'Pausiert die Berechnungen des Plugins'
-            en: 'Suspends value evaluation of plugin'
+            de: Pausiert die Berechnungen des Plugins
+            en: Suspends value evaluation of plugin
 logic_parameters: NONE
diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst
index 158e64bfe..d6ed6a49b 100644
--- a/db_addon/user_doc.rst
+++ b/db_addon/user_doc.rst
@@ -119,7 +119,8 @@ Hinweise
 
 - Für die Auswertung kann es nützlich sein, bestimmte Werte aus der Datenbank bei der Berechnung auszublenden. Hierfür stehen 2 Möglichkeiten zur Verfügung:
 
     - Plugin-Attribut `ignore_0`: (list of strings) Bei Items, bei denen ein String aus der Liste im Pfadnamen vorkommt,
-      werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. Hat also das Attribut den Wert ['temp'] werden bei allen Items mit 'temp' im Pfadnamen die 0-Werte bei der Auswertung ignoriert.
+      werden 0-Werte (val_num = 0) bei Datenbankauswertungen ignoriert. Hat also das Attribut den Wert ['temp'] werden bei allen Items mit
+      'temp' im Pfadnamen die 0-Werte bei der Auswertung ignoriert.
     - Item-Attribut `db_addon_ignore_value`: (num) Dieser Wert wird bei der Abfrage bzw. Auswertung der Datenbank für dieses Item ignoriert.
@@ -295,3 +296,20 @@ Tagesmitteltemperatur
 
 Die Tagesmitteltemperatur wird auf Basis der stündlichen Durchschnittswerte eines Tages (aller in der DB enthaltenen Datensätze)
 für die angegebene Anzahl von Tagen (days=optional) berechnet.
+
+
+
+Vorgehen bei Funktionserweiterung des Plugins bzw. Ergänzung weiterer Werte für Item-Attribute
+----------------------------------------------------------------------------------------------
+
+Aufgrund der Vielzahl der möglichen Werte der Itemattribute, insbesondere des Itemattributes `db_addon_fct`, wurde die Erstellung/Update
+der entsprechenden Teile der `plugin.yaml` sowie die Erstellung der Datei `item_attributes.py`, die vom Plugin verwendet wird, automatisiert.
+
+Die Masterinformationen für alle Itemattribute sowie die Skripte zum Erstellen/Update der beiden Dateien sind in der
+Datei `item_attributes_master.py` enthalten.
+
+.. important::
+
+    Korrekturen, Erweiterungen etc. der Itemattribute sollten nur in der Datei `item_attributes_master.py`
+    im Dict der Variable `ITEM_ATTRIBUTS` vorgenommen werden. Das Ausführen der Datei `item_attributes_master.py` (main)
+    erstellt die `item_attributes.py` und aktualisiert die `plugin.yaml` entsprechend.
\ No newline at end of file diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index 31b2dd3a2..a2f60cc4b 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -377,10 +377,6 @@ - - - - From 0db0f758fe5c8cc6f30767ea32fd8c9f160e8126 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Mon, 22 May 2023 17:04:20 +0200 Subject: [PATCH 116/775] DB-ADDON Plugin: - Correct types of last commit --- db_addon/item_attributes_master.py | 36 +++++++++++++++--------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py index 75285fb84..5bcd34761 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -25,7 +25,7 @@ FILENAME_PLUGIN = 'plugin.yaml' -ITEM_ATTRIBUTS = { +ITEM_ATTRIBUTES = { 'db_addon_fct': { 'verbrauch_heute': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, 'verbrauch_woche': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, @@ -174,7 +174,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributes_master.py # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # @@ -183,13 +183,13 @@ def get_attrs(sub_dict: dict = {}) -> list: attributes = [] - for entry in ITEM_ATTRIBUTS: - for db_addon_fct in ITEM_ATTRIBUTS[entry]: - if sub_dict.items() <= ITEM_ATTRIBUTS[entry][db_addon_fct].items(): + for entry in ITEM_ATTRIBUTES: + for db_addon_fct in ITEM_ATTRIBUTES[entry]: + if sub_dict.items() <= ITEM_ATTRIBUTES[entry][db_addon_fct].items(): attributes.append(db_addon_fct) return attributes -def export_item_attributs_py(): +def export_item_attributes_py(): ATTRS = dict() ATTRS['ALL_ONCHANGE_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'onchange'}) ATTRS['ALL_DAILY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'daily'}) @@ -215,7 +215,7 @@ def export_item_attributs_py(): with open(FILENAME_ATTRIBUTES, "a") as f: print (f'{attr} = {alist!r}', file=f) - print('item_attributs.py successfully created!') + print('item_attributes.py successfully created!') def create_plugin_yaml_item_attribute_valids(): """Create valid_list of db_addon_fct based on master dict""" @@ -225,25 +225,25 @@ def create_plugin_yaml_item_attribute_valids(): valid_list_item_type = """ # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py""" valid_list_calculation = """ # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py""" - for db_addon_fct in ITEM_ATTRIBUTS[attribute]: + for db_addon_fct in ITEM_ATTRIBUTES[attribute]: valid_list_str = f"""{valid_list_str}\n\ - {db_addon_fct!r:<40}""" valid_list_desc_str = f"""{valid_list_desc_str}\n\ - - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['description']:<}'""" + - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['description']:<}'""" valid_list_item_type = f"""{valid_list_item_type}\n\ - - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['item_type']:<}'""" + - 
'{ITEM_ATTRIBUTES[attribute][db_addon_fct]['item_type']:<}'""" valid_list_calculation = f"""{valid_list_calculation}\n\ - - '{ITEM_ATTRIBUTS[attribute][db_addon_fct]['calc']:<}'""" + - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['calc']:<}'""" valid_list_calculation = f"""{valid_list_calculation}\n\r""" return valid_list_str, valid_list_desc_str, valid_list_item_type, valid_list_calculation -def update_plugin_yaml_avm_data_type(): - """Update ´'valid_list', 'valid_list_description', 'valid_list_item_type' and 'valid_list_calculation' of item attributes in plugin.yaml""" +def update_plugin_yaml_item_attributes(): + """Update 'valid_list', 'valid_list_description', 'valid_list_item_type' and 'valid_list_calculation' of item attributes in plugin.yaml""" yaml = ruamel.yaml.YAML() yaml.indent(mapping=4, sequence=4, offset=4) @@ -264,11 +264,11 @@ def update_plugin_yaml_avm_data_type(): with open(FILENAME_PLUGIN, 'w', encoding="utf-8") as f: yaml.dump(data, f) - print(f"Successfully updated Attribut '{attribute}' in plugin.yaml!") + print(f"Successfully updated Attribute '{attribute}' in plugin.yaml!") else: - print(f"Attribut '{attribute}' not defined in plugin.yaml") + print(f"Attribute '{attribute}' not defined in plugin.yaml") if __name__ == '__main__': - export_item_attributs_py() - for attribute in ITEM_ATTRIBUTS: - update_plugin_yaml_avm_data_type() \ No newline at end of file + export_item_attributes_py() + for attribute in ITEM_ATTRIBUTES: + update_plugin_yaml_item_attributes() \ No newline at end of file From 14ff717c6b264def2ad24d14fb0260931e270034 Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:26:22 +0200 Subject: [PATCH 117/775] ecmd: Retired the plugin (moved to plugin_archive) --- ecmd/README.md | 60 ------------------ ecmd/__init__.py | 156 ----------------------------------------------- ecmd/plugin.yaml | 26 -------- 3 files changed, 242 deletions(-) delete mode 100755 ecmd/README.md delete mode 100755 ecmd/__init__.py delete mode 100755 ecmd/plugin.yaml diff --git a/ecmd/README.md b/ecmd/README.md deleted file mode 100755 index 46b88fb03..000000000 --- a/ecmd/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# ecmd - -## Requirements - -The ECMD plugin connects to an AVR microcontroller board with ethersex firmware via network. -The ECMD protocoll provides access to attached 1wire temperature sensors DS1820. - -## Supported Hardware - -* 8-bit AVR microcontroller boards with network support, like NetIO (Pollin), Etherrape (lochraster.org), etc. -* 1-wire temperature and other sensors -* - DS1820 (temperature sensor) -* - DS18B20 (temperature sensor) -* - DS1822 (temperature sensor) -* - DS2502 (EEPROM) -* - DS2450 (4 channel ADC) - -## Configuration - -### plugin.yaml - -You can specify the host ip of your ethersex device. - -```yaml -ecmd: - plugin_name: ecmd - host: 10.10.10.10 - # port: 2701 -``` - -This plugin needs an host attribute and you could specify a port attribute which differs from the default '1010'. - -### items.yaml - -The item needs to define the 1-wire address of the sensor. - -#### ecmd1wire_addr - -```yaml -mysensor: - ecmd1wire_addr: 10f01929020800dc - type: num -``` - -#### Example - -Please provide an item configuration with every attribute and usefull settings. 
- -```yaml -someroom: - - temperature: - name: Raumtemperatur - ecmd1wire_addr: 10f01929020800dc - type: num - sqlite: 'yes' - history: 'yes' - visu: 'yes' - sv_widget: "\"{{ basic.float('item', 'item', '°') }}\" , \"{{ plot.period('item-plot', 'item') }}\"" -``` diff --git a/ecmd/__init__.py b/ecmd/__init__.py deleted file mode 100755 index 184589159..000000000 --- a/ecmd/__init__.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2013 Dirk Wallmeier dirk@wallmeier.info -######################################################################### -# This file is part of SmartHomeNG. https://github.com/smarthomeNG// -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, -# MA 02110-1301, USA. -# -######################################################################### - - -import logging -import socket -import threading -import time - -logger = logging.getLogger('') - - -class owex(Exception): - pass - - -class ECMD1wireBase(): - - def __init__(self, host='127.0.0.1', port=2701): - self.host = host - self.port = int(port) - self._lock = threading.Lock() - self.connected = False - self._connection_attempts = 0 - self._connection_errorlog = 60 - - def connect(self): - self._lock.acquire() - try: - self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._sock.settimeout(2) - self._sock.connect((self.host, self.port)) - except Exception as e: - self._connection_attempts -= 1 - if self._connection_attempts <= 0: - logger.error('ecmd1wire: could not connect to {0}:{1}: {2}'.format(self.host, self.port, e)) - self._connection_attempts = self._connection_errorlog - return - else: - self.connected = True - logger.info('ecmd1wire: connected to {0}:{1}'.format(self.host, self.port)) - self._connection_attempts = 0 - finally: - self._lock.release() - - def request(self): - # name: request - # get a table of all DS1820 sensors and their names and values, - # separated by '\t' and terminated by 'OK\n': - # 10f01929020800dc sensor1 26.4 - # 100834290208001b sensor2 25.4 - # OK - # @return dict {'addr' : value} - # - if not self.connected: - raise owex("ecmd1wire: No connection to ethersex server {0}:{1}.".format(self.host, self.port)) - self._lock.acquire() - try: - self._sock.send("1w list\n") - except Exception as e: - self._lock.release() - raise owex("error sending request: {0}".format(e)) - table = {} - while 1: - try: - response = self._sock.recv(1024) - except socket.timeout: - self.close() - break - if not response: - self.close() - break - if response != "OK": - for r in response.split("\n"): - if r and len(r.split("\t")) == 3: - addr, name, value = r.split("\t") - table[addr] = float(value) - logger.debug('ecmd1wire: append Sensor {0} = {1}\n'.format(addr, table[addr])) - 
self._lock.release() - return table - - def close(self): - self.connected = False - try: - self._sock.close() - except: - pass - - -class ECMD(ECMD1wireBase): - _sensors = {} - alive = True - - def __init__(self, smarthome, cycle=120, host='192.168.178.10', port=2701): - ECMD1wireBase.__init__(self, host, port) - self._sh = smarthome - self._cycle = int(cycle) - - def _refresh(self): - start = time.time() - table = self.request() - for addr in self._sensors: - if not self.alive: - break - if addr not in table: - logger.debug("ecmd1wire: {0} not in sensors watched".format(addr)) - else: - try: - value = table[addr] - except Exception: - logger.info("ecmd1wire: problem reading {0}".format(addr)) - continue - else: - logger.info("ecmd1wire: sensor {0} has {1}°".format(addr, value)) - if value == '85': - logger.info("ecmd1wire: problem reading {0}. Wiring problem?".format(addr)) - continue - item = self._sensors[addr] - item(value, 'ECMD1Wire') - cycletime = time.time() - start - logger.debug("cycle takes {0} seconds".format(cycletime)) - - def run(self): - self.alive = True - self._sh.scheduler.add('ecmd1wire', self._refresh, cycle=self._cycle, prio=5, offset=0) - - def stop(self): - self.alive = False - - def parse_item(self, item): - if 'ecmd1wire_addr' not in item.conf: - return - addr = item.conf['ecmd1wire_addr'] - self._sensors[addr] = item - logger.info("ecmd1wire: Sensor {0} added.".format(addr)) diff --git a/ecmd/plugin.yaml b/ecmd/plugin.yaml deleted file mode 100755 index 6777a45a1..000000000 --- a/ecmd/plugin.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: gateway # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Anbindung eines AVRMicrocontrollers. Das Protokoll gibt Zugriff auf 1wire Sensoren DS1820' - en: '' - maintainer: '? (Dirk Wallmeier)' -# tester: waldi # Who tests this plugin? - keywords: 1wire onewire - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page - -# Following entries are for Smart-Plugins: -# version: 1.3.3 # Plugin version -# sh_minversion: 1.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# multi_instance: False - classname: ECMD # class containing the plugin - -#parameters: - # Definition of parameters to be configured in etc/plugin.yaml - -#item_attributes: - # Definition of item attributes defined by this plugin - From db14cbca655f56b8ab04a47f7095ab9a250ea8e6 Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:27:41 +0200 Subject: [PATCH 118/775] elro: Retired the plugin (moved to plugin_archive) --- elro/README.md | 123 ----------------------------------------------- elro/__init__.py | 104 --------------------------------------- elro/plugin.yaml | 26 ---------- 3 files changed, 253 deletions(-) delete mode 100755 elro/README.md delete mode 100755 elro/__init__.py delete mode 100755 elro/plugin.yaml diff --git a/elro/README.md b/elro/README.md deleted file mode 100755 index f8204adba..000000000 --- a/elro/README.md +++ /dev/null @@ -1,123 +0,0 @@ -# Elro - -## Description - -You can use this Plugin to control elro (or elro-based) remote-control-switches (rc-switches). -If the backend-server uses the same command-syntax as the rc_switch_server project, -you can even control non-elro rc-switches too! 
(Or everything other that can be switched on and off) - -For rc_switch_server command-syntax look at https://github.com/Brootux/rc_switch_server.py (Server-Clients) - -## Requirements - - * Installed and running rc_switch_server (https://github.com/Brootux/rc_switch_server.py) - -## Configuration - -### plugin.yaml - -You have to just simply copy the following into your plugin.yaml file. The ip-address/hostname of the rc_switch_server has to be setup later in the items.yaml! - -```yaml -elro: - plugin_name: elro -``` - -### items.yaml - -The most item-fields of this plugin are mandatory. So you should always use all of the fields showed in the following example. - -#### Example - -```yaml -RCS: - type: str - elro_host: localhost - elro_port: 6700 - - A: - type: bool - elro_system_code: 0.0.0.0.1 - elro_unit_code: 1 - elro_send: value - enforce_updates: 'yes' - visu_acl: rw - - B: - type: bool - elro_system_code: 0.0.0.0.1 - elro_unit_code: 2 - elro_send: value - enforce_updates: 'yes' - visu_acl: rw - - C: - type: bool - elro_system_code: 0.0.0.0.1 - elro_unit_code: 4 - elro_send: value - enforce_updates: 'yes' - visu_acl: rw - - D: - type: bool - elro_system_code: 0.0.0.0.1 - elro_unit_code: 8 - elro_send: value - enforce_updates: 'yes' - visu_acl: rw -``` - -Description of the attributes: - -* __elro_host__: the ip-address/hostname of the rc_switch_server (mandatory) -* __elro_port__: the port of the rc_switch_server -* __elro_system_code__: the code of your home (mandatory) -* __elro_unit_code__: the code of the unit, you want to switch (mandatory) -* __elro_send__: use always "value" here (mandatory) - -Hints: -* __You have to setup the items as showed in a tree structure with the `elro_host` as its root!__ (The tree can be a subtree of a greater tree but always has to be `elro_host` as a attribute of the root item) -* For __elro_system_code__ you have to set the correct bits of you code (no conversion) -* For __elro_unit_code__ you have to convert your settings to binary (A = 1, B = 2, C = 4, D = 8, ...) -* For __elro_send__ always use the transmitting of a value per button (because sometimes the signals dont get transported correctly from remote-transmitter, so you should have the chance to send "on" or "off" more than once) - -### Example for multiple rc_switch_server´s - -```yaml -RCS1: - type: str - elro_host: localhost - elro_port: 6700 - A: - type: bool - elro_system_code: '0.0.0.0.1' - elro_unit_code: 1 - elro_send: value - enforce_updates: yes - visu_acl: rw - -RCS2: - type: str - elro_host: 192.168.0.100 - elro_port: 6666 - A: - type: bool - elro_system_code: '0.0.0.0.2' - elro_unit_code: 1 - elro_send: value - enforce_updates: yes - visu_acl: rw -``` - -### SmartVisu - -I suggest you to use the following setup per rc-switch: - -```html - -
    TV-Center
-    {{ basic.button('rcs_tv_on', 'RCS.A', 'On', '', '1', 'midi') }}
-    {{ basic.button('rcs_tv_off', 'RCS.A', 'Off', '', '0', 'midi') }}
-
    -``` diff --git a/elro/__init__.py b/elro/__init__.py deleted file mode 100755 index 9bf856dc2..000000000 --- a/elro/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -# -# Copyright 2014 Brootux (https://github.com/Brootux) as GNU-GPL -# -# This file is part of SmartHomeNG. https://github.com/smarthomeNG// -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# - -import logging -import re -import socket - -logger = logging.getLogger('elro') - - -class Elro(): - - def __init__(self, smarthome): - self._sh = smarthome - self._host = "localhost" - self._port = 6700 - - def run(self): - self.alive = True - - def stop(self): - self.alive = False - - def parse_item(self, item): - - # Parse just the parent-items wich define the 'elro_host' - # and/or 'elro_port' attribute - if 'elro_host' in item.conf: - - # Get the host-name/ip from parent-item - self._host = item.conf['elro_host'] - - # Try to get the port from parent-item - # (else the default value will be uesed) - if 'elro_port' in item.conf: - self._port = int(item.conf['elro_port']) - - # Get all child-item-lists for the given fields (parse to set) - escSet = set(self._sh.find_children(item, 'elro_system_code')) - eucSet = set(self._sh.find_children(item, 'elro_unit_code')) - esSet = set(self._sh.find_children(item, 'elro_send')) - - # Just get those child-items which have all mandatory fields - # set (elro_system_code and elro_unit_code and elro_send) - validItems = list(escSet & eucSet & esSet) - - # Iterate over all valid child-items - for item in validItems: - # Add fields of parent-item to all valid child-items - item.conf['elro_host'] = self._host - item.conf['elro_port'] = self._port - - # Add method trigger to all valid child-items - item.add_method_trigger(self._send) - - def _send(self, item, caller=None, source=None, dest=None): - - # Just let calls from outside pass - if (caller != 'Elro'): - # Send informations to server (e.g. 
"0.0.0.0.1;2;0") - self.send("%s;%s;%s" % ( - item.conf['elro_system_code'], - item.conf['elro_unit_code'], - int(item()) - ), - item.conf['elro_host'], - item.conf['elro_port']) - - def send(self, payload="0.0.0.0.1;1;0", host="localhost", port=6700): - - # Print what will be send as a debug-message - logger.debug("ELRO: Sending %s to %s:%s" % (payload, host, port)) - - # Create socket - s = socket.socket() - - # Connect to server - s.connect((host, port)) - - # Write payload to server - s = s.makefile(mode="rw") - s.write(payload) - s.flush() - - # Close server-connection - s.close() diff --git a/elro/plugin.yaml b/elro/plugin.yaml deleted file mode 100755 index 9427e3820..000000000 --- a/elro/plugin.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: gateway # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Unterstützt elro-basierter Remote-Control-Switches' - en: '' - maintainer: '? (Brootux)' -# tester: efgh # Who tests this plugin? -# keywords: kwd1 kwd2 # keywords, where applicable - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page - -# Following entries are for Smart-Plugins: -# version: 1.3.3 # Plugin version -# sh_minversion: 1.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# multi_instance: False - classname: Elro # class containing the plugin - -#parameters: - # Definition of parameters to be configured in etc/plugin.yaml - -#item_attributes: - # Definition of item attributes defined by this plugin - From 0ebb7534aab945dba2583a6cd4fd74971eaeae16 Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:28:40 +0200 Subject: [PATCH 119/775] iaqstick: Retired the plugin (moved to plugin_archive) --- iaqstick/README.md | 57 ---------- iaqstick/__init__.py | 221 -------------------------------------- iaqstick/plugin.yaml | 26 ----- iaqstick/requirements.txt | 1 - 4 files changed, 305 deletions(-) delete mode 100755 iaqstick/README.md delete mode 100755 iaqstick/__init__.py delete mode 100755 iaqstick/plugin.yaml delete mode 100755 iaqstick/requirements.txt diff --git a/iaqstick/README.md b/iaqstick/README.md deleted file mode 100755 index 8478ead8c..000000000 --- a/iaqstick/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# iaqstick - -## Requirements - -* pyusb -* udev rule - -install by -```bash -apt-get install python3-setuptools -pip3 install "pyusb>=1.0.2" -``` - -``` -echo 'SUBSYSTEM=="usb", ATTR{idVendor}=="03eb", ATTR{idProduct}=="2013", MODE="666"' > /etc/udev/rules.d/99-iaqstick.rules -udevadm trigger -``` - -## Supported Hardware - -* Applied Sensor iAQ Stick -* Voltcraft CO-20 (by Conrad) -* others using the same reference design - -## Configuration - -### plugin.yaml - -```yaml -iaqstick: - plugin_name: iaqstick -# update_cycle: 10 -``` - -Description of the attributes: - -* __update_cycle__: interval in seconds how often the data is read from the stick (default 10) - -### items.yaml - -Attributes: -* __iaqstick_id__: used to distinguish multiple sticks -* __iaqstick_info__: used to get data from the stick - -To get the Stick-ID, start sh.py and check the log saying: "iaqstick: Vendor: AppliedSensor / Product: iAQ Stick / Stick-ID: ". -Don't bother if you are going to use a single stick anyway. 
- -Fields: -* __ppm__: get the air quality measured in part-per-million (ppm) - -```yaml -iAQ_Stick: - PPM: - type: num - iaqstick_id: H02004-266272 - iaqstick_info: ppm -``` diff --git a/iaqstick/__init__.py b/iaqstick/__init__.py deleted file mode 100755 index e3447ff86..000000000 --- a/iaqstick/__init__.py +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2013 Robert Budde robert@projekt131.de -######################################################################### -# iAQ-Stick plugin for SmartHomeNG. https://github.com/smarthomeNG// -# -# This plugin is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This plugin is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . -######################################################################### - -#/etc/udev/rules.d/99-iaqstick.rules -#SUBSYSTEM=="usb", ATTR{idVendor}=="03eb", ATTR{idProduct}=="2013", MODE="666" -#udevadm trigger - -import logging -import usb.core -import usb.util -from time import sleep - -logger = logging.getLogger('iAQ_Stick') - -class iAQ_Stick(): - def __init__(self, smarthome, update_cycle = "10"): - self._sh = smarthome - self._update_cycle = int(update_cycle) - self._items = {} - self._devs = {} - - def read(self, dev): - in_data = bytes() - try: - while True: - ret = bytes(dev.read(0x81, 0x10, 1000)) - if len(ret) == 0: - break - in_data += ret - except Exception as e: - logger.error("iaqstick: read - {}".format(e)) - pass - return in_data - - def xfer_type1(self, dev, msg): - out_data = bytes('@{:04X}{}\n@@@@@@@@@@'.format(self._devs[dev]['type1_seq'], msg), 'utf-8') - self._devs[dev]['type1_seq'] = (self._devs[dev]['type1_seq'] + 1) & 0xFFFF - ret = dev.write(0x02, out_data[:16], 1000) - return self.read(dev).decode('iso-8859-1') - - def xfer_type2(self, dev, msg): - out_data = bytes('@', 'utf-8') + self._devs[dev]['type2_seq'].to_bytes(1, byteorder='big') + bytes('{}\n@@@@@@@@@@@@@'.format(msg), 'utf-8') - self._devs[dev]['type2_seq'] = (self._devs[dev]['type2_seq'] + 1) if (self._devs[dev]['type2_seq'] < 0xFF) else 0x67 - ret = dev.write(0x02, out_data[:16], 1000) - in_data = bytes() - return self.read(dev) - - def _init_dev(self, dev): - try: - if dev.is_kernel_driver_active(self._intf): - dev.detach_kernel_driver(self._intf) - dev.set_configuration(0x01) - usb.util.claim_interface(dev, self._intf) - dev.set_interface_altsetting(self._intf, 0x00) - vendor = usb.util.get_string(dev, dev.iManufacturer) - product = usb.util.get_string(dev,dev.iProduct ) - self._devs[dev] = {'type1_seq':0x0001, 'type2_seq':0x67} - ret = self.xfer_type1(dev, '*IDN?') - pos1 = ret.find('S/N:') + 4 - id = '{:s}-{:d}'.format(bytes.fromhex(ret[pos1:pos1+12]).decode('ascii'), int(ret[pos1+14:pos1+20], 16)) - logger.info('iaqstick: Vendor: {} / Product: {} / Stick-ID: {}'.format(vendor, product, id)) - if (id not in self._items): - logger.warning('iaqstick: no specific item for Stick-ID {} - use 
\'iaqstick_id\' to distinguish multiple sticks!'.format(id)) - #ret = self.xfer_type1(dev, 'KNOBPRE?') - #ret = self.xfer_type1(dev, 'WFMPRE?') - #ret = self.xfer_type1(dev, 'FLAGS?') - return id - except Exception as e: - logger.error("iaqstick: init interface failed - {}".format(e)) - return None - - def run(self): - devs = list(usb.core.find(idVendor=0x03eb, idProduct=0x2013, find_all=True)) - if devs is None: - logger.error('iaqstick: iAQ Stick not found') - return - logger.debug('iaqstick: {} iAQ Stick connected'.format(len(devs))) - self._intf = 0 - - for dev in devs: - id = self._init_dev(dev) - if id is not None: - self._devs[dev]['id'] = id - - self.alive = True - self._sh.scheduler.add('iAQ_Stick', self._update_values, prio = 5, cycle = self._update_cycle) - logger.info("iaqstick: init successful") - - def stop(self): - self.alive = False - for dev in self._devs: - try: - usb.util.release_interface(dev, self._intf) - if dev.is_kernel_driver_active(self._intf): - dev.detach_kernel_driver(self._intf) - except Exception as e: - logger.error("iaqstick: releasing interface failed - {}".format(e)) - try: - self._sh.scheduler.remove('iAQ_Stick') - except Exception as e: - logger.error("iaqstick: removing iAQ_Stick from scheduler failed - {}".format(e)) - - def _update_values(self): - logger.debug("iaqstick: updating {} sticks".format(len(self._devs))) - for dev in self._devs: - logger.debug("iaqstick: updating {}".format(self._devs[dev]['id'])) - try: - self.xfer_type1(dev, 'FLAGGET?') - meas = self.xfer_type2(dev, '*TR') - ppm = int.from_bytes(meas[2:4], byteorder='little') - logger.debug('iaqstick: ppm: {}'.format(ppm)) - #logger.debug('iaqstick: debug?: {}'.format(int.from_bytes(meas[4:6], byteorder='little'))) - #logger.debug('iaqstick: PWM: {}'.format(int.from_bytes(meas[6:7], byteorder='little'))) - #logger.debug('iaqstick: Rh: {}'.format(int.from_bytes(meas[7:8], byteorder='little')*0.01)) - #logger.debug('iaqstick: Rs: {}'.format(int.from_bytes(meas[8:12], byteorder='little'))) - id = self._devs[dev]['id'] - if id in self._items: - if 'ppm' in self._items[id]: - for item in self._items[id]['ppm']['items']: - item(ppm, 'iAQ_Stick', 'USB') - if '*' in self._items: - if 'ppm' in self._items['*']: - for item in self._items['*']['ppm']['items']: - item(ppm, 'iAQ_Stick', 'USB') - except Exception as e: - logger.error("iaqstick: update failed - {}".format(e)) - logger.error("iaqstick: Trying to recover ...") - broken_id = self._devs[dev]['id'] - del self._devs[dev] - __devs = list(usb.core.find(idVendor=0x03eb, idProduct=0x2013, find_all=True)) - for __dev in __devs: - if (__dev not in self._devs): - id = self._init_dev(__dev) - if id == broken_id: - logger.error("iaqstick: {} was ressurrected".format(id)) - self._devs[__dev]['id'] = id - else: - logger.error("iaqstick: found other yet unknown stick: {}".format(id)) - - def parse_item(self, item): - if 'iaqstick_info' in item.conf: - logger.debug("parse item: {0}".format(item)) - if 'iaqstick_id' in item.conf: - id = item.conf['iaqstick_id'] - else: - id = '*' - info_tag = item.conf['iaqstick_info'].lower() - if not id in self._items: - self._items[id] = {'ppm': {'items': [item], 'logics': []}} - else: - self._items[id]['ppm']['items'].append(item) - return None - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - myplugin = Plugin('iaqstick') - myplugin.run() - -#Application Version: 2.19.0 (Id: Form1.frm 1053 2010-06-30 11:00:09Z patrik.arven@appliedsensor.com ) -# -#Device 0: -#Name: iAQ Stick -#Firmware: 
1.12p5 $Revision: 346 $ -#Protocol: 5 -#Hardware: C -#Processor: ATmega32U4 -#Serial number: S/N:48303230303415041020 -#Web address: -#Plot title: Air Quality Trend -# -#Channels: 5 -#... Channel 0:CO2/VOC level -#... Channel 1:Debug -#... Channel 2:PWM -#... Channel 3:Rh -#... Channel 4:Rs -#Knobs: 8 -#... Knob CO2/VOC level_warn1:1000 -#... Knob CO2/VOC level_warn2:1500 -#... Knob Reg_Set:151 -#... Knob Reg_P:3 -#... Knob Reg_I:10 -#... Knob Reg_D:0 -#... Knob LogInterval:0 -#... Knob ui16StartupBits:1 -#Flags: 5 -#... WARMUP=&h0000& -#... BURN-IN=&h0000& -#... RESET BASELINE=&h0000& -#... CALIBRATE HEATER=&h0000& -#... LOGGING=&h0000& -# -#@013E;;DEBUG: -#Log: -#buffer_size=&h1400; -#address_base=&h4800; -#readindex=&h0040; -#Write index=&h0000; -#nValues=&h0000; -#Records=&h0000; -#nValues (last)=&h0000; -#uint16_t g_u16_loop_cnt_100ms=&h08D4; -#;\x0A diff --git a/iaqstick/plugin.yaml b/iaqstick/plugin.yaml deleted file mode 100755 index 79aa362a9..000000000 --- a/iaqstick/plugin.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: gateway # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Unterstützung für Applied Sensor iAQ Stick und Voltcraft CO-20' - en: '' - maintainer: '? (Robert Budde )' -# tester: efgh # Who tests this plugin? -# keywords: kwd1 kwd2 # keywords, where applicable - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page - -# Following entries are for Smart-Plugins: -# version: 1.3.3 # Plugin version -# sh_minversion: 1.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# multi_instance: False - classname: iAQ_Stick # class containing the plugin - -#parameters: - # Definition of parameters to be configured in etc/plugin.yaml - -#item_attributes: - # Definition of item attributes defined by this plugin - diff --git a/iaqstick/requirements.txt b/iaqstick/requirements.txt deleted file mode 100755 index 62b1668a7..000000000 --- a/iaqstick/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pyusb>=1.0.2 From fb5e4a015a258db6169930301c192055d9b6cb30 Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:29:43 +0200 Subject: [PATCH 120/775] snom: Retired the plugin (moved to plugin_archive) --- snom/README.md | 61 -------------------------- snom/__init__.py | 109 ----------------------------------------------- snom/plugin.yaml | 26 ----------- 3 files changed, 196 deletions(-) delete mode 100755 snom/README.md delete mode 100755 snom/__init__.py delete mode 100755 snom/plugin.yaml diff --git a/snom/README.md b/snom/README.md deleted file mode 100755 index 7c781bef6..000000000 --- a/snom/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# snom - -## Requirements -This plugin has no requirements or dependencies. - -## Configuration - -### plugin.yaml - -```yaml -snom: - class_name: Snom - class_path: plugins.snom - # phonebook: None - # username: None - # password: None -``` - -#### Attributes - * `host`: specifies the hostname of your mail server. - * `port`: if you want to use a nonstandard port. - * `username`/`password`: login information for _all_ snom phones - * `phonebook`: path to a xml phonebook file e.g. '/var/www/voip/phonebook.xml' - -### items.yaml - -#### snom_host -With 'snom_host' you specify the host name or IP address of a snom phone. 
- -#### snom_key -This is the key name of an item in the snom configuration. You have to specify the 'snom_host' in the same or the item above to make the link to the phone. - -```yaml -phone: - snom_host: 10.0.0.4 - - display: - type: str - snom_key: user_realname1 - - mailbox: - type: num - ast_box: 33 - - hook: - type: bool - nw: 'yes' -``` - -### logic.yaml - -Currently there is no logic configuration for this plugin. - -## Functions - -### phonebook_add(name, number) - -If you have specified a phonebook, you could add or change existing entries by calling this function. -You to provide the (unique) name and a number. It will replace the number of an existing entry with exactly the same name. - -See the [phonebook logic](https://github.com/smarthomeNG/smarthome/wiki/Phonebook) for a logic which is using this function. diff --git a/snom/__init__.py b/snom/__init__.py deleted file mode 100755 index cbf4302b5..000000000 --- a/snom/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2012-2013 Marcus Popp marcus@popp.mx -######################################################################### -# This file is part of SmartHomeNG. https://github.com/smarthomeNG// -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -######################################################################### - -import logging -import urllib.request -import urllib.parse -import urllib.error -import xml.etree.ElementTree - -logger = logging.getLogger('') - - -class Snom(): - - def __init__(self, smarthome, username=None, password=None, phonebook=None): - self._sh = smarthome - self._phonebook = phonebook - self._username = username - self._password = password - - def run(self): - self.alive = True - # if you want to create child threads, do not make them daemon = True! - # They will not shutdown properly. 
(It's a python bug) - - def stop(self): - self.alive = False - - def parse_item(self, item): - if 'snom_key' in item.conf: - logger.debug("parse item: {0}".format(item)) - if 'snom_host' in item.conf: - return self.update_item - else: - parent = item.return_parent() - if hasattr(parent, 'conf'): - if 'snom_host' in parent.conf: - item.conf['snom_host'] = parent.conf['snom_host'] - return self.update_item - logger.warning("No 'snom_host' specified for {0}".format(item.id())) - - def parse_logic(self, logic): - pass - - def update_item(self, item, caller=None, source=None, dest=None): - if caller != 'HTTP': - uri = "https://{0}/dummy.htm".format(item.conf['snom_host']) - req = "{0}?settings=save&store_settings=save&{1}={2}".format(uri, item.conf['snom_key'], urllib.parse.quote(str(item()))) - try: - self._sh.tools.fetch_url(req, self._username, self._password) - except Exception as e: - logger.exception("Error updating Snom Phone ({0}): {1}".format(item.conf['snom_host'], e)) - - def phonebook_add(self, name, number): - if self._phonebook is None: - logger.warning("Snom: No Phonebook specified") - return - root = xml.etree.ElementTree.Element('SnomIPPhoneDirectory') - root.text = "\n" - tree = xml.etree.ElementTree.ElementTree(root) - try: - root = tree.parse(self._phonebook) - except IOError as e: - logger.warning("Could not read {0}: {1}".format(self._phonebook, e)) - except Exception as e: - logger.warning("Problem reading {0}: {1}".format(self._phonebook, e)) - return - found = False - for entry in tree.findall('DirectoryEntry'): - ename = entry.findtext('Name') - if ename == name: - # update number - entry.find('Telephone').text = number - found = True - if not found: - # add new element - new = xml.etree.ElementTree.SubElement(tree.getroot(), 'DirectoryEntry') - new.tail = "\n" - xml.etree.ElementTree.SubElement(new, 'Name').text = name - xml.etree.ElementTree.SubElement(new, 'Telephone').text = number - # sort - data = [] - for entry in tree.findall('DirectoryEntry'): - key = entry.findtext("Name") - data.append((key, entry)) - data.sort() - root[:] = [item[-1] for item in data] - try: - tree.write(self._phonebook, encoding="UTF-8", xml_declaration=True) - except Exception as e: - logger.warning("Problem writing {0}: {1}".format(self._phonebook, e)) diff --git a/snom/plugin.yaml b/snom/plugin.yaml deleted file mode 100755 index 6ce4803d6..000000000 --- a/snom/plugin.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: gateway # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Telefonbuch Anbindung für Snom Telefone' - en: '' - maintainer: '? (mknx)' -# tester: efgh # Who tests this plugin? 
-# keywords: kwd1 kwd2 # keywords, where applicable - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page - -# Following entries are for Smart-Plugins: -# version: 1.3.3 # Plugin version -# sh_minversion: 1.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# multi_instance: False - classname: Snom # class containing the plugin - -#parameters: - # Definition of parameters to be configured in etc/plugin.yaml - -#item_attributes: - # Definition of item attributes defined by this plugin - From 0e69d2a207bee29ca96c94554f1206bf590a9dad Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:34:05 +0200 Subject: [PATCH 121/775] easymeter: Retired the plugin (moved to plugin_archive) --- easymeter/README.md | 59 --------------------- easymeter/__init__.py | 106 ------------------------------------- easymeter/plugin.yaml | 49 ----------------- easymeter/requirements.txt | 1 - 4 files changed, 215 deletions(-) delete mode 100755 easymeter/README.md delete mode 100755 easymeter/__init__.py delete mode 100755 easymeter/plugin.yaml delete mode 100755 easymeter/requirements.txt diff --git a/easymeter/README.md b/easymeter/README.md deleted file mode 100755 index 66e82b6ce..000000000 --- a/easymeter/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# Easymeter - -## Requirements - -* smartmeter using DLMS (Device Language Message Specification) IEC 62056-21 -* USB IR-Reader e.g. from volkszaehler.org - -install with -``` -sudo python3 -m pip install pyserial -``` - -make sure the serial port can be used by the user executing smarthome.py - -Example for a recent version of the Volkszaehler IR-Reader, please adapt the vendor- and product-id for your own readers: - -``` -echo 'SUBSYSTEM=="tty", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", ATTRS{serial}=="0092C9FE", MODE="0666", GROUP="dialout", SYMLINK+="dlms0"' > /etc/udev/rules.d/11-dlms.rules -udevadm trigger -``` -If you like, you can also give the serial port a descriptive name with this. - -## Supported Hardware - -* Easymeter Q3D with ir-reader from volkszaehler.org - -## Configuration - -### plugin.yaml - -```yaml -easymeter: - plugin_name: easymeter -``` - -Parameter for serial device are currently set to fix 9600/7E1. - -Description of the attributes: - -* none - -### items.yaml - -* __easymeter_code__: obis protocol code - -* __device__: USB device for ir-reader from volkszaehler.org - -### Example - -```yaml -output: - easymeter_code: 1-0:21.7.0*255 - device: /dev/ttyUSB0 - type: num -``` - -Please take care, there are different obis codes for different versions of Easymeter Q3D. -For example Version 3.02 reports obis code 1-0:21.7.0*255, version 3.04 -reports 1-0:21.7.255*255. diff --git a/easymeter/__init__.py b/easymeter/__init__.py deleted file mode 100755 index 2cda98b5a..000000000 --- a/easymeter/__init__.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -# -# Copyright 2013 KNX-User-Forum e.V. http://knx-user-forum.de/ -# -# This file is part of SmartHomeNG. 
https://github.com/smarthomeNG// -# -# SmartHomeNG is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SmartHomeNG is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SmartHomeNG. If not, see . -# - -import logging -import serial -import re -import time - -from lib.model.smartplugin import * -#from lib.item import Items - -#from .webif import WebInterface - -#logger = logging.getLogger('easymeter') - -PLUGIN_VERSION = '1.0.0' # (must match the version specified in plugin.yaml), use '1.0.0' for your initial plugin Release - - -class easymeter(SmartPlugin): - - def __init__(self, smarthome): - self._cycle = 10 - self._timeout = 2 - self._codes = dict() - - def run(self): - self.scheduler_add('poll_device', self.update_status, cycle=self._cycle) - self.alive = True - - def stop(self): - self.scheduler_remove('poll_device') - self.alive = False - - # parse items, if item has parameter netio_port - # add item to local list - def parse_item(self, item): - if 'easymeter_code' in item.conf: - - if item.conf['device'] not in self._codes: - self._codes[item.conf['device']] = dict() - - self._codes[item.conf['device']][ - item.conf['easymeter_code']] = item - - return None - - def update_status(self): - - for curr_port in self._codes.keys(): - ser = serial.Serial( - port=curr_port, - timeout=2, - baudrate=9600, - bytesize=serial.SEVENBITS, - parity=serial.PARITY_NONE, - stopbits=serial.STOPBITS_ONE) - - start = time.time() - ser.flushInput() - - # wait for start of next datablock - while True: - line = ser.readline().decode("utf-8") - if line.find('!') >= 0: - break - - # read next datablock - datablock = [] - - while True: - line = ser.readline().decode("utf-8") - datablock.append(line) - if line.find('!') >= 0: - break - - # close serial connection - ser.close() - - for code in self._codes[curr_port].keys(): - r = re.compile('[()]+') - for line in datablock: - line = line.split(code) - if len(line) > 1: - self._codes[curr_port][code]( - r.split(line[1])[1].split('*')[0]) - - cycletime = time.time() - start - self.logger.debug("cycle takes %d seconds", cycletime) diff --git a/easymeter/plugin.yaml b/easymeter/plugin.yaml deleted file mode 100755 index 41fbeb6ac..000000000 --- a/easymeter/plugin.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: interface # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Easymeter Q3D Unterstützung - Parameter für serielle Devices sind aktuell fest auf 9600/7E1 gesetzt' - en: 'Easymeter Q3D support - Parameter for serial device are currently set to fix 9600/7E1' - maintainer: '?' - tester: '?' # Who tests this plugin? 
-# keywords: kwd1 kwd2 # keywords, where applicable - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page -# support: https://knx-user-forum.de/forum/supportforen/smarthome-py - -# Following entries are for Smart-Plugins: - version: 1.0.0 # Plugin version (must match the version specified in __init__.py) - sh_minversion: 1.6 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) - multi_instance: False - restartable: True - classname: easymeter # class containing the plugin - -parameters: NONE - # Definition of parameters to be configured in etc/plugin.yaml (enter 'parameters: NONE', if section should be empty) - -item_attributes: - # Definition of item attributes defined by this plugin (enter 'item_attributes: NONE', if section should be empty) - easymeter_code: - type: str - description: - de: 'Obis Protokoll Code' - en: 'obis protocol code' - - device: - type: str - default: /dev/ttyUSB0 - description: - de: 'USB device für den IR-Readed von volkszaehler.org' - en: 'USB device for ir-reader from volkszaehler.org' - -item_structs: NONE - # Definition of item-structure templates for this plugin (enter 'item_structs: NONE', if section should be empty) - -plugin_functions: NONE - # Definition of plugin functions defined by this plugin (enter 'plugin_functions: NONE', if section should be empty) - -logic_parameters: NONE - # Definition of logic parameters defined by this plugin (enter 'logic_parameters: NONE', if section should be empty) - diff --git a/easymeter/requirements.txt b/easymeter/requirements.txt deleted file mode 100755 index 27b4af0a5..000000000 --- a/easymeter/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pyserial>=3.2.1 From 81c27626fa1adb9dbd79124efe15c486ac72cd6a Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:34:26 +0200 Subject: [PATCH 122/775] vr100: Retired the plugin (moved to plugin_archive) --- vr100/README.md | 88 ----------------------------- vr100/__init__.py | 140 ---------------------------------------------- vr100/plugin.yaml | 27 --------- 3 files changed, 255 deletions(-) delete mode 100755 vr100/README.md delete mode 100755 vr100/__init__.py delete mode 100755 vr100/plugin.yaml diff --git a/vr100/README.md b/vr100/README.md deleted file mode 100755 index edc0fa72f..000000000 --- a/vr100/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# VR100 - -## Requirements - -bluez - -install by -```bash -$ apt-get install bluez -$ hcitool scan -Scanning ... - -$ simple-agent hci0 -RequestPinCode (/org/bluez/3070/hci0/dev_bt_addr_underscores) -Enter PIN Code: -Release -New device (/org/bluez/3070/hci0/dev_bt_addr_underscores) -$ bluez-test-device trusted yes -$ bluez-test-device list -``` - -## Supported Hardware - -A Vorwerk Kobold VR100 robotic vacuum cleaner with a retrofitted bluetooth module. - -## Configuration - -### plugin.yaml - -``` -vr100: - class_name: VR100 - class_path: plugins.vr100 - bt_addr: 07:12:07:xx:xx:xx - # update_cycle: 60 -``` - -Description of the attributes: - -* __bt_addr__: MAC-address of the robot (find out with 'hcitool scan') -* __update_cycle__: interval in seconds how often the data is read from the robot (default 60) - -### items.yaml - -You can use all commands available by the serial interface. 
- -For a explanation of all available commands type 'help' when connected to robot - -Attributes: -* __vr100_cmd__: used to set a comand string -* __vr100_info__: used to get data from the robot - all but the last strings are send as a comand, the last string is read to get the value - -Fields: -* __{}__: the value of the item is written to this placeholder (don't use if a fixed/no value is required) - -You should verify all your commands manually by using the serial interface. - -```yaml -VR100: - - Reinigung: - type: bool - vr100_cmd: Clean - - Spot: - type: bool - vr100_cmd: Clean Spot - - Batterie: - - Fuellstand: - type: num - sqlite: 'true' - vr100_info: GetCharger FuelPercent - - Ladung_aktiv: - type: bool - vr100_info: GetCharger ChargingActive - - leer: - type: bool - vr100_info: GetCharger EmptyFuel - - Spannung: - type: num - sqlite: 'true' - vr100_info: GetCharger VBattV -``` diff --git a/vr100/__init__.py b/vr100/__init__.py deleted file mode 100755 index 1b385761e..000000000 --- a/vr100/__init__.py +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env python3 -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -######################################################################### -# Copyright 2013 Robert Budde robert@projekt131.de -######################################################################### -# VR100/Neato plugin for SmartHomeNG. https://github.com/smarthomeNG// -# -# This plugin is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This plugin is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . 
-######################################################################### - -import logging -import socket -import sys - -logger = logging.getLogger('VR100') - - -class VR100(): - - def __init__(self, smarthome, bt_addr, update_cycle="300"): - self._sh = smarthome - self._update_cycle = int(update_cycle) - self._query_items = {} - self._bt_addr = bt_addr - self._terminator = bytes('\r\n\x1a\r\n\x1a', 'utf-8') - - def _update_values(self): - #logger.debug("vr100: update") - for query_cmd, fields in self._query_items.items(): - #logger.debug("vr100: requesting \'{}\'".format(query_cmd)) - self._send(query_cmd) - for line in self._recv().splitlines(): - field, _, value = line.partition(',') - #logger.debug("vr100: {}={}".format(field, value)) - field = field.upper() - if field in self._query_items[query_cmd]: - for item in self._query_items[query_cmd][field]['items']: - item(value, 'VR100', "field \'{}\'".format(field)) - - def run(self): - self.alive = True - if True: - try: - self._btsocket = socket.socket( - socket.AF_BLUETOOTH, socket.SOCK_STREAM, socket.BTPROTO_RFCOMM) - self._btsocket.connect((self._bt_addr, 1)) - logger.info( - "vr100: via bluetooth connected to {}".format(self._bt_addr)) - except: - logger.error( - "vr100: establishing connection to robot failed - {}".format(sys.exc_info())) - return - self._sh.scheduler.add('VR100', self._update_values, - prio=5, cycle=self._update_cycle) - - def stop(self): - self.alive = False - try: - self._sh.scheduler.remove('VR100') - except: - logger.error( - "vr100: removing VR100 from scheduler failed - {}".format(sys.exc_info())) - try: - self._btsocket.close() - except: - logger.error( - "vr100: closing connection to robot failed - {}".format(sys.exc_info())) - - def parse_item(self, item): - if 'vr100_cmd' in item.conf: - cmd = item.conf['vr100_cmd'] - logger.debug("vr100: {0} will send cmd \'{1}\'".format(item, cmd)) - return self.update_item - if 'vr100_info' in item.conf: - info = item.conf['vr100_info'].rsplit(' ', 1) - query_cmd = info[0] - field = info[1].upper() - if not query_cmd in self._query_items: - self._query_items[query_cmd] = {} - if not field in self._query_items[query_cmd]: - self._query_items[query_cmd][field] = { - 'items': [], 'logics': []} - if not item in self._query_items[query_cmd][field]['items']: - self._query_items[query_cmd][field]['items'].append(item) - logger.debug("vr100: {0} will be updated by querying \'{1}\' and extracting \'{2}\'".format( - item, query_cmd, field)) - return None - - def update_item(self, item, caller=None, source=None, dest=None): - try: - cmd = item.conf['vr100_cmd'] - value = item() - if isinstance(value, bool): - value = 'on' if value else 'off' - if cmd.lower().startswith('clean') and not value: - # allow stopping cleaning by setting item to false - cmd = 'clean stop' - self._send(cmd.format(value)) - except: - pass - - def _recv(self, timeout=1.0): - try: - msg = bytearray() - self._btsocket.settimeout(timeout) - while ((len(msg) < len(self._terminator)) or (msg[-len(self._terminator):] != self._terminator)): - msg += self._btsocket.recv(1000) - except socket.timeout: - logger.warning("vr100: rx: timeout after {}s".format(timeout)) - return '' - except: - logger.warning("vr100: rx: exception - {}".format(sys.exc_info())) - return '' - try: - msg = msg[:-len(self._terminator)].decode() - except: - msg = '' - #logger.debug("vr100: rx: msg: len={} / str={}".format(len(msg), msg)) - return msg - - def _send(self, msg): - #logger.debug("vr100: tx: len={} / str={}".format(len(msg), 
msg)) - try: - self._btsocket.send(bytes(msg + '\r\n', 'utf-8')) - except OSError as e: - if e.errno == 107: # Der Socket ist nicht verbunden - self.run() - except: - logger.warning("vr100: rx: exception - {}".format(sys.exc_info())) diff --git a/vr100/plugin.yaml b/vr100/plugin.yaml deleted file mode 100755 index 8e07e7b48..000000000 --- a/vr100/plugin.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Metadata for the classic-plugin -plugin: - # Global plugin attributes - type: interface # plugin type (gateway, interface, protocol, system, web) - description: - de: 'Anbindung eines Vorwerk Kobold VR100 Staubsaugers. Der Kobold muss mit einem Bluetooth Modul ausgerüstet sein' - en: '' - maintainer: '? (Robert Budde)' -# tester: efgh # Who tests this plugin? - keywords: bluetooth - state: deprecated # No user or tester for SmartPlugin conversion could be found -# documentation: https://github.com/smarthomeNG/plugins/blob/develop/mqtt/README.md # url of documentation (wiki) page -# support: https://knx-user-forum.de/forum/supportforen/smarthome-py - -# Following entries are for Smart-Plugins: -# version: 1.3.3 # Plugin version -# sh_minversion: 1.3 # minimum shNG version to use this plugin -# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) -# multi_instance: False - classname: VR100 # class containing the plugin - -#parameters: - # Definition of parameters to be configured in etc/plugin.yaml - -#item_attributes: - # Definition of item attributes defined by this plugin - From 2eb637b880de2a5c57b7785f6d18bc68ab2af9ef Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:35:00 +0200 Subject: [PATCH 123/775] pirateweather: Small modifications --- piratewthr/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index 2585b2fe9..a2a59440b 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -41,7 +41,6 @@ class PirateWeather(SmartPlugin): # https://api.pirateweather.net/forecast/[apikey]/[latitude],[longitude] _base_url = 'https://api.pirateweather.net/forecast/' - _base_forecast_url = _base_url + '%s/%s,%s' _http_response = { 500: 'Internal Server Error', @@ -365,13 +364,14 @@ def _build_url(self, url_type='forecast'): """ url = '' if url_type == 'forecast': - url = self._base_forecast_url % (self._key, self._lat, self._lon) - parameters = "?lang=%s" % self._lang + #url = self._base_forecast_url % (self._key, self._lat, self._lon) + url = self._base_url + f"{self._key}/{self._lat},{self._lon}" + parameters = f"?lang={self._lang}" if self._units is not None: - parameters = "%s&units=%s" % (parameters, self._units) - url = '%s%s' % (url, parameters) + parameters += "&units={self._units}" + url += parameters else: - self.logger.error('_build_url: Wrong url type specified: %s' %url_type) + self.logger.error(f"_build_url: Wrong url type specified: {url_type}") return url From e2c9c6ac53a636ade289fc55d81202586f47ff2e Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 25 May 2023 12:56:37 +0200 Subject: [PATCH 124/775] appletv: Added requirement to circumvent problematic requirements of pyatv package under Python 3.11 --- appletv/requirements.txt | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/appletv/requirements.txt b/appletv/requirements.txt index 7f4c32936..2dda3583f 100755 --- a/appletv/requirements.txt +++ b/appletv/requirements.txt @@ -1,2 +1,7 @@ pyatv==0.7.0;python_version<'3.9' -pyatv==0.10.3;python_version>='3.9' \ No newline at end of file 
+pyatv==0.10.3;python_version>='3.9' + +# miniaudio is used by pyatv, for the actual version of miniaudio (1.58) requested my pyatv 0.10.5, the wheel +# cannot be built on Python 3.11, so the following requirement was added: +miniaudio<=1.55;python_version>='3.11' + From 41cd05b72a42f67eba3e74aba76e0203b78a5bce Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 26 May 2023 16:59:00 +0200 Subject: [PATCH 125/775] DB-ADDON Plugin: - Reactivate cache in structs --- db_addon/plugin.yaml | 124 +++++++++++++++++++++---------------------- 1 file changed, 62 insertions(+), 62 deletions(-) diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 27999d06b..4f1231700 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -596,35 +596,35 @@ item_structs: db_addon_fct: verbrauch_heute type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_woche: name: Verbrauch seit Wochenbeginn db_addon_fct: verbrauch_woche type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_monat: name: Verbrauch seit Monatsbeginn db_addon_fct: verbrauch_monat type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_jahr: name: Verbrauch seit Jahresbeginn db_addon_fct: verbrauch_jahr type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_rolling_12m: name: Verbrauch innerhalb der letzten 12 Monate ausgehend von gestern db_addon_fct: verbrauch_rolling_12m_heute_minus1 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern: name: Verbrauch gestern @@ -632,7 +632,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus1: name: Verbrauch vorgestern @@ -640,7 +640,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus2: name: Verbrauch vor 3 Tagen @@ -648,7 +648,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vorwoche: name: Verbrauch in der Vorwoche @@ -656,7 +656,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vorwoche_minus1: name: Verbrauch vor 2 Wochen @@ -664,7 +664,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vormonat: name: Verbrauch im Vormonat @@ -672,7 +672,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vormonat_minus12: name: Verbrauch vor 12 Monaten @@ -680,7 +680,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vorjahreszeitraum: name: Verbrauch im Jahreszeitraum 1.1. 
bis jetzt vor einem Jahr @@ -688,7 +688,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_2: name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 2) @@ -697,70 +697,70 @@ item_structs: db_addon_fct: verbrauch_heute_minus3 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus4: name: Verbrauch vor 4 Tagen db_addon_fct: verbrauch_heute_minus4 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus5: name: Verbrauch vor 5 Tagen db_addon_fct: verbrauch_heute_minus5 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus6: name: Verbrauch vor 6 Tagen db_addon_fct: verbrauch_heute_minus6 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_gestern_minus7: name: Verbrauch vor 7 Tagen db_addon_fct: verbrauch_heute_minus7 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vorwoche_minus2: name: Verbrauch vor 3 Wochen db_addon_fct: verbrauch_woche_minus3 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vorwoche_minus3: name: Verbrauch vor 4 Wochen db_addon_fct: verbrauch_woche_minus4 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vormonat_minus1: name: Verbrauch vor 2 Monaten db_addon_fct: verbrauch_monat_minus2 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vormonat_minus2: name: Verbrauch vor 3 Monaten db_addon_fct: verbrauch_monat_minus3 type: num visu_acl: ro - # cache: yes + cache: yes verbrauch_vormonat_minus3: name: Verbrauch vor 4 Monaten db_addon_fct: verbrauch_monat_minus4 type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_1: name: Struct für die Erfassung von Zählerständen zu bestimmten Zeitpunkten bei Zählern mit stetig ansteigendem Zählerstand @@ -769,7 +769,7 @@ item_structs: db_addon_fct: zaehlerstand_heute_minus1 type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_vorwoche: name: Zählerstand zum Ende der vorigen Woche @@ -777,7 +777,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_vormonat: name: Zählerstand zum Ende des Vormonates @@ -785,7 +785,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_vormonat_minus1: name: Zählerstand zum Monatsende vor 2 Monaten @@ -793,7 +793,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_vormonat_minus2: name: Zählerstand zum Monatsende vor 3 Monaten @@ -801,7 +801,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes zaehlerstand_vorjahr: name: Zählerstand am Ende des vorigen Jahres @@ -809,7 +809,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - # cache: yes + cache: yes minmax_1: name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 1) @@ -819,138 +819,138 @@ item_structs: db_addon_fct: minmax_heute_min db_addon_ignore_value: 0 type: num - # cache: yes + cache: yes heute_max: name: Maximaler Wert seit Tagesbeginn db_addon_fct: minmax_heute_max type: num - # cache: yes + cache: yes last24h_min: name: Minimaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_min type: num - # cache: yes + cache: yes last24h_max: name: Maximaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_max type: num - # cache: yes + cache: yes woche_min: name: Minimaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_min type: num - # cache: yes + cache: yes woche_max: name: Maximaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_max type: num - # cache: yes + cache: yes monat_min: name: Minimaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_min type: num - # cache: yes + cache: yes monat_max: name: Maximaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_max type: num - # cache: yes + cache: yes jahr_min: name: Minimaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_min type: num - # cache: yes + cache: yes jahr_max: name: Maximaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_max type: num - # cache: yes + cache: yes gestern_min: name: Minimaler Wert gestern db_addon_fct: minmax_heute_minus1_min db_addon_startup: yes type: num - # cache: yes + cache: yes gestern_max: name: Maximaler Wert gestern db_addon_fct: minmax_heute_minus1_max db_addon_startup: yes type: num - # cache: yes + cache: yes gestern_avg: name: Durchschnittlicher Wert gestern db_addon_fct: minmax_heute_minus1_avg db_addon_startup: yes type: num - # cache: yes + cache: yes vorwoche_min: name: Minimaler Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_min db_addon_startup: yes type: num - # cache: yes + cache: yes vorwoche_max: name: Maximaler Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_max db_addon_startup: yes type: num - # cache: yes + cache: yes vorwoche_avg: name: Durchschnittlicher Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_avg db_addon_startup: yes type: num - # cache: yes + cache: yes vormonat_min: name: Minimaler Wert im Vormonat db_addon_fct: minmax_monat_minus1_min db_addon_startup: yes type: num - # cache: yes + cache: yes vormonat_max: name: Maximaler Wert im Vormonat db_addon_fct: minmax_monat_minus1_max db_addon_startup: yes type: num - # cache: yes + cache: yes vormonat_avg: name: Durchschnittlicher Wert im Vormonat db_addon_fct: minmax_monat_minus1_avg db_addon_startup: yes type: num - # cache: yes + cache: yes vorjahr_min: name: Minimaler Wert im Vorjahr db_addon_fct: minmax_jahr_minus1_min db_addon_startup: yes type: num - # cache: yes + cache: yes vorjahr_max: name: Maximaler Wert im Vorjahr db_addon_fct: minmax_jahr_minus1_max db_addon_startup: yes type: num - # cache: yes + cache: yes minmax_2: name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 2) @@ -959,73 +959,73 @@ item_structs: name: Minimaler Wert vorgestern db_addon_fct: minmax_heute_minus2_min type: num - # cache: yes + cache: yes gestern_minus1_max: name: Maximaler Wert vorgestern db_addon_fct: minmax_heute_minus2_max type: num - # cache: yes + cache: yes gestern_minus1_avg: name: Durchschnittlicher Wert vorgestern db_addon_fct: minmax_heute_minus2_avg type: num - # cache: yes + cache: yes gestern_minus2_min: name: Minimaler Wert vor 3 Tagen db_addon_fct: minmax_heute_minus3_min type: num - # cache: yes + cache: yes gestern_minus2_max: name: Maximaler Wert vor 3 Tagen db_addon_fct: minmax_heute_minus3_max type: num - # cache: yes + cache: yes gestern_minus2_avg: name: Durchschnittlicher Wert vor 3 Tagen db_addon_fct: minmax_heute_minus3_avg type: num - # cache: yes + cache: yes vorwoche_minus1_min: name: Minimaler Wert in der Woche vor 2 Wochen db_addon_fct: minmax_woche_minus2_min type: num - # cache: yes + cache: yes vorwoche_minus1_max: name: Maximaler Wert in der Woche vor 2 Wochen db_addon_fct: minmax_woche_minus2_max type: num - # cache: yes + cache: yes vorwoche_minus1_avg: name: Durchschnittlicher Wert in der Woche vor 2 Wochen db_addon_fct: minmax_woche_minus2_avg type: num - # cache: yes + cache: yes vormonat_minus1_min: name: Minimaler Wert im Monat vor 2 Monaten db_addon_fct: minmax_monat_minus2_min type: num - # cache: yes + cache: yes vormonat_minus1_max: name: Maximaler Wert im Monat vor 2 Monaten db_addon_fct: minmax_monat_minus2_max type: num - # cache: yes + cache: yes vormonat_minus1_avg: name: Durchschnittlicher Wert im Monat vor 2 Monaten db_addon_fct: minmax_monat_minus2_avg type: num - # cache: yes + cache: yes item_attribute_prefixes: NONE From 84f8d369c29202483baf7b1fe17416cf2b267e9b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 31 May 2023 23:33:58 +0200 Subject: [PATCH 126/775] stateengine plugin: improve log message for min/max/value --- stateengine/StateEngineCondition.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/stateengine/StateEngineCondition.py b/stateengine/StateEngineCondition.py index d7061d628..30b25b7df 100755 --- a/stateengine/StateEngineCondition.py +++ b/stateengine/StateEngineCondition.py @@ -486,8 +486,9 @@ def __check_value(self): return True elif self.__value.is_empty() and cond_min_max: - self._log_warning("Neither value nor min/max given. This might result in unexpected" - " evalutions. Min {}, max {}, value {}", + self._log_warning("Condition {}: Neither value nor min/max given." + " This might result in unexpected" + " evalutions. 
Min {}, max {}, value {}", self.__name, self.__min.get(), self.__max.get(), self.__value.get()) self._log_increase_indent() return True From 38fb769a855db4777ddf275db29581826e636604 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 31 May 2023 23:34:56 +0200 Subject: [PATCH 127/775] stateengine plugin: improve suspendduration structs (conversion now works for duration_format, too) --- stateengine/plugin.yaml | 89 ++++++++++++++++++++--------------------- 1 file changed, 44 insertions(+), 45 deletions(-) diff --git a/stateengine/plugin.yaml b/stateengine/plugin.yaml index d16e87f4e..5c2cae2ba 100755 --- a/stateengine/plugin.yaml +++ b/stateengine/plugin.yaml @@ -819,7 +819,7 @@ item_structs: settings: remark: Use these settings for your condition values - type: foo + type: bool eval: sh..suspendduration(sh..suspendduration(), "Init", "Start") crontab: init = True @@ -828,26 +828,26 @@ item_structs: type: num visu_acl: rw cache: True + enforce_updates: True initial_value: 60 - on_change: .seconds = value * 60 if not sh..self.property.last_change_by == "On_Change:{}".format(sh..seconds.property.path) else None + on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None duration_format: - remark: Can be used for the clock.countdown widget + remark: Can be used for the clock.countdown widget and smartvisu type: str cache: True - visu_acl: ro - eval: "'{}d {}h {}i {}s'.format(int(sh...seconds()//86400), int((sh...seconds()%86400)//3600), int((sh...seconds()%3600)//60), round((sh...seconds()%3600)%60))" - eval_trigger: - - ..seconds - - .. + visu_acl: rw + on_change: ..seconds = sum([a*b for a,b in zip([216000,3600,60,1], map(int,value.replace('d','').replace('h','').replace('i','').replace('s','').split(' ')))]) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...seconds.property.path), "On_Update:{}".format(sh...seconds.property.path)] else None seconds: remark: duration of suspend mode in seconds (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + on_change: + - .. 
= value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + - ..duration_format = '{}d {}h {}i {}s'.format(int(value//86400), int((value%86400)//3600), int((value%3600)//60), round((value%3600)%60)) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...duration_format.property.path), "On_Update:{}".format(sh...duration_format.property.path)] else None suspend_active: remark: Use this to (de)activate suspend mode in general @@ -1009,7 +1009,7 @@ item_structs: settings: remark: Use these settings for your condition values - type: foo + type: bool eval: (sh..suspendduration(sh..suspendduration(), "Init", "Start"), sh..suspendvariant.suspendduration0(sh..suspendduration(), "Init", "Start"), sh..suspendvariant.suspendduration1(sh..suspendvariant.suspendduration1(), "Init", "Start"), sh..suspendvariant.suspendduration2(sh..suspendvariant.suspendduration2(), "Init", "Start")) crontab: init = True @@ -1021,83 +1021,82 @@ item_structs: initial_value: 0 suspendduration0: - remark: gets automatically updated from settings.suspendduration as this is the default item for the suspendduration + remark: duration of suspend mode in minutes (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .seconds = value * 60 if not sh..self.property.last_change_by == "On_Change:{}".format(sh..seconds.property.path) else None + enforce_updates: True + initial_value: 60 + on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None - eval: sh....suspendduration() - eval_trigger: ...suspendduration duration_format: - remark: Can be used for the clock.countdown widget + remark: Can be used for the clock.countdown widget and smartvisu type: str cache: True - visu_acl: ro - eval: "'{}d {}h {}i {}s'.format(int(sh...seconds()//86400), int((sh...seconds()%86400)//3600), int((sh...seconds()%3600)//60), round((sh...seconds()%3600)%60))" - eval_trigger: - - ..seconds - - .. + visu_acl: rw + on_change: ..seconds = sum([a*b for a,b in zip([216000,3600,60,1], map(int,value.replace('d','').replace('h','').replace('i','').replace('s','').split(' ')))]) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...seconds.property.path), "On_Update:{}".format(sh...seconds.property.path)] else None seconds: remark: duration of suspend mode in seconds (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + on_change: + - .. 
= value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + - ..duration_format = '{}d {}h {}i {}s'.format(int(value//86400), int((value%86400)//3600), int((value%3600)//60), round((value%3600)%60)) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...duration_format.property.path), "On_Update:{}".format(sh...duration_format.property.path)] else None suspendduration1: remark: duration of suspend mode in minutes (gets converted automatically) type: num visu_acl: rw cache: True + enforce_updates: True initial_value: 60 - on_change: .seconds = value * 60 if not sh..self.property.last_change_by == "On_Change:{}".format(sh..seconds.property.path) else None + on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None duration_format: - remark: Can be used for the clock.countdown widget + remark: Can be used for the clock.countdown widget and smartvisu type: str cache: True - visu_acl: ro - eval: "'{}d {}h {}i {}s'.format(int(sh...seconds()//86400), int((sh...seconds()%86400)//3600), int((sh...seconds()%3600)//60), round((sh...seconds()%3600)%60))" - eval_trigger: - - ..seconds - - .. + visu_acl: rw + on_change: ..seconds = sum([a*b for a,b in zip([216000,3600,60,1], map(int,value.replace('d','').replace('h','').replace('i','').replace('s','').split(' ')))]) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...seconds.property.path), "On_Update:{}".format(sh...seconds.property.path)] else None seconds: remark: duration of suspend mode in seconds (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + on_change: + - .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + - ..duration_format = '{}d {}h {}i {}s'.format(int(value//86400), int((value%86400)//3600), int((value%3600)//60), round((value%3600)%60)) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...duration_format.property.path), "On_Update:{}".format(sh...duration_format.property.path)] else None suspendduration2: remark: duration of suspend mode in minutes (gets converted automatically) type: num visu_acl: rw cache: True + enforce_updates: True initial_value: 60 - on_change: .seconds = value * 60 if not sh..self.property.last_change_by == "On_Change:{}".format(sh..seconds.property.path) else None + on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None duration_format: - remark: Can be used for the clock.countdown widget + remark: Can be used for the clock.countdown widget and smartvisu type: str cache: True - visu_acl: ro - eval: "'{}d {}h {}i {}s'.format(int(sh...seconds()//86400), int((sh...seconds()%86400)//3600), int((sh...seconds()%3600)//60), round((sh...seconds()%3600)%60))" - eval_trigger: - - ..seconds - - .. 
+ visu_acl: rw + on_change: ..seconds = sum([a*b for a,b in zip([216000,3600,60,1], map(int,value.replace('d','').replace('h','').replace('i','').replace('s','').split(' ')))]) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...seconds.property.path), "On_Update:{}".format(sh...seconds.property.path)] else None seconds: remark: duration of suspend mode in seconds (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + on_change: + - .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + - ..duration_format = '{}d {}h {}i {}s'.format(int(value//86400), int((value%86400)//3600), int((value%3600)//60), round((value%3600)%60)) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...duration_format.property.path), "On_Update:{}".format(sh...duration_format.property.path)] else None additionaluse0: remark: set this value to a struct or (relative) state that should be added to the condition sets if suspendvariant is 0 @@ -1125,26 +1124,26 @@ item_structs: type: num visu_acl: rw cache: True + enforce_updates: True initial_value: 60 - on_change: .seconds = value * 60 if not sh..self.property.last_change_by == "On_Change:{}".format(sh..seconds.property.path) else None + on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None duration_format: - remark: Can be used for the clock.countdown widget + remark: Can be used for the clock.countdown widget and smartvisu type: str cache: True - visu_acl: ro - eval: "'{}d {}h {}i {}s'.format(int(sh...seconds()//86400), int((sh...seconds()%86400)//3600), int((sh...seconds()%3600)//60), round((sh...seconds()%3600)%60))" - eval_trigger: - - ..seconds - - .. + visu_acl: rw + on_change: ..seconds = sum([a*b for a,b in zip([216000,3600,60,1], map(int,value.replace('d','').replace('h','').replace('i','').replace('s','').split(' ')))]) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...seconds.property.path), "On_Update:{}".format(sh...seconds.property.path)] else None seconds: remark: duration of suspend mode in seconds (gets converted automatically) type: num visu_acl: rw cache: True - on_change: .. = value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + on_change: + - .. 
= value / 60 if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh....property.path), "On_Update:{}".format(sh....property.path)] else None + - ..duration_format = '{}d {}h {}i {}s'.format(int(value//86400), int((value%86400)//3600), int((value%3600)//60), round((value%3600)%60)) if not sh..self.property.last_change_by in [ "On_Change:{}".format(sh...duration_format.property.path), "On_Update:{}".format(sh...duration_format.property.path)] else None suspend_active: remark: Use this to (de)activate suspend mode in general From 0dd95379657ea46bb2f700b63722ef1890fc657a Mon Sep 17 00:00:00 2001 From: msinn Date: Sat, 3 Jun 2023 11:42:02 +0200 Subject: [PATCH 128/775] modules.websocket: Implementation of first version of the adm payload protocol --- appletv/assets/webif_appletv1.png | Bin avm/TR-064links.yaml | 0 avm/__init__.py | 0 avm/item_attributes.py | 0 avm/item_attributes_master.py | 0 avm/locale.yaml | 0 avm/plugin.yaml | 0 avm/requirements.txt | 0 avm/sv_widgets/widget_avm.html | 0 avm/tr064/action.py | 0 avm/tr064/attribute_dict.py | 0 avm/tr064/client.py | 0 avm/tr064/config.py | 0 avm/tr064/device.py | 0 avm/tr064/exceptions.py | 0 avm/tr064/service.py | 0 avm/tr064/service_list.py | 0 avm/user_doc.rst | 0 avm/user_doc/assets/webif_tab1.jpg | Bin avm/user_doc/assets/webif_tab2.jpg | Bin avm/user_doc/assets/webif_tab3.jpg | Bin avm/user_doc/assets/webif_tab4.jpg | Bin avm/user_doc/assets/webif_tab5.jpg | Bin avm/user_doc/assets/webif_tab6.jpg | Bin avm/webif/__init__.py | 0 avm/webif/static/img/lamp_green.png | Bin avm/webif/static/img/lamp_red.png | Bin avm/webif/static/img/plugin_logo.png | Bin avm/webif/static/img/readme.txt | 0 avm/webif/templates/index.html | 0 db_addon/__init__.py | 0 db_addon/item_attributes_master.py | 0 db_addon/locale.yaml | 0 db_addon/plugin.yaml | 0 db_addon/requirements.txt | 0 db_addon/user_doc.rst | 0 db_addon/webif/__init__.py | 0 db_addon/webif/static/img/plugin_logo.png | Bin db_addon/webif/templates/index.html | 0 lirc/assets/lirc_webif.png | Bin mailsend/plugin.yaml | 7 ++++--- mieleathome/assets/img.png | Bin mieleathome/assets/img_1.png | Bin mieleathome/assets/img_10.png | Bin mieleathome/assets/img_11.png | Bin mieleathome/assets/img_12.png | Bin mieleathome/assets/img_13.png | Bin mieleathome/assets/img_14.png | Bin mieleathome/assets/img_15.png | Bin mieleathome/assets/img_16.png | Bin mieleathome/assets/img_17.png | Bin mieleathome/assets/img_18.png | Bin mieleathome/assets/img_2.png | Bin mieleathome/assets/img_3.png | Bin mieleathome/assets/img_4.png | Bin mieleathome/assets/img_5.png | Bin mieleathome/assets/img_6.png | Bin mieleathome/assets/img_7.png | Bin mieleathome/assets/img_8.png | Bin mieleathome/assets/img_9.png | Bin mieleathome/miele.html | 0 mieleathome/webif/static/img/plugin_logo.svg | 0 tasmota/__init__.py | 0 tasmota/plugin.yaml | 0 tasmota/webif/__init__.py | 0 tasmota/webif/templates/index.html | 0 telegram/_pv_1_7_1/README.rst.off | 0 telegram/_pv_1_7_1/__init__.py | 0 telegram/_pv_1_7_1/locale.yaml | 0 telegram/_pv_1_7_1/logging.yaml.telegram | 0 telegram/_pv_1_7_1/plugin.yaml | 0 telegram/_pv_1_7_1/requirements.txt | 0 telegram/_pv_1_7_1/user_doc.rst | 0 telegram/_pv_1_7_1/webif/__init__.py | 0 telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg | 0 telegram/_pv_1_7_1/webif/static/img/readme.txt | 0 telegram/_pv_1_7_1/webif/templates/index.html | 0 webpush/__init__.py | 0 webpush/locale.yaml | 0 webpush/plugin.yaml | 0 webpush/requirements.txt | 0 webpush/sv_widgets/webpush.html | 0 
webpush/sv_widgets/webpush.js | 0 webpush/sv_widgets/webpush_serviceworker.js | 0 webpush/sv_widgets/widget_webpush.config.html | 0 webpush/user_doc.rst | 0 webpush/webif/static/img/plugin_logo.png | Bin webpush/webif/static/img/readme.txt | 0 webpush/webif/templates/index.html | 0 89 files changed, 4 insertions(+), 3 deletions(-) mode change 100644 => 100755 appletv/assets/webif_appletv1.png mode change 100644 => 100755 avm/TR-064links.yaml mode change 100644 => 100755 avm/__init__.py mode change 100644 => 100755 avm/item_attributes.py mode change 100644 => 100755 avm/item_attributes_master.py mode change 100644 => 100755 avm/locale.yaml mode change 100644 => 100755 avm/plugin.yaml mode change 100644 => 100755 avm/requirements.txt mode change 100644 => 100755 avm/sv_widgets/widget_avm.html mode change 100644 => 100755 avm/tr064/action.py mode change 100644 => 100755 avm/tr064/attribute_dict.py mode change 100644 => 100755 avm/tr064/client.py mode change 100644 => 100755 avm/tr064/config.py mode change 100644 => 100755 avm/tr064/device.py mode change 100644 => 100755 avm/tr064/exceptions.py mode change 100644 => 100755 avm/tr064/service.py mode change 100644 => 100755 avm/tr064/service_list.py mode change 100644 => 100755 avm/user_doc.rst mode change 100644 => 100755 avm/user_doc/assets/webif_tab1.jpg mode change 100644 => 100755 avm/user_doc/assets/webif_tab2.jpg mode change 100644 => 100755 avm/user_doc/assets/webif_tab3.jpg mode change 100644 => 100755 avm/user_doc/assets/webif_tab4.jpg mode change 100644 => 100755 avm/user_doc/assets/webif_tab5.jpg mode change 100644 => 100755 avm/user_doc/assets/webif_tab6.jpg mode change 100644 => 100755 avm/webif/__init__.py mode change 100644 => 100755 avm/webif/static/img/lamp_green.png mode change 100644 => 100755 avm/webif/static/img/lamp_red.png mode change 100644 => 100755 avm/webif/static/img/plugin_logo.png mode change 100644 => 100755 avm/webif/static/img/readme.txt mode change 100644 => 100755 avm/webif/templates/index.html mode change 100644 => 100755 db_addon/__init__.py mode change 100644 => 100755 db_addon/item_attributes_master.py mode change 100644 => 100755 db_addon/locale.yaml mode change 100644 => 100755 db_addon/plugin.yaml mode change 100644 => 100755 db_addon/requirements.txt mode change 100644 => 100755 db_addon/user_doc.rst mode change 100644 => 100755 db_addon/webif/__init__.py mode change 100644 => 100755 db_addon/webif/static/img/plugin_logo.png mode change 100644 => 100755 db_addon/webif/templates/index.html mode change 100644 => 100755 lirc/assets/lirc_webif.png mode change 100644 => 100755 mieleathome/assets/img.png mode change 100644 => 100755 mieleathome/assets/img_1.png mode change 100644 => 100755 mieleathome/assets/img_10.png mode change 100644 => 100755 mieleathome/assets/img_11.png mode change 100644 => 100755 mieleathome/assets/img_12.png mode change 100644 => 100755 mieleathome/assets/img_13.png mode change 100644 => 100755 mieleathome/assets/img_14.png mode change 100644 => 100755 mieleathome/assets/img_15.png mode change 100644 => 100755 mieleathome/assets/img_16.png mode change 100644 => 100755 mieleathome/assets/img_17.png mode change 100644 => 100755 mieleathome/assets/img_18.png mode change 100644 => 100755 mieleathome/assets/img_2.png mode change 100644 => 100755 mieleathome/assets/img_3.png mode change 100644 => 100755 mieleathome/assets/img_4.png mode change 100644 => 100755 mieleathome/assets/img_5.png mode change 100644 => 100755 mieleathome/assets/img_6.png mode change 100644 => 100755 
mieleathome/assets/img_7.png mode change 100644 => 100755 mieleathome/assets/img_8.png mode change 100644 => 100755 mieleathome/assets/img_9.png mode change 100644 => 100755 mieleathome/miele.html mode change 100644 => 100755 mieleathome/webif/static/img/plugin_logo.svg mode change 100644 => 100755 tasmota/__init__.py mode change 100644 => 100755 tasmota/plugin.yaml mode change 100644 => 100755 tasmota/webif/__init__.py mode change 100644 => 100755 tasmota/webif/templates/index.html mode change 100644 => 100755 telegram/_pv_1_7_1/README.rst.off mode change 100644 => 100755 telegram/_pv_1_7_1/__init__.py mode change 100644 => 100755 telegram/_pv_1_7_1/locale.yaml mode change 100644 => 100755 telegram/_pv_1_7_1/logging.yaml.telegram mode change 100644 => 100755 telegram/_pv_1_7_1/plugin.yaml mode change 100644 => 100755 telegram/_pv_1_7_1/requirements.txt mode change 100644 => 100755 telegram/_pv_1_7_1/user_doc.rst mode change 100644 => 100755 telegram/_pv_1_7_1/webif/__init__.py mode change 100644 => 100755 telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg mode change 100644 => 100755 telegram/_pv_1_7_1/webif/static/img/readme.txt mode change 100644 => 100755 telegram/_pv_1_7_1/webif/templates/index.html mode change 100644 => 100755 webpush/__init__.py mode change 100644 => 100755 webpush/locale.yaml mode change 100644 => 100755 webpush/plugin.yaml mode change 100644 => 100755 webpush/requirements.txt mode change 100644 => 100755 webpush/sv_widgets/webpush.html mode change 100644 => 100755 webpush/sv_widgets/webpush.js mode change 100644 => 100755 webpush/sv_widgets/webpush_serviceworker.js mode change 100644 => 100755 webpush/sv_widgets/widget_webpush.config.html mode change 100644 => 100755 webpush/user_doc.rst mode change 100644 => 100755 webpush/webif/static/img/plugin_logo.png mode change 100644 => 100755 webpush/webif/static/img/readme.txt mode change 100644 => 100755 webpush/webif/templates/index.html diff --git a/appletv/assets/webif_appletv1.png b/appletv/assets/webif_appletv1.png old mode 100644 new mode 100755 diff --git a/avm/TR-064links.yaml b/avm/TR-064links.yaml old mode 100644 new mode 100755 diff --git a/avm/__init__.py b/avm/__init__.py old mode 100644 new mode 100755 diff --git a/avm/item_attributes.py b/avm/item_attributes.py old mode 100644 new mode 100755 diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py old mode 100644 new mode 100755 diff --git a/avm/locale.yaml b/avm/locale.yaml old mode 100644 new mode 100755 diff --git a/avm/plugin.yaml b/avm/plugin.yaml old mode 100644 new mode 100755 diff --git a/avm/requirements.txt b/avm/requirements.txt old mode 100644 new mode 100755 diff --git a/avm/sv_widgets/widget_avm.html b/avm/sv_widgets/widget_avm.html old mode 100644 new mode 100755 diff --git a/avm/tr064/action.py b/avm/tr064/action.py old mode 100644 new mode 100755 diff --git a/avm/tr064/attribute_dict.py b/avm/tr064/attribute_dict.py old mode 100644 new mode 100755 diff --git a/avm/tr064/client.py b/avm/tr064/client.py old mode 100644 new mode 100755 diff --git a/avm/tr064/config.py b/avm/tr064/config.py old mode 100644 new mode 100755 diff --git a/avm/tr064/device.py b/avm/tr064/device.py old mode 100644 new mode 100755 diff --git a/avm/tr064/exceptions.py b/avm/tr064/exceptions.py old mode 100644 new mode 100755 diff --git a/avm/tr064/service.py b/avm/tr064/service.py old mode 100644 new mode 100755 diff --git a/avm/tr064/service_list.py b/avm/tr064/service_list.py old mode 100644 new mode 100755 diff --git a/avm/user_doc.rst 
b/avm/user_doc.rst old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab1.jpg b/avm/user_doc/assets/webif_tab1.jpg old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab2.jpg b/avm/user_doc/assets/webif_tab2.jpg old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab3.jpg b/avm/user_doc/assets/webif_tab3.jpg old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab4.jpg b/avm/user_doc/assets/webif_tab4.jpg old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab5.jpg b/avm/user_doc/assets/webif_tab5.jpg old mode 100644 new mode 100755 diff --git a/avm/user_doc/assets/webif_tab6.jpg b/avm/user_doc/assets/webif_tab6.jpg old mode 100644 new mode 100755 diff --git a/avm/webif/__init__.py b/avm/webif/__init__.py old mode 100644 new mode 100755 diff --git a/avm/webif/static/img/lamp_green.png b/avm/webif/static/img/lamp_green.png old mode 100644 new mode 100755 diff --git a/avm/webif/static/img/lamp_red.png b/avm/webif/static/img/lamp_red.png old mode 100644 new mode 100755 diff --git a/avm/webif/static/img/plugin_logo.png b/avm/webif/static/img/plugin_logo.png old mode 100644 new mode 100755 diff --git a/avm/webif/static/img/readme.txt b/avm/webif/static/img/readme.txt old mode 100644 new mode 100755 diff --git a/avm/webif/templates/index.html b/avm/webif/templates/index.html old mode 100644 new mode 100755 diff --git a/db_addon/__init__.py b/db_addon/__init__.py old mode 100644 new mode 100755 diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py old mode 100644 new mode 100755 diff --git a/db_addon/locale.yaml b/db_addon/locale.yaml old mode 100644 new mode 100755 diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml old mode 100644 new mode 100755 diff --git a/db_addon/requirements.txt b/db_addon/requirements.txt old mode 100644 new mode 100755 diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst old mode 100644 new mode 100755 diff --git a/db_addon/webif/__init__.py b/db_addon/webif/__init__.py old mode 100644 new mode 100755 diff --git a/db_addon/webif/static/img/plugin_logo.png b/db_addon/webif/static/img/plugin_logo.png old mode 100644 new mode 100755 diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html old mode 100644 new mode 100755 diff --git a/lirc/assets/lirc_webif.png b/lirc/assets/lirc_webif.png old mode 100644 new mode 100755 diff --git a/mailsend/plugin.yaml b/mailsend/plugin.yaml index a24438f60..2c879edca 100755 --- a/mailsend/plugin.yaml +++ b/mailsend/plugin.yaml @@ -23,7 +23,8 @@ plugin: parameters: # Definition of parameters to be configured in etc/plugin.yaml host: - type: ip + #type: ip + type: str description: de: 'Adresse des SMTP Hosts' en: 'Address of SMTP host' @@ -34,8 +35,8 @@ parameters: valid_min: 0 valid_max: 65535 description: - de: 'Port des SMTP service (bitte 25 nutzen, fals tls deaktiviert wird)' - en: 'Port used by SMTP service (use 25 if tls is set to False)' + de: 'Port des SMTP service - Alternative gebräuchliche Ports sind 2525, 465 (smtp over SSL) und 25 (nur ohne tls)' + en: 'Port used by SMTP service - Commonly used alternative ports are 2525, 465 (smtp over SSL) und 25 (only without tls)' tls: type: bool diff --git a/mieleathome/assets/img.png b/mieleathome/assets/img.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_1.png b/mieleathome/assets/img_1.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_10.png b/mieleathome/assets/img_10.png 
old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_11.png b/mieleathome/assets/img_11.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_12.png b/mieleathome/assets/img_12.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_13.png b/mieleathome/assets/img_13.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_14.png b/mieleathome/assets/img_14.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_15.png b/mieleathome/assets/img_15.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_16.png b/mieleathome/assets/img_16.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_17.png b/mieleathome/assets/img_17.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_18.png b/mieleathome/assets/img_18.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_2.png b/mieleathome/assets/img_2.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_3.png b/mieleathome/assets/img_3.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_4.png b/mieleathome/assets/img_4.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_5.png b/mieleathome/assets/img_5.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_6.png b/mieleathome/assets/img_6.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_7.png b/mieleathome/assets/img_7.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_8.png b/mieleathome/assets/img_8.png old mode 100644 new mode 100755 diff --git a/mieleathome/assets/img_9.png b/mieleathome/assets/img_9.png old mode 100644 new mode 100755 diff --git a/mieleathome/miele.html b/mieleathome/miele.html old mode 100644 new mode 100755 diff --git a/mieleathome/webif/static/img/plugin_logo.svg b/mieleathome/webif/static/img/plugin_logo.svg old mode 100644 new mode 100755 diff --git a/tasmota/__init__.py b/tasmota/__init__.py old mode 100644 new mode 100755 diff --git a/tasmota/plugin.yaml b/tasmota/plugin.yaml old mode 100644 new mode 100755 diff --git a/tasmota/webif/__init__.py b/tasmota/webif/__init__.py old mode 100644 new mode 100755 diff --git a/tasmota/webif/templates/index.html b/tasmota/webif/templates/index.html old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/README.rst.off b/telegram/_pv_1_7_1/README.rst.off old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/__init__.py b/telegram/_pv_1_7_1/__init__.py old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/locale.yaml b/telegram/_pv_1_7_1/locale.yaml old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/logging.yaml.telegram b/telegram/_pv_1_7_1/logging.yaml.telegram old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/plugin.yaml b/telegram/_pv_1_7_1/plugin.yaml old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/requirements.txt b/telegram/_pv_1_7_1/requirements.txt old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/user_doc.rst b/telegram/_pv_1_7_1/user_doc.rst old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/webif/__init__.py b/telegram/_pv_1_7_1/webif/__init__.py old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg b/telegram/_pv_1_7_1/webif/static/img/plugin_logo.svg old mode 100644 new mode 100755 diff --git a/telegram/_pv_1_7_1/webif/static/img/readme.txt b/telegram/_pv_1_7_1/webif/static/img/readme.txt old mode 100644 new mode 
100755 diff --git a/telegram/_pv_1_7_1/webif/templates/index.html b/telegram/_pv_1_7_1/webif/templates/index.html old mode 100644 new mode 100755 diff --git a/webpush/__init__.py b/webpush/__init__.py old mode 100644 new mode 100755 diff --git a/webpush/locale.yaml b/webpush/locale.yaml old mode 100644 new mode 100755 diff --git a/webpush/plugin.yaml b/webpush/plugin.yaml old mode 100644 new mode 100755 diff --git a/webpush/requirements.txt b/webpush/requirements.txt old mode 100644 new mode 100755 diff --git a/webpush/sv_widgets/webpush.html b/webpush/sv_widgets/webpush.html old mode 100644 new mode 100755 diff --git a/webpush/sv_widgets/webpush.js b/webpush/sv_widgets/webpush.js old mode 100644 new mode 100755 diff --git a/webpush/sv_widgets/webpush_serviceworker.js b/webpush/sv_widgets/webpush_serviceworker.js old mode 100644 new mode 100755 diff --git a/webpush/sv_widgets/widget_webpush.config.html b/webpush/sv_widgets/widget_webpush.config.html old mode 100644 new mode 100755 diff --git a/webpush/user_doc.rst b/webpush/user_doc.rst old mode 100644 new mode 100755 diff --git a/webpush/webif/static/img/plugin_logo.png b/webpush/webif/static/img/plugin_logo.png old mode 100644 new mode 100755 diff --git a/webpush/webif/static/img/readme.txt b/webpush/webif/static/img/readme.txt old mode 100644 new mode 100755 diff --git a/webpush/webif/templates/index.html b/webpush/webif/templates/index.html old mode 100644 new mode 100755 From c9c253867076d8fc02f2f2532b644150c86005ba Mon Sep 17 00:00:00 2001 From: aschwith Date: Mon, 5 Jun 2023 17:50:27 +0200 Subject: [PATCH 129/775] enocean: robustify plugin reaction to loss of Enocean serial device --- enocean/__init__.py | 113 ++++++++++++++++++++++++++------------------ enocean/plugin.yaml | 2 +- 2 files changed, 67 insertions(+), 48 deletions(-) diff --git a/enocean/__init__.py b/enocean/__init__.py index 1363441a9..3a9cd7f0b 100755 --- a/enocean/__init__.py +++ b/enocean/__init__.py @@ -166,7 +166,7 @@ class EnOcean(SmartPlugin): ALLOW_MULTIINSTANCE = False - PLUGIN_VERSION = "1.3.8" + PLUGIN_VERSION = "1.3.9" def __init__(self, sh, *args, **kwargs): @@ -189,7 +189,13 @@ def __init__(self, sh, *args, **kwargs): self.tx_id = int(tx_id, 16) self.logger.info(f"Stick TX ID configured via plugin.conf to: {tx_id}") self._log_unknown_msg = self.get_parameter_value("log_unknown_messages") - self._tcm = serial.Serial(self.port, 57600, timeout=1.5) + try: + self._tcm = serial.Serial(self.port, 57600, timeout=1.5) + except Exception as e: + self._tcm = None + self._init_complete = False + self.logger.error(f"Exception occurred during serial open: {e}") + return self._cmd_lock = threading.Lock() self._response_lock = threading.Condition() self._rx_items = {} @@ -451,57 +457,64 @@ def run(self): t.start() msg = [] while self.alive: - readin = self._tcm.read(1000) - if readin: - msg += readin - if logger_debug: - self.logger.debug("Data received") - # check if header is complete (6bytes including sync) - # 0x55 (SYNC) + 4bytes (HEADER) + 1byte(HEADER-CRC) - while (len(msg) >= 6): - #check header for CRC - if (msg[0] == PACKET_SYNC_BYTE) and (self._calc_crc8(msg[1:5]) == msg[5]): - # header bytes: sync; length of data (2); optional length; packet type; crc - data_length = (msg[1] << 8) + msg[2] - opt_length = msg[3] - packet_type = msg[4] - msg_length = data_length + opt_length + 7 - if logger_debug: - self.logger.debug("Received header with data_length = {} / opt_length = 0x{:02x} / type = {}".format(data_length, opt_length, packet_type)) - - # break if msg is 
not yet complete: - if (len(msg) < msg_length): - break - - # msg complete - if (self._calc_crc8(msg[6:msg_length - 1]) == msg[msg_length - 1]): + try: + readin = self._tcm.read(1000) + except Exception as e: + self.logger.error(f"Exception during tcm read occurred: {e}") + break + else: + if readin: + msg += readin + if logger_debug: + self.logger.debug("Data received") + # check if header is complete (6bytes including sync) + # 0x55 (SYNC) + 4bytes (HEADER) + 1byte(HEADER-CRC) + while (len(msg) >= 6): + #check header for CRC + if (msg[0] == PACKET_SYNC_BYTE) and (self._calc_crc8(msg[1:5]) == msg[5]): + # header bytes: sync; length of data (2); optional length; packet type; crc + data_length = (msg[1] << 8) + msg[2] + opt_length = msg[3] + packet_type = msg[4] + msg_length = data_length + opt_length + 7 if logger_debug: - self.logger.debug("Accepted package with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) - data = msg[6:msg_length - (opt_length + 1)] - optional = msg[(6 + data_length):msg_length - 1] - if (packet_type == PACKET_TYPE_RADIO): - self._process_packet_type_radio(data, optional) - elif (packet_type == PACKET_TYPE_SMART_ACK_COMMAND): - self._process_packet_type_smart_ack_command(data, optional) - elif (packet_type == PACKET_TYPE_RESPONSE): - self._process_packet_type_response(data, optional) - elif (packet_type == PACKET_TYPE_EVENT): - self._process_packet_type_event(data, optional) + self.logger.debug("Received header with data_length = {} / opt_length = 0x{:02x} / type = {}".format(data_length, opt_length, packet_type)) + + # break if msg is not yet complete: + if (len(msg) < msg_length): + break + + # msg complete + if (self._calc_crc8(msg[6:msg_length - 1]) == msg[msg_length - 1]): + if logger_debug: + self.logger.debug("Accepted package with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) + data = msg[6:msg_length - (opt_length + 1)] + optional = msg[(6 + data_length):msg_length - 1] + if (packet_type == PACKET_TYPE_RADIO): + self._process_packet_type_radio(data, optional) + elif (packet_type == PACKET_TYPE_SMART_ACK_COMMAND): + self._process_packet_type_smart_ack_command(data, optional) + elif (packet_type == PACKET_TYPE_RESPONSE): + self._process_packet_type_response(data, optional) + elif (packet_type == PACKET_TYPE_EVENT): + self._process_packet_type_event(data, optional) + else: + self.logger.error("Received packet with unknown type = 0x{:02x} - len = {} / data = [{}]".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) else: - self.logger.error("Received packet with unknown type = 0x{:02x} - len = {} / data = [{}]".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) + self.logger.error("Crc error - dumping packet with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) + msg = msg[msg_length:] else: - self.logger.error("Crc error - dumping packet with type = 0x{:02x} / len = {} / data = [{}]!".format(packet_type, msg_length, ', '.join(['0x%02x' % b for b in msg]))) - msg = msg[msg_length:] - else: - #self.logger.warning("Consuming [0x{:02x}] from input buffer!".format(msg[0])) - msg.pop(0) - self._tcm.close() + #self.logger.warning("Consuming [0x{:02x}] from input buffer!".format(msg[0])) + msg.pop(0) + try: + self._tcm.close() + except Exception as e: + self.logger.error(f"Exception during tcm close occured: {e}") 
self.logger.info("Run method stopped") def stop(self): self.logger.debug("Call function << stop >>") - self.alive = False - + self.alive = False def get_tx_id_as_hex(self): hexstring = "{:08X}".format(self.tx_id) @@ -698,7 +711,13 @@ def _send_packet(self, packet_type, data=[], optional=[]): packet += bytes(data + optional) packet += bytes([self._calc_crc8(packet[6:])]) self.logger.info("Sending packet with len = {} / data = [{}]!".format(len(packet), ', '.join(['0x%02x' % b for b in packet]))) - self._tcm.write(packet) + + # Send out serial data: + if self._tcm: + try: + self._tcm.write(packet) + except Exception as e: + self.logger.error(f"Exception during tcm write occurred: {e}") def _send_smart_ack_command(self, _code, data=[]): #self.logger.debug("enocean: call function << _send_smart_ack_command >>") diff --git a/enocean/plugin.yaml b/enocean/plugin.yaml index a020510a8..e0ae2b8a5 100755 --- a/enocean/plugin.yaml +++ b/enocean/plugin.yaml @@ -16,7 +16,7 @@ plugin: # url of the support thread support: https://knx-user-forum.de/forum/supportforen/smarthome-py/26542-featurewunsch-enocean-plugin/page13 - version: 1.3.8 # Plugin version + version: 1.3.9 # Plugin version sh_minversion: 1.3 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance From 6b2147cb4403ca7db40a4743e04a24b81fee07d2 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 11 Jun 2023 11:31:51 +0200 Subject: [PATCH 130/775] AVM Plugin: Bugfix and Extension of get_hosts - Bump to 2.0.5 - NEW get_hosts_dict: enable "only_active" - NEW get_hosts_list: returns list for (filtered host); provides more flexibility and much quicker results as get_hosts - BUGFIX is_host_active - BUGFIX get_hosts - UPDATE get_hosts_dict - NEW get_hosts_count: Enable only_active - UPDATE plugin.yaml - UPDATE user_doc.rst --- avm/__init__.py | 110 ++++++++++++++++++++++++++++++++++++++++------- avm/plugin.yaml | 25 ++++++++++- avm/user_doc.rst | 55 ++++++++++++++++++++++++ 3 files changed, 173 insertions(+), 17 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index 15df62067..1762dd5a7 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -106,7 +106,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. 
Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.4' + PLUGIN_VERSION = '2.0.5' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -406,8 +406,12 @@ def get_hosts(self, only_active: bool = False): return self.fritz_device.get_hosts(only_active) @NoAttributeError - def get_hosts_dict(self): - return self.fritz_device.get_hosts_dict() + def get_hosts_dict(self, only_active: bool = False): + return self.fritz_device.get_hosts_dict(only_active) + + @NoAttributeError + def get_hosts_list(self, identifier_list: list = None, filter_dict: dict = None) -> Union[list, None]: + return self.fritz_device.get_hosts_list(identifier_list, filter_dict) @NoAttributeError def get_mesh_topology(self): @@ -990,7 +994,7 @@ def _poll_fritz_device(self, avm_data_type: str, index=None, enforce_read: bool 'mesh_topology': ('get_mesh_topology', None) } - # turn data to True of string is as listed + # turn data to True if string is as listed str_to_bool = { 'wan_is_connected': 'Connected', 'wan_link': 'Up', @@ -1617,7 +1621,7 @@ def is_host_active(self, mac_address: str) -> bool: :return: True or False, depending if the host is active on the FritzDevice """ # is_active = self.client.LANDevice.Hosts.GetSpecificHostEntry(NewMACAddress=mac_address)['NewActive'] - return bool(self._poll_fritz_device('is_host_active', mac_address, enforce_read=True)) + return bool(to_int(self._poll_fritz_device('is_host_active', mac_address, enforce_read=True))) def get_hosts(self, only_active: bool = False) -> list: """ @@ -1626,7 +1630,7 @@ def get_hosts(self, only_active: bool = False) -> list: Uses: http://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf :param only_active: bool, if only active hosts shall be returned - :return: Array host dicts (see get_host_details) + :return: Array of host dicts (see get_host_details) """ # number_of_hosts = int(self.client.LANDevice.Hosts.GetHostNumberOfEntries()['NewHostNumberOfEntries']) number_of_hosts = to_int(self._poll_fritz_device('number_of_hosts', enforce_read=True)) @@ -1664,13 +1668,20 @@ def get_host_details(self, index: int): 'ip_address': host_info.get('NewIPAddress'), 'address_source': host_info.get('NewAddressSource'), 'mac_address': host_info.get('NewMACAddress'), - 'is_active': bool(host_info.get('NewActive')), + 'is_active': bool(to_int(host_info.get('NewActive'))), 'lease_time_remaining': to_int(host_info.get('NewLeaseTimeRemaining')) } return host - def get_hosts_dict(self) -> Union[dict, None]: - """Get all Hosts connected to AVM device as dict""" + def get_hosts_dict(self, only_active: bool = False) -> Union[dict, None]: + """Get all Hosts connected to AVM device as dict + + Uses: http://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf + + :param only_active: bool, if only active hosts shall be returned + :return: dict host dicts + """ + # hosts_url = self.client.LANDevice.Hosts.X_AVM_DE_GetHostListPath()['NewX_AVM_DE_HostListPath'] hosts_url = self._poll_fritz_device('hosts_url', enforce_read=True) @@ -1687,25 +1698,94 @@ def get_hosts_dict(self) -> Union[dict, None]: index = None for attr in item: if attr.tag == 'Index': - index = int(attr.text) + index = to_int(attr.text) + key = str(attr.tag) - value = str(attr.text) if key.startswith('X_AVM-DE_'): key = key[9:] + + value = str(attr.text) if value.isdigit(): value = int(value) + elif value == 'None': + value = None + if key in ['Active', 'Guest', 'Disallow', 'UpdateAvailable', 'VPN']: 
value = bool(value) + host_dict[key] = value - if index: + + if index and (not only_active or host_dict['Active']): hosts_dict[index] = host_dict return hosts_dict - def get_hosts_count(self) -> int: - """Returns count of hosts""" + def get_hosts_list(self, identifier_list: list = None, filter_dict: dict = None) -> Union[list, None]: + """ + Get information about (filtered) hosts as array / list + + Uses: http://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf + + :param identifier_list: list of identifiers of host, which will be returned (valid: 'Index', 'IPAddress', 'MACAddress', 'HostName', 'FriendyName') + :param filter_dict: dict of filters, for which hosts (identifiers) will be returned (e.g. filter={'Active': False} will return all non-active hosts) + :return: Array of (filtered) hosts information + """ + + identifiers = ['index', 'ipaddress', 'macaddress', 'hostname', 'friendlyname'] + hosts = [] + + if filter_dict is None: + filter_dict = {} + else: + filter_dict = {k.lower(): v for k, v in filter_dict.items()} + + if identifier_list is None: + identifier_list = [] + elif isinstance(identifier_list, str): + identifier_list = list(identifier_list) + + # get host dict to evaluate result from + hosts_dict = self.get_hosts_dict() + if not hosts_dict: + return + + # extract filtered hosts + for idx in hosts_dict: + host_lower = {k.lower(): v for k, v in hosts_dict[idx].items()} + if filter_dict.items() <= host_lower.items(): + hosts.append(hosts_dict[idx]) + + # process identifier list + identifier_list_checked = [] + for identifier in identifier_list: + if identifier.lower() in identifiers: + identifier_list_checked.append(identifier.lower()) + + if not identifier_list_checked: + return hosts + + # eval identifier from filtered hosts + hosts_ids = [] + for host in hosts: + host_lower = {k.lower(): v for k, v in host.items()} + host_ids = {} + for identifier in identifier_list_checked: + host_id = host_lower.get(identifier) + if host_id: + host_ids.update({identifier: host_id}) + if host_ids: + hosts_ids.append(host_ids) + return hosts_ids + + def get_hosts_count(self, only_active: bool = False) -> int: + """Returns count of hosts + + :param only_active: bool, if only number of active hosts shall be returned + :return: number of hosts + """ + try: - return len(self.get_hosts_dict()) + return len(self.get_hosts_dict(only_active)) except TypeError: return 0 diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 4e3ffcd17..aa64a2950 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.4 # Plugin version (must match the version specified in __init__.py) + version: 2.0.5 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin @@ -1210,7 +1210,28 @@ plugin_functions: de: Liefert Informationen aller Hosts als Dictionary en: Lists information of all hosts as dict parameters: - # This function has no parameters + only_active: + type: bool + description: + de: True, wenn nur aktuell aktive Hosts zurückgegeben werden sollen. + en: True, if only active hosts shall be returned. 
+ + get_hosts_list: + type: list(dict) + description: + de: Liefert Informationen gefilterte Information zu den verbundenen Hosts als Array + en: Lists information of filtered hosts as array + parameters: + identifier_list: + type: list + description: + de: "Liste von Identifiers des Hosts, die zurückgegeben werden sollen; (möglich: 'Index', 'IPAddress', 'MACAddress', 'HostName', 'FriendyName')" + en: "list of identifiers of host, which will be returned; (valid: 'Index', 'IPAddress', 'MACAddress', 'HostName', 'FriendyName')" + filter_dict: + type: dict + description: + de: "Dict von Filtern, für welche die Host-Identifier zurückgegeben werden sollen (bspw: filter={'Active': False} gibt alle nicht-aktiven Host zurück)" + en: "dict of filters, for which hosts (identifiers) will be returned (e.g. filter={'Active': False} will return all non-active hosts)" logic_parameters: NONE # Definition of logic parameters defined by this plugin (enter 'logic_parameters: NONE', if section should be empty) diff --git a/avm/user_doc.rst b/avm/user_doc.rst index a43c137ff..551a9ac41 100644 --- a/avm/user_doc.rst +++ b/avm/user_doc.rst @@ -243,6 +243,61 @@ Beispiel einer Logik, die die Host von 3 verbundenen Geräten in eine Liste zusa string += '' sh.avm.devices.device_list(string) +get_hosts_list +~~~~~~~~~~~~~~ + +Ermittelt ein Array mit (gefilterten) Informationen der verbundenen Hosts. Dabei wird die die Abfrage der "Host List Contents" verwendet. +Der Vorteil gegenüber "get_hosts" liegt in der deutlich schnelleren Abfrage. + +In Abfrage der Hosts liefert folgenden Werte: + + - 'Index' + - 'IPAddress' + - 'MACAddress' + - 'Active' + - 'HostName' + - 'InterfaceType' + - 'Port' + - 'Speed' + - 'UpdateAvailable' + - 'UpdateSuccessful' + - 'InfoURL' + - 'MACAddressList' + - 'Model' + - 'URL' + - 'Guest' + - 'RequestClient' + - 'VPN' + - 'WANAccess' + - 'Disallow' + - 'IsMeshable' + - 'Priority' + - 'FriendlyName' + - 'FriendlyNameIsWriteable' + +Auf all diese Werte kann mit dem Parameter "filter_dict" gefiltert werden. Dabei können auch mehrere Filter gesetzt werden. + +Das folgende Beispiel liefert alle Informationen zu den aktiven Hosts zurück: + +.. code-block:: python + + hosts = sh.fritzbox_7490.get_hosts_list(filter_dict={'Active': True}) + +Das folgende Beispiel liefer alle Informationen zu den aktiven Hosts zurück, bei den ein Update vorliegt: + +.. code-block:: python + + hosts = sh.fritzbox_7490.get_hosts_list(filter_dict={'Active': True, 'UpdateAvailable': True}) + +Des Weiteren können über den Parameter "identifier_list" die Identifier des Hosts festgelegt werden, die zurückgegeben werden sollen. +Möglich sind: 'index', 'ipaddress', 'macaddress', 'hostname', 'friendlyname' + +Das folgende Beispiel liefer 'IPAddress' und 'MACAddress' zu den aktiven Hosts zurück, bei den ein Update vorliegt: + +.. code-block:: python + + hosts = sh.fritzbox_7490.get_hosts_list(identifier_list=['ipaddress', 'macaddress'], filter_dict={'Active': True, 'UpdateAvailable': True}) + get_phone_name ~~~~~~~~~~~~~~ Gibt den Namen eines Telefons an einem Index zurück. Der zurückgegebene Wert kann in 'set_call_origin' verwendet werden. 
From 3e0acfca16bf4fa447dac23211d6a9b9bc995190 Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 15 Jun 2023 16:38:25 +0200 Subject: [PATCH 131/775] appletv: Added requirement to circumvent problematic requirements of pyatv package under Python 3.11 --- piratewthr/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index a2a59440b..be525bcd1 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -368,7 +368,7 @@ def _build_url(self, url_type='forecast'): url = self._base_url + f"{self._key}/{self._lat},{self._lon}" parameters = f"?lang={self._lang}" if self._units is not None: - parameters += "&units={self._units}" + parameters += f"&units={self._units}" url += parameters else: self.logger.error(f"_build_url: Wrong url type specified: {url_type}") From 088d75264604f2a1a60f7bad76f55476a0690285 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 15 Jun 2023 21:04:56 +0200 Subject: [PATCH 132/775] AVM Plugin: make use of standard methods of class SmartPlugin for item handling/storage - Bump to 2.0.6 - move all item handling to class AVM - make use of standard methods of SmartPlugin for item handling/storage - adapt access of items on other classes accordingly - update WebIF accordingly --- avm/__init__.py | 775 ++++++++++++++++----------------- avm/item_attributes.py | 2 +- avm/item_attributes_master.py | 2 +- avm/plugin.yaml | 2 +- avm/webif/__init__.py | 16 +- avm/webif/templates/index.html | 18 +- 6 files changed, 393 insertions(+), 422 deletions(-) diff --git a/avm/__init__.py b/avm/__init__.py index 1762dd5a7..17bd74d6f 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -42,13 +42,9 @@ from lib.model.smartplugin import SmartPlugin from .webif import WebInterface -from .item_attributes import \ - ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER, ALL_ATTRIBUTES_WRITEABLE, AHA_ATTRIBUTES, \ - TR064_ATTRIBUTES, CALL_MONITOR_ATTRIBUTES, CALL_MONITOR_ATTRIBUTES_TRIGGER, \ - CALL_MONITOR_ATTRIBUTES_GEN, CALL_MONITOR_ATTRIBUTES_IN, CALL_MONITOR_ATTRIBUTES_OUT, \ - CALL_MONITOR_ATTRIBUTES_DURATION, TAM_ATTRIBUTES, WLAN_CONFIG_ATTRIBUTES, \ - HOST_ATTRIBUTES_CHILD, DEFLECTION_ATTRIBUTES, ALL_ATTRIBUTES_WRITEONLY +from .item_attributes import * +ERROR_COUNT_TO_BE_BLACKLISTED = 2 def NoAttributeError(func): @functools.wraps(func) @@ -106,7 +102,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.5' + PLUGIN_VERSION = '2.0.6' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -178,14 +174,39 @@ def run(self): """ Run method for the plugin """ + + def create_cyclic_scheduler(target: str, items: list, fct, offset: int) -> bool: + """Create the scheduler to handle cyclic read commands and find the proper time for the cycle.""" + + # find the shortest cycle + shortestcycle = -1 + for item in items: + item_cycle = self.get_item_config(item)['avm_data_cycle'] + if item_cycle != 0 and (shortestcycle == -1 or item_cycle < shortestcycle): + shortestcycle = item_cycle + + # start the worker thread + if shortestcycle != -1: + # balance unnecessary calls and precision + workercycle = int(shortestcycle / 2) + # just in case it already exists... 
+ if self.scheduler_get(f'poll_{target}'): + self.scheduler_remove(f'poll_{target}') + dt = self.shtime.now() + datetime.timedelta(seconds=workercycle) + self.scheduler_add(f'poll_{target}', fct, cycle=workercycle, prio=5, offset=offset, next=dt) + self.logger.info(f'{target}: Added cyclic worker thread ({workercycle} sec cycle). Shortest item update cycle found: {shortestcycle} sec') + return True + else: + return False + self.logger.debug("Run method called") - if self.fritz_device is not None: - self.create_cyclic_scheduler(target='tr064', items=self.fritz_device.items, fct=self.fritz_device.cyclic_item_update, offset=2) + if self.fritz_device: + create_cyclic_scheduler(target='tr064', items=self.get_tr064_items(), fct=self.fritz_device.cyclic_item_update, offset=2) self.fritz_device.cyclic_item_update(read_all=True) if self._aha_http_interface and self.fritz_device and self.fritz_device.is_fritzbox() and self.fritz_home: # add scheduler for updating items - self.create_cyclic_scheduler(target='aha', items=self.fritz_home.items, fct=self.fritz_home.cyclic_item_update, offset=4) + create_cyclic_scheduler(target='aha', items=self.get_aha_items(), fct=self.fritz_home.cyclic_item_update, offset=4) self.fritz_home.cyclic_item_update(read_all=True) # add scheduler for checking validity of session id self.scheduler_add('check_sid', self.fritz_home.check_sid, prio=5, cycle=900, offset=30) @@ -199,15 +220,25 @@ def stop(self): """ Stop method for the plugin """ + self.logger.debug("Stop method called") - self.scheduler_remove('poll_tr064') + + self.alive = False + + if self.fritz_device: + self.scheduler_remove('poll_tr064') + self.fritz_device.connected = False + self.fritz_device = None + if self.fritz_home: self.scheduler_remove('poll_aha') self.scheduler_remove('check_sid') self.fritz_home.logout() + self.fritz_home = None + if self.monitoring_service: self.monitoring_service.disconnect() - self.alive = False + self.monitoring_service = None def parse_item(self, item): """ @@ -222,6 +253,214 @@ def parse_item(self, item): with the item, caller, source and dest as arguments and in case of the knx plugin the value can be sent to the knx with a knx write function within the knx plugin. """ + + item_config = dict() + + def _get_item_ain() -> Union[str, None]: + """ + Get AIN of device from item.conf + """ + ain_device = None + + lookup_item = item + for i in range(2): + attribute = 'ain' + ain_device = self.get_iattr_value(lookup_item.conf, attribute) + if ain_device: + break + else: + lookup_item = lookup_item.return_parent() + + if ain_device: + # deprecated warning for attribute 'ain' + self.logger.warning(f"Item {item.path()} uses deprecated 'ain' attribute. 
Please consider to switch to 'avm_ain'.") + else: + lookup_item = item + for i in range(2): + attribute = 'avm_ain' + ain_device = self.get_iattr_value(lookup_item.conf, attribute) + if ain_device is not None: + break + else: + lookup_item = lookup_item.return_parent() + + if ain_device is None: + self.logger.error(f'Device AIN for {item.path()} is not defined or instance not given') + return None + + return ain_device + + def _get_wlan_index(): + """ + return wlan index for given item + """ + wlan_index = None + lookup_item = item + for i in range(2): + attribute = 'avm_wlan_index' + + wlan_index = self.get_iattr_value(lookup_item.conf, attribute) + if wlan_index: + break + else: + lookup_item = lookup_item.return_parent() + + if wlan_index is not None: + wlan_index = int(wlan_index) - 1 + if not 0 <= wlan_index <= 2: + wlan_index = None + self.logger.warning(f"Attribute 'avm_wlan_index' for item {item.path()} not in valid range 1-3.") + + return wlan_index + + def _get_tam_index(): + """ + return tam index for given item + """ + tam_index = None + lookup_item = item + for i in range(2): + attribute = 'avm_tam_index' + + tam_index = self.get_iattr_value(lookup_item.conf, attribute) + if tam_index: + break + else: + lookup_item = lookup_item.return_parent() + + if tam_index is not None: + tam_index = int(tam_index) - 1 + if not 0 <= tam_index <= 4: + tam_index = None + self.logger.warning(f"Attribute 'avm_tam_index' for item {item.path()} not in valid range 1-5.") + + return tam_index + + def _get_deflection_index(): + """ + return deflection index for given item + """ + deflection_index = None + lookup_item = item + for i in range(2): + attribute = 'avm_deflection_index' + + deflection_index = self.get_iattr_value(lookup_item.conf, attribute) + if deflection_index: + break + else: + lookup_item = lookup_item.return_parent() + + if deflection_index is not None: + deflection_index = int(deflection_index) - 1 + if not 0 <= deflection_index <= 31: + deflection_index = None + self.logger.warning(f"Attribute 'avm_deflection_index' for item {item.path()} not in valid range 1-5.") + + return deflection_index + + def _get_mac() -> Union[str, None]: + """ + return mac for given item + """ + mac = None + lookup_item = item + for i in range(2): + attribute = 'avm_mac' + + mac = self.get_iattr_value(lookup_item.conf, attribute) + if mac: + break + else: + lookup_item = lookup_item.return_parent() + + return mac + + def get_aha_index() -> bool: + index = _get_item_ain() + if index: + self.logger.debug(f"Item {item.path()} with avm device attribute and defined avm_ain={index} found; append to list.") + item_config.update({'interface': 'aha', 'index': index}) + return True + else: + self.logger.warning(f"Item {item.path()} with avm attribute found, but 'avm_ain' is not defined; Item will be ignored.") + return False + + def get_tr064_index() -> bool: + index = None + # handle wlan items + if avm_data_type in WLAN_CONFIG_ATTRIBUTES: + index = _get_wlan_index() + if index is not None: + self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_wlan_index' with {index!r} found; append to list.") + else: + self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_wlan_index' is not defined; Item will be ignored.") + return False + + # handle network_device / host child related items + elif avm_data_type in HOST_ATTRIBUTES_CHILD: + index = _get_mac() + if index is not None: + self.logger.debug(f"Item {item.path()} with avm device 
attribute {avm_data_type!r} and defined 'avm_mac' with {index!r} found; append to list.") + else: + self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_mac' is not defined; Item will be ignored.") + return False + + # handle tam related items + elif avm_data_type in TAM_ATTRIBUTES: + index = _get_tam_index() + if index is not None: + self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_tam_index' with {index!r} found; append to list.") + else: + self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_tam_index' is not defined; Item will be ignored.") + return False + + # handle deflection related items + elif avm_data_type in DEFLECTION_ATTRIBUTES: + index = _get_deflection_index() + if index is not None: + self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_tam_index' with {index!r} found; append to list.") + else: + self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_tam_index' is not defined; Item will be ignored.") + return False + + item_config.update({'interface': 'tr064', 'index': index, 'error_count': 0}) + return True + + def get_monitor_index() -> bool: + # handle CALL_MONITOR_ATTRIBUTES_IN + if avm_data_type in CALL_MONITOR_ATTRIBUTES_IN: + monitor_item_type = 'incoming' + + elif avm_data_type in CALL_MONITOR_ATTRIBUTES_OUT: + monitor_item_type = 'outgoing' + + elif avm_data_type in CALL_MONITOR_ATTRIBUTES_GEN: + monitor_item_type = 'generic' + + elif avm_data_type in CALL_MONITOR_ATTRIBUTES_TRIGGER: + avm_incoming_allowed = self.get_iattr_value(item.conf, 'avm_incoming_allowed') + avm_target_number = self.get_iattr_value(item.conf, 'avm_target_number') + + if not avm_incoming_allowed or not avm_target_number: + self.logger.error(f"For Trigger-item={item.path()} both 'avm_incoming_allowed' and 'avm_target_number' must be specified as attributes. Item will be ignored.") + return False + else: + monitor_item_type = 'trigger' + item_config.update({'avm_incoming_allowed': avm_incoming_allowed, 'avm_target_number': avm_target_number}) + + elif avm_data_type in CALL_MONITOR_ATTRIBUTES_DURATION: + if avm_data_type == 'call_duration_incoming': + monitor_item_type = 'duration_in' + else: + monitor_item_type = 'duration_out' + + else: + monitor_item_type = 'generic' + + item_config.update({'interface': 'monitor', 'monitor_item_type': monitor_item_type}) + return True + if self.has_iattr(item.conf, 'avm_data_type'): self.logger.debug(f"parse item: {item}") @@ -233,52 +472,43 @@ def parse_item(self, item): if 0 < avm_data_cycle < 30: avm_data_cycle = 30 - # define item_config - item_config = {'avm_data_type': avm_data_type, 'avm_data_cycle': avm_data_cycle, 'next_update': time.time()} + # define initial item_config + item_config.update({'avm_data_type': avm_data_type, 'avm_data_cycle': avm_data_cycle, 'next_update': int(time.time())}) # handle items specific to call monitor if avm_data_type in CALL_MONITOR_ATTRIBUTES: - if self.monitoring_service: - self.monitoring_service.register_item(item, item_config) - else: + if not self.monitoring_service: self.logger.warning(f"Items with avm attribute {avm_data_type!r} found, which needs Call-Monitoring-Service. 
This is not available/enabled for that plugin; Item will be ignored.") + return + + if not get_monitor_index(): + return - # handle smarthome items using aha-interface (new) + # handle smarthome items using aha-interface elif avm_data_type in AHA_ATTRIBUTES: - if self.fritz_home: - self.fritz_home.register_item(item, item_config) - else: + if not self.fritz_home: self.logger.warning(f"Items with avm attribute {avm_data_type!r} found, which needs aha-http-interface. This is not available/enabled for that plugin; Item will be ignored.") + return + + if not get_aha_index(): + return # handle items updated by tr-064 interface elif avm_data_type in TR064_ATTRIBUTES: - if self.fritz_device: - self.fritz_device.register_item(item, item_config) - else: + if not self.fritz_device: self.logger.warning(f"Items with avm attribute {avm_data_type!r} found, which needs tr064 interface. This is not available/enabled; Item will be ignored.") - # handle anything else - else: - self.logger.warning(f"Item={item.path()} has unknown avm_data_type {avm_data_type!r}. Item will be ignored.") + return + + if not get_tr064_index(): + return + + # add item + self.add_item(item, config_data_dict=item_config) # items which can be changed outside the plugin context if avm_data_type in ALL_ATTRIBUTES_WRITEABLE: return self.update_item - def unparse_item(self, item): - """ remove item bindings from plugin """ - super().unparse_item(item) - - # handle items specific to call monitor - if self.monitoring_service: - self.monitoring_service.unregister_item(item) - - if self.fritz_home: - self.fritz_home.unregister_item(item) - - # handle items updated by tr-064 interface - if self.fritz_device: - self.fritz_device.unregister_item(item) - def update_item(self, item, caller=None, source=None, dest=None): """ Item has been updated @@ -295,24 +525,27 @@ def update_item(self, item, caller=None, source=None, dest=None): if self.alive and caller != self.get_fullname(): # get avm_data_type - avm_data_type = to_str(self.get_iattr_value(item.conf, 'avm_data_type')) + avm_data_type = self.get_iattr_value(item.conf, 'avm_data_type') self.logger.info(f"Updated item: {item.path()} with avm_data_type={avm_data_type} item has been changed outside this plugin from caller={caller}") readafterwrite = 0 if self.has_iattr(item.conf, 'avm_read_after_write'): - readafterwrite = to_int(self.get_iattr_value(item.conf, 'avm_read_after_write')) + readafterwrite = self.get_iattr_value(item.conf, 'avm_read_after_write') if self.debug_log: self.logger.debug(f'Attempting read after write for item: {item.path()}, avm_data_type: {avm_data_type}, delay: {readafterwrite}s') # handle items updated by tr-064 interface - if avm_data_type in TR064_ATTRIBUTES: - if self.debug_log: - self.logger.debug(f"Updated item={item.path()} with avm_data_type={avm_data_type} identified as part of 'TR064_ATTRIBUTES'") - self.fritz_device.handle_updated_item(item, avm_data_type, readafterwrite) + if avm_data_type in TR064_RW_ATTRIBUTES: + if self.fritz_device: + if self.debug_log: + self.logger.debug(f"Updated item={item.path()} with avm_data_type={avm_data_type} identified as part of 'TR064_ATTRIBUTES'") + self.fritz_device.handle_updated_item(item, avm_data_type, readafterwrite) + else: + self.logger.warning(f"AVM TR064 Interface not activated or not available. 
Update for {avm_data_type} will not be executed.") # handle items updated by AHA_ATTRIBUTES - elif avm_data_type in AHA_ATTRIBUTES: + elif avm_data_type in AHA_RW_ATTRIBUTES + AHA_WO_ATTRIBUTES: if self.fritz_home: if self.debug_log: self.logger.debug(f"Updated item={item.path()} with avm_data_type={avm_data_type} identified as part of 'AHA_ATTRIBUTES'") @@ -320,29 +553,6 @@ def update_item(self, item, caller=None, source=None, dest=None): else: self.logger.warning(f"AVM Homeautomation Interface not activated or not available. Update for {avm_data_type} will not be executed.") - def create_cyclic_scheduler(self, target: str, items: dict, fct, offset: int): - """Create the scheduler to handle cyclic read commands and find the proper time for the cycle.""" - # find the shortest cycle - shortestcycle = -1 - for item in items: - item_cycle = items[item]['avm_data_cycle'] - if item_cycle != 0 and (shortestcycle == -1 or item_cycle < shortestcycle): - shortestcycle = item_cycle - - # Start the worker thread - if shortestcycle != -1: - # Balance unnecessary calls and precision - workercycle = int(shortestcycle / 2) - # just in case it already exists... - if self.scheduler_get(f'poll_{target}'): - self.scheduler_remove(f'poll_{target}') - dt = self.shtime.now() + datetime.timedelta(seconds=workercycle) - self.scheduler_add(f'poll_{target}', fct, cycle=workercycle, prio=5, offset=offset, next=dt) - self.logger.info(f'{target}: Added cyclic worker thread ({workercycle} sec cycle). Shortest item update cycle found: {shortestcycle} sec') - return True - else: - return False - @property def log_level(self): return self.logger.getEffectiveLevel() @@ -453,6 +663,29 @@ def set_deflection_enable(self, deflection_id: int = 0, new_enable: bool = False def set_tam(self, tam_index: int = 0, new_enable: bool = False): return self.fritz_device.set_tam(tam_index, new_enable) + def get_aha_items(self): + return self.get_item_list(filter_key='interface', filter_value='aha') + + def get_tr064_items(self): + return self.get_item_list(filter_key='interface', filter_value='tr064') + + def get_monitor_items(self): + return self.get_item_list(filter_key='interface', filter_value='monitor') + + def reset_item_blacklist(self): + """ + Clean/reset item blacklist + """ + for item in self.get_item_list(): + self.get_item_config(item)['error_count'] = 0 + self.logger.info(f"Item Blacklist reset. 
item_blacklist={self.get_tr064_items_blacklisted()}") + + def get_tr064_items_blacklisted(self) -> list: + """ + Return list of blacklisted items + """ + return self.get_item_list(filter_key='error_count', filter_value=ERROR_COUNT_TO_BE_BLACKLISTED) + class FritzDevice: """ @@ -494,8 +727,6 @@ class FritzDevice: FRITZ_L2TPV3_FILE = "l2tpv3.xml" FRITZ_FBOX_DESC_FILE = "fboxdesc.xml" - ERROR_COUNT_TO_BE_BLACKLISTED = 2 - def __init__(self, host, port, ssl, verify, username, password, call_monitor_incoming_filter, use_tr064_backlist, log_entry_count, plugin_instance): """ Init class FritzDevice @@ -517,7 +748,6 @@ def __init__(self, host, port, ssl, verify, username, password, call_monitor_inc self._data_cache = {} self._calllist_cache = [] self._timeout = 10 - self.items = {} self._session = requests.Session() self.connected = False self.default_connection_service = None @@ -547,71 +777,11 @@ def __init__(self, host, port, ssl, verify, username, password, call_monitor_inc self.logger.error(f"Init TR064 Client for {self.FRITZ_IGD_DESC_FILE} caused error {e!r}.") pass - def register_item(self, item, item_config: dict): - """ - Parsed items valid for that class will be registered - """ - index = None - avm_data_type = item_config['avm_data_type'] - - # if fritz device is repeater and avm_data_type is not supported by repeater, return - if self.is_repeater() and avm_data_type not in ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER: - self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, which is not supported by Repeaters; Item will be ignored.") - return - - # handle wlan items - if avm_data_type in WLAN_CONFIG_ATTRIBUTES: - index = self._get_wlan_index(item) - if index is not None: - self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_wlan_index' with {index!r} found; append to list.") - else: - self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_wlan_index' is not defined; Item will be ignored.") - return - - # handle network_device / host child related items - elif avm_data_type in HOST_ATTRIBUTES_CHILD: - index = self._get_mac(item) - if index is not None: - self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_mac' with {index!r} found; append to list.") - else: - self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_mac' is not defined; Item will be ignored.") - return - - # handle tam related items - elif avm_data_type in TAM_ATTRIBUTES: - index = self._get_tam_index(item) - if index is not None: - self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_tam_index' with {index!r} found; append to list.") - else: - self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_tam_index' is not defined; Item will be ignored.") - return - - # handle deflection related items - elif avm_data_type in DEFLECTION_ATTRIBUTES: - index = self._get_deflection_index(item) - if index is not None: - self.logger.debug(f"Item {item.path()} with avm device attribute {avm_data_type!r} and defined 'avm_tam_index' with {index!r} found; append to list.") - else: - self.logger.warning(f"Item {item.path()} with avm attribute {avm_data_type!r} found, but 'avm_tam_index' is not defined; Item will be ignored.") - return - - # update item config - item_config.update({'interface': 'tr064', 'index': index, 'error_count': 0}) - - # register item - 
self.items[item] = item_config - - def unregister_item(self, item): - """ remove item from instance """ - try: - del self.items[item] - except KeyError: - pass - def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): """Updated Item will be processed and value communicated to AVM Device""" + # get index - index = self.items[item]['index'] + index = self._plugin_instance.get_item_config(item)['index'] # to be set value to_be_set_value = item() @@ -673,88 +843,6 @@ def _build_url(self) -> str: return url - def _get_wlan_index(self, item): - """ - return wlan index for given item - """ - wlan_index = None - for _ in range(2): - attribute = 'avm_wlan_index' - - wlan_index = self._plugin_instance.get_iattr_value(item.conf, attribute) - if wlan_index: - break - else: - item = item.return_parent() - - if wlan_index is not None: - wlan_index = int(wlan_index) - 1 - if not 0 <= wlan_index <= 2: - wlan_index = None - self.logger.warning(f"Attribute 'avm_wlan_index' for item {item.path()} not in valid range 1-3.") - - return wlan_index - - def _get_tam_index(self, item): - """ - return tam index for given item - """ - tam_index = None - for _ in range(2): - attribute = 'avm_tam_index' - - tam_index = self._plugin_instance.get_iattr_value(item.conf, attribute) - if tam_index: - break - else: - item = item.return_parent() - - if tam_index is not None: - tam_index = int(tam_index) - 1 - if not 0 <= tam_index <= 4: - tam_index = None - self.logger.warning(f"Attribute 'avm_tam_index' for item {item.path()} not in valid range 1-5.") - - return tam_index - - def _get_deflection_index(self, item): - """ - return deflection index for given item - """ - deflection_index = None - for _ in range(2): - attribute = 'avm_deflection_index' - - deflection_index = self._plugin_instance.get_iattr_value(item.conf, attribute) - if deflection_index: - break - else: - item = item.return_parent() - - if deflection_index is not None: - deflection_index = int(deflection_index) - 1 - if not 0 <= deflection_index <= 31: - deflection_index = None - self.logger.warning(f"Attribute 'avm_deflection_index' for item {item.path()} not in valid range 1-5.") - - return deflection_index - - def _get_mac(self, item) -> Union[str, None]: - """ - return mac for given item - """ - mac = None - for _ in range(2): - attribute = 'avm_mac' - - mac = self._plugin_instance.get_iattr_value(item.conf, attribute) - if mac: - break - else: - item = item.return_parent() - - return mac - def _get_default_connection_service(self): _default_connection_service = self._poll_fritz_device('default_connection_service', enforce_read=True) @@ -769,7 +857,7 @@ def _get_default_connection_service(self): return 'IP' def item_list(self): - return list(self.items.keys()) + return self._plugin_instance.get_tr064_items() def manufacturer_name(self): return self._poll_fritz_device('manufacturer') @@ -783,7 +871,7 @@ def model_name(self): def product_class(self): return self._poll_fritz_device('product_class') - def desciption(self): + def description(self): return self._poll_fritz_device('description') def safe_port(self): @@ -816,23 +904,17 @@ def wlan_devices_count(self): def cyclic_item_update(self, read_all: bool = False): """Updates Item Values""" - if not self._plugin_instance.alive: - return - - if not self.connected: - self.logger.warning("FritzDevice not connected. 
No update of item values possible.") - return - current_time = int(time.time()) # iterate over items and get data - for item in self.items: + for item in self.item_list(): - if not self._plugin_instance.alive: + if not self.connected: + self.logger.warning("FritzDevice not connected. No update of item values possible.") return # get item config - item_config = self.items[item] + item_config = self._plugin_instance.get_item_config(item) avm_data_type = item_config['avm_data_type'] index = item_config['index'] cycle = item_config['avm_data_cycle'] @@ -840,12 +922,12 @@ def cyclic_item_update(self, read_all: bool = False): error_count = item_config['error_count'] # check if item is blacklisted - if error_count >= self.ERROR_COUNT_TO_BE_BLACKLISTED: + if error_count >= ERROR_COUNT_TO_BE_BLACKLISTED: self.logger.info(f"Item {item.path()} is blacklisted due to exceptions in former update cycles. Item will be ignored.") continue # read items with cycle == 0 just at init - if not read_all and cycle == 0: + if cycle == 0 and not read_all: self.logger.debug(f"Item {item.path()} just read at init. No further update.") continue @@ -856,20 +938,19 @@ def cyclic_item_update(self, read_all: bool = False): # check, if client_igd exists when avm_data_type startswith 'wan_current' are due if avm_data_type.startswith('wan_current') and self.client_igd is None: - self.logger.debug(f"Skipping item {item} with wan_current and no client_igd") + self.logger.debug(f"Skipping item {item.path()} with avm_data_type 'wan_current' since not client für IGD is available.") continue self.logger.debug(f"Item={item.path()} with avm_data_type={avm_data_type} and index={index} will be updated") # get data and set item value - if not self._update_item_value(item, avm_data_type, index) and self.use_tr064_blacklist: error_count += 1 self.logger.debug(f"{item.path()} caused error. New error_count: {error_count}. Item will be blacklisted after more than 2 errors.") item_config.update({'error_count': error_count}) # set next due date - self.items[item].update({'next_update': current_time + cycle}) + item_config['next_update'] = current_time + cycle # clear data cache dict after update cycle self._clear_data_cache() @@ -1158,21 +1239,6 @@ def _request(self, url: str, timeout: int, verify: bool): self.logger.error(f"Request to URL={url} failed with {request.status_code}") request.raise_for_status() - def reset_item_blacklist(self): - """ - Clean/reset item blacklist - """ - for item in self.items: - self.items[item]['error_count'] = 0 - self.logger.info(f"Item Blacklist reset. item_blacklist={self.get_tr064_items_blacklisted()}") - - def get_tr064_items_blacklisted(self) -> list: - item_list = [] - for item in self.items: - if self.items[item].get('error_count', 0) >= self.ERROR_COUNT_TO_BE_BLACKLISTED: - item_list.append(item) - return item_list - # ---------------------------------- # Fritz Device methods, reboot, wol, reconnect # ---------------------------------- @@ -1446,8 +1512,8 @@ def set_wlan(self, wlan_index: int, new_enable: bool = False): def set_wlan_time_remaining(self, wlan_index: int): """look for item and set time remaining""" - for item in self.items: # search for guest time remaining item. - if self.items[item][0] == 'wlan_guest_time_remaining' and self.items[item][1] == wlan_index: + for item in self.item_list(): # search for guest time remaining item. 
+ if self._plugin_instance.get_item_config(item)['avm_data_type'] == 'wlan_guest_time_remaining' and self._plugin_instance.get_item_config(item)['index'] == wlan_index: data = self._poll_fritz_device('wlan_guest_time_remaining', wlan_index, enforce_read=True) if data is not None: item(data, self._plugin_instance.get_fullname()) @@ -1813,9 +1879,8 @@ class FritzHome: """ Fritzhome object to communicate with the device via AHA-HTTP Interface. """ - """ - Definition of AHA Routes - """ + + # Definition of AHA Routes LOGIN_ROUTE = '/login_sid.lua?version=2' LOG_ROUTE = '/query.lua?mq_log=logger:status/log' LOG_SEPARATE_ROUTE = '/query.lua?mq_log=logger:status/log_separate' @@ -1843,7 +1908,6 @@ def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_in self._logged_in = False self._session = requests.Session() self._timeout = 10 - self.items = dict() self.connected = False self.last_request = None self.log_entry_count = log_entry_count @@ -1853,31 +1917,6 @@ def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_in if not self._logged_in: raise IOError("Error 'Login failed'") - def register_item(self, item, item_config: dict): - """ - Parsed items valid fpr that class will be registered - """ - # handle aha items - index = self._get_item_ain(item) - if index: - self.logger.debug(f"Item {item.path()} with avm device attribute and defined avm_ain={index} found; append to list.") - else: - self.logger.warning(f"Item {item.path()} with avm attribute found, but 'avm_ain' is not defined; Item will be ignored.") - return - - # update item config - item_config.update({'interface': 'aha', 'index': index}) - - # register item - self.items[item] = item_config - - def unregister_item(self, item): - """ remove item from instance """ - try: - del self.items[item] - except KeyError: - pass - def cyclic_item_update(self, read_all: bool = False): """ Update smarthome item values using information from dict '_aha_devices' @@ -1895,9 +1934,9 @@ def cyclic_item_update(self, read_all: bool = False): current_time = int(time.time()) # iterate over items and get data - for item in self.items: + for item in self.item_list(): # get item config - item_config = self.items[item] + item_config = self._plugin_instance.get_item_config(item) avm_data_type = item_config['avm_data_type'] ain = item_config['index'] cycle = item_config['avm_data_cycle'] @@ -1926,7 +1965,7 @@ def cyclic_item_update(self, read_all: bool = False): avm_data_type = avm_data_type[len('set_'):] # get value - value = self.get_value_by_ain_and_avm_data_type(ain, avm_data_type) + value = getattr(self.get_devices_as_dict().get(ain), avm_data_type, None) if value is None: self.logger.debug(f'Value for attribute={avm_data_type} at device with AIN={ain} to set Item={item.path()} is not available/None.') continue @@ -1935,7 +1974,7 @@ def cyclic_item_update(self, read_all: bool = False): item(value, self._plugin_instance.get_fullname()) # set next due date - self.items[item].update({'next_update': current_time + cycle}) + item_config['next_update'] = current_time + cycle def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): """ @@ -1957,7 +1996,7 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): } # get AIN - _ain = self.items[item]['index'] + _ain = self._plugin_instance.get_item_config(item)['index'] # adapt avm_data_type by removing 'set_' if avm_data_type.startswith('set_'): @@ -2010,42 +2049,8 @@ def get_value_by_ain_and_avm_data_type(self, ain, 
avm_data_type): # return value return getattr(device, avm_data_type, None) - def _get_item_ain(self, item) -> Union[str, None]: - """ - Get AIN of device from item.conf - """ - ain_device = None - - lookup_item = item - for i in range(2): - attribute = 'ain' - ain_device = self._plugin_instance.get_iattr_value(lookup_item.conf, attribute) - if ain_device: - break - else: - lookup_item = lookup_item.return_parent() - - if ain_device: - # deprecated warning for attribute 'ain' - self.logger.warning(f"Item {item.path()} uses deprecated 'ain' attribute. Please consider to switch to 'avm_ain'.") - else: - lookup_item = item - for i in range(2): - attribute = 'avm_ain' - ain_device = self._plugin_instance.get_iattr_value(lookup_item.conf, attribute) - if ain_device is not None: - break - else: - lookup_item = lookup_item.return_parent() - - if ain_device is None: - self.logger.error(f'Device AIN for {item.path()} is not defined or instance not given') - return None - - return ain_device - def item_list(self): - return list(self.items.keys()) + return self._plugin_instance.get_aha_items() def _request(self, url: str, params=None, result: str = 'text'): """ @@ -3768,7 +3773,6 @@ def __init__(self, host, port, callback, call_monitor_incoming_filter, plugin_in self._call_monitor_incoming_filter = call_monitor_incoming_filter self._callback = callback - self.items = dict() # item dict self._call_active = dict() self._listen_active = False self._call_active['incoming'] = False @@ -3830,53 +3834,6 @@ def reconnect(self): self.disconnect() self.connect() - def register_item(self, item, item_config: dict): - """ - Registers an item to the CallMonitoringService - - :param item: item to register - :param item_config: item config dict of item to be registered - """ - avm_data_type = item_config['avm_data_type'] - - # handle CALL_MONITOR_ATTRIBUTES_IN - if avm_data_type in CALL_MONITOR_ATTRIBUTES_IN: - item_config.update({'monitor_item_type': 'incoming'}) - - elif avm_data_type in CALL_MONITOR_ATTRIBUTES_OUT: - item_config.update({'monitor_item_type': 'outgoing'}) - - elif avm_data_type in CALL_MONITOR_ATTRIBUTES_GEN: - item_config.update({'monitor_item_type': 'generic'}) - - elif avm_data_type in CALL_MONITOR_ATTRIBUTES_TRIGGER: - avm_incoming_allowed = self._plugin_instance.get_iattr_value(item.conf, 'avm_incoming_allowed') - avm_target_number = self._plugin_instance.get_iattr_value(item.conf, 'avm_target_number') - - if not avm_incoming_allowed or not avm_target_number: - self.logger.error(f"For Trigger-item={item.path()} both 'avm_incoming_allowed' and 'avm_target_number' must be specified as attributes. 
Item will be ignored.") - else: - item_config.update({'monitor_item_type': 'trigger', 'avm_incoming_allowed': avm_incoming_allowed, 'avm_target_number': avm_target_number}) - - elif avm_data_type in CALL_MONITOR_ATTRIBUTES_DURATION: - if avm_data_type == 'call_duration_incoming': - item_config.update({'monitor_item_type': 'duration_in'}) - else: - item_config.update({'monitor_item_type': 'duration_out'}) - - else: - item_config.update({'monitor_item_type': 'generic'}) - - # register item - self.items[item] = item_config - - def unregister_item(self, item): - """ remove item from instance """ - try: - del self.items[item] - except KeyError: - pass - def set_callmonitor_item_values_initially(self): """ Set callmonitor related item values after startup @@ -3886,8 +3843,10 @@ def set_callmonitor_item_values_initially(self): if not _calllist: return - for item in self.items: - avm_data_type = self.items[item]['avm_data_type'] + for item in self.item_list(): + # get item config + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] if avm_data_type == 'last_caller_incoming': for element in _calllist: @@ -3994,44 +3953,40 @@ def set_callmonitor_item_values_initially(self): break def item_list(self): - return list(self.items.keys()) + """Returns duration item list of all monitor items """ + return self._plugin_instance.get_monitor_items() def item_list_gen(self) -> list: - return self._get_item_list({'monitor_item_type': 'generic'}) + """Returns duration item list of items for generic use""" + return self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='generic') def item_list_incoming(self) -> list: - return self._get_item_list({'monitor_item_type': 'incoming'}) + """Returns duration item list of items for incoming direction""" + return self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='incoming') def item_list_outgoing(self) -> list: - return self._get_item_list({'monitor_item_type': 'outgoing'}) + """Returns duration item list of items for outgoing direction""" + return self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='outgoing') def item_list_trigger(self) -> list: - return self._get_item_list({'monitor_item_type': 'trigger'}) + """Returns duration item list of trigger items""" + return self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='trigger') def duration_item_in(self): - item_list = self._get_item_list({'monitor_item_type': 'duration_in'}) + """Returns duration item for in-direction""" + item_list = self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='duration_in') if item_list: return item_list[0] def duration_item_out(self): - item_list = self._get_item_list({'monitor_item_type': 'duration_out'}) + """Returns duration item for out-direction""" + item_list = self._plugin_instance.get_item_list(filter_key='monitor_item_type', filter_value='duration_out') if item_list: return item_list[0] - def _get_item_list(self, sub_dict: dict) -> list: - item_list = [] - for item in self.items: - if sub_dict.items() <= self.items[item].items(): - item_list.append(item) - return item_list - def item_count_total(self): - """ - Returns number of added items (all items of MonitoringService service) - - :return: number of items hold by the MonitoringService - """ - return len(self.items) + """Returns number of monitor items (all items of MonitoringService service)""" + return len(self.item_list()) def 
_listen(self, recv_buffer: int = 4096): """ @@ -4143,7 +4098,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st # set generic item value for item in self.item_list_gen(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'call_event': item(event.lower(), self._plugin_instance.get_fullname()) if avm_data_type == 'call_direction': @@ -4156,9 +4113,11 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st if event == 'RING': # process "trigger items" for trigger_item in self.item_list_trigger(): - avm_data_type = self.items[trigger_item]['avm_data_type'] - avm_incoming_allowed = self.items[trigger_item]['avm_incoming_allowed'] - avm_target_number = self.items[trigger_item]['avm_target_number'] + item_config = self._plugin_instance.get_item_config(trigger_item) + avm_data_type = item_config['avm_data_type'] + avm_incoming_allowed = item_config['avm_incoming_allowed'] + avm_target_number = item_config['avm_target_number'] + trigger_item(0, self._plugin_instance.get_fullname()) if self.debug_log: self.logger.debug(f"{avm_data_type} {call_from} {call_to}") @@ -4176,7 +4135,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st # process items specific to incoming calls for item in self.item_list_incoming(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'is_call_incoming': if self.debug_log: self.logger.debug("Setting is_call_incoming: True") @@ -4218,7 +4179,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st # process items specific to outgoing calls for item in self.item_list_outgoing(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'is_call_outgoing': item(True, self._plugin_instance.get_fullname()) elif avm_data_type == 'last_caller_outgoing': @@ -4244,7 +4207,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st self._stop_counter('outgoing') # stop potential running counter for parallel (older) outgoing call self._start_counter(dt, 'outgoing') for item in self.item_list_outgoing(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'call_event_outgoing': item(event.lower(), self._plugin_instance.get_fullname()) break @@ -4257,7 +4222,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st self.logger.debug("Starting Counter for Call Time") self._start_counter(dt, 'incoming') for item in self.item_list_incoming(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'call_event_incoming': if self.debug_log: self.logger.debug(f"Setting call_event_incoming: {event.lower()}") @@ -4268,7 +4235,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st # handle OUTGOING calls if callid == self._call_outgoing_cid: for item in self.item_list_outgoing(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = 
self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'call_event_outgoing': item(event.lower(), self._plugin_instance.get_fullname()) elif avm_data_type == 'is_call_outgoing': @@ -4280,7 +4249,9 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st # handle INCOMING calls elif callid == self._call_incoming_cid: for item in self.item_list_incoming(): - avm_data_type = self.items[item]['avm_data_type'] + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + if avm_data_type == 'call_event_incoming': if self.debug_log: self.logger.debug(f"Setting call_event_incoming: {event.lower()}") diff --git a/avm/item_attributes.py b/avm/item_attributes.py index a46bd826c..23ea313d9 100644 --- a/avm/item_attributes.py +++ b/avm/item_attributes.py @@ -37,7 +37,7 @@ AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] TR064_ATTRIBUTES = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] -AVM_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] +TR064_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] CALL_MONITOR_ATTRIBUTES = ['call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 
'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing'] CALL_MONITOR_ATTRIBUTES_TRIGGER = ['monitor_trigger'] CALL_MONITOR_ATTRIBUTES_GEN = ['call_direction', 'call_event'] diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index de5a67c2b..c1be32c4e 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -233,7 +233,7 @@ def export_item_attributs_py(): ATTRS['AHA_WO_ATTRIBUTES'] = get_attrs(['aha'], {'access': 'wo'}) ATTRS['AHA_RW_ATTRIBUTES'] = get_attrs(['aha'], {'access': 'rw'}) ATTRS['TR064_ATTRIBUTES'] = get_attrs(['tr064']) - ATTRS['AVM_RW_ATTRIBUTES'] = get_attrs(['tr064'], {'access': 'rw'}) + ATTRS['TR064_RW_ATTRIBUTES'] = get_attrs(['tr064'], {'access': 'rw'}) ATTRS['CALL_MONITOR_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'call_monitor'}) ATTRS['CALL_MONITOR_ATTRIBUTES_TRIGGER'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'trigger'}) ATTRS['CALL_MONITOR_ATTRIBUTES_GEN'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'generic'}) diff --git a/avm/plugin.yaml b/avm/plugin.yaml index aa64a2950..bdb75794b 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.5 # Plugin version (must match the version specified in __init__.py) + version: 2.0.6 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin diff --git a/avm/webif/__init__.py b/avm/webif/__init__.py index 4fafcfa6c..cdab224f9 100644 --- a/avm/webif/__init__.py +++ b/avm/webif/__init__.py @@ -61,14 +61,14 @@ def index(self, reload=None, action=None): """ if self.plugin.fritz_device: - tr064_items = self.plugin.fritz_device.item_list() + tr064_items = self.plugin.get_tr064_items() tr064_item_count = len(tr064_items) else: tr064_items = None tr064_item_count = None if self.plugin.fritz_home: - aha_items = self.plugin.fritz_home.item_list() + aha_items = self.plugin.get_aha_items() aha_item_count = len(aha_items) logentries = self.plugin.get_device_log_from_lua_separated() else: @@ -80,7 +80,7 @@ def index(self, reload=None, action=None): logentries = None if self.plugin.monitoring_service: - call_monitor_items = self.plugin.monitoring_service.item_list() + call_monitor_items = self.plugin.get_monitor_items() call_monitor_item_count = len(call_monitor_items) else: call_monitor_items = None @@ -119,7 +119,7 @@ def get_data_html(self, dataSet=None): data = dict() if self.plugin.monitoring_service: data['call_monitor'] = {} - for item in self.plugin.monitoring_service.item_list(): + for item in self.plugin.get_monitor_items(): data['call_monitor'][item.id()] = {} data['call_monitor'][item.id()]['value'] = item() data['call_monitor'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') @@ -127,16 +127,16 @@ def get_data_html(self, dataSet=None): if self.plugin.fritz_device: data['tr064_items'] = {} - for item in self.plugin.fritz_device.item_list(): + for item in self.plugin.get_tr064_items(): data['tr064_items'][item.id()] = {} data['tr064_items'][item.id()]['value'] = 
item() data['tr064_items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') data['tr064_items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') - data['tr064_items_blacklistet'] = self.plugin.fritz_device.get_tr064_items_blacklisted() + data['tr064_items_blacklistet'] = self.plugin.get_tr064_items_blacklisted() if self.plugin.fritz_home: data['aha_items'] = {} - for item in self.plugin.fritz_home.item_list(): + for item in self.plugin.get_aha_items(): data['aha_items'][item.id()] = {} data['aha_items'][item.id()]['value'] = item() data['aha_items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') @@ -160,4 +160,4 @@ def reconnect(self): @cherrypy.expose def reset_item_blacklist(self): - self.plugin.fritz_device.reset_item_blacklist() + self.plugin.reset_item_blacklist() diff --git a/avm/webif/templates/index.html b/avm/webif/templates/index.html index b206d1fcb..3ffc52a69 100644 --- a/avm/webif/templates/index.html +++ b/avm/webif/templates/index.html @@ -280,13 +280,13 @@
    {% if tr064_items %} {% for item in tr064_items %} - {% set item_config = p.fritz_device.items[item] %} + {% set item_config = p.get_item_config(item) %} - - + + @@ -315,13 +315,13 @@ {% if aha_items %} {% for item in aha_items %} - {% set item_config = p.fritz_home.items[item] %} + {% set item_config = p.get_item_config(item) %} - - + + @@ -482,19 +482,19 @@ - + - + {% endif %} {% if p.fritz_home %} - + From 0cf301b3613f1331e728975d198a67c06dcd1eb9 Mon Sep 17 00:00:00 2001 From: msinn Date: Fri, 16 Jun 2023 10:44:45 +0200 Subject: [PATCH 133/775] database: Changed log message, if no cache value was found --- database/__init__.py | 4 ++-- database/plugin.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/database/__init__.py b/database/__init__.py index 7a79905eb..0ffd9e6bc 100755 --- a/database/__init__.py +++ b/database/__init__.py @@ -51,7 +51,7 @@ class Database(SmartPlugin): """ ALLOW_MULTIINSTANCE = True - PLUGIN_VERSION = '1.6.9' + PLUGIN_VERSION = '1.6.10' # SQL queries: {item} = item table name, {log} = log table name # time, item_id, val_str, val_num, val_bool, changed @@ -251,7 +251,7 @@ def parse_item(self, item): except Exception as e: self.logger.error("Reading cache value from database for {} failed: {}".format(item.id(), e)) else: - self.logger.warning("Cache not available in database for item {}".format(item.id() )) + self.logger.notice(f"No cached value available in database for item {item.id()}") cur.close() self._db.release() elif self.get_iattr_value(item.conf, 'database').lower() == 'init': diff --git a/database/plugin.yaml b/database/plugin.yaml index 389724872..b232ff699 100755 --- a/database/plugin.yaml +++ b/database/plugin.yaml @@ -11,7 +11,7 @@ plugin: keywords: database support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1021844-neues-database-plugin - version: 1.6.9 # Plugin version + version: 1.6.10 # Plugin version sh_minversion: 1.9.3.2 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance From 816fe681b2f7aea2f786223cc6794d1ed9260ac9 Mon Sep 17 00:00:00 2001 From: msinn Date: Fri, 16 Jun 2023 10:45:15 +0200 Subject: [PATCH 134/775] smartvisu: Corrected log message --- smartvisu/__init__.py | 5 +++-- smartvisu/plugin.yaml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/smartvisu/__init__.py b/smartvisu/__init__.py index c9c9582ba..df8cf2a63 100755 --- a/smartvisu/__init__.py +++ b/smartvisu/__init__.py @@ -45,7 +45,8 @@ ######################################################################### class SmartVisu(SmartPlugin): - PLUGIN_VERSION="1.8.10" + + PLUGIN_VERSION="1.8.11" ALLOW_MULTIINSTANCE = True visu_definition = None @@ -421,7 +422,7 @@ def sv_is_configured(self): dirname = self.read_from_sv_configini('pages') result = (dirname != '') else: - self.logger.warning("Could not determine version of smartVISU in configured directory {self.smartvisu_dir}") + self.logger.warning(f"Could not determine version of smartVISU in configured directory {self.smartvisu_dir}") result = False return result diff --git a/smartvisu/plugin.yaml b/smartvisu/plugin.yaml index ffb8f7660..0238cdeb5 100755 --- a/smartvisu/plugin.yaml +++ b/smartvisu/plugin.yaml @@ -12,7 +12,7 @@ plugin: #documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1586800-support-thread-für-das-smartvisu-plugin - version: 1.8.10 # Plugin version + version: 1.8.11 # Plugin version 
sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.6 # minimum Python version to use for this plugin From 3f760a0db56934511a0175988e235a178b2beacd Mon Sep 17 00:00:00 2001 From: msinn Date: Fri, 16 Jun 2023 10:46:17 +0200 Subject: [PATCH 135/775] indego4shng: Synced image garden.svg --- indego4shng/webif/static/img/garden.svg | 1 - 1 file changed, 1 deletion(-) delete mode 120000 indego4shng/webif/static/img/garden.svg diff --git a/indego4shng/webif/static/img/garden.svg b/indego4shng/webif/static/img/garden.svg deleted file mode 120000 index 6b11a6546..000000000 --- a/indego4shng/webif/static/img/garden.svg +++ /dev/null @@ -1 +0,0 @@ -/var/www/html/smartVISU2.9/dropins/garden.svg \ No newline at end of file From b2ab8b9ef4168e97aa7eb4ffcd9c8632697a2a20 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 18 Jun 2023 09:41:54 +0200 Subject: [PATCH 136/775] DB_ADDON Plugin: enable on-change attributes to be run on startup - Bump to 1.1.3 - enable on-change attributes to be run on startup - fix in WebIf --- db_addon/__init__.py | 135 ++++++++++++++++++++++++++----------------- db_addon/plugin.yaml | 17 +++++- 2 files changed, 99 insertions(+), 53 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 625e2221a..aae20ce0c 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -54,7 +54,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.1.2' + PLUGIN_VERSION = '1.1.3' # ToDo: cache temperatureseries raw data def __init__(self, sh): @@ -447,10 +447,8 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if self.parse_debug: self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") - # create item config for item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) - if (db_addon_startup and db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES) or db_addon_fct in ALL_GEN_ATTRIBUTES: - if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added to be run on startup") + # create item config for item to be run on startup + if db_addon_startup or db_addon_fct in ALL_GEN_ATTRIBUTES: item_config_data_dict.update({'startup': True}) else: item_config_data_dict.update({'startup': False}) @@ -535,12 +533,25 @@ def execute_startup_items(self) -> None: if self.execute_debug: self.logger.debug("execute_startup_items called") - if not self.suspended: - self.logger.info(f"{len(self._startup_items())} items will be calculated at startup.") - [self.item_queue.put(i) for i in self._startup_items()] - self.startup_finished = True - else: + if self.suspended: self.logger.info(f"Plugin is suspended. 
No items will be calculated.") + return + + relevant_item_list = self._startup_items() + self.logger.info(f"{len(relevant_item_list)} items will be calculated at startup.") + + for item in relevant_item_list: + item_config = self.get_item_config(item) + db_addon_fct = item_config['db_addon_fct'] + + # handle on-change items + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + self.item_queue.put((item, None)) + # handle on-demand items + else: + self.item_queue.put(item) + + self.startup_finished = True def execute_static_items(self) -> None: """ @@ -780,7 +791,7 @@ def handle_ondemand(self, item: Item) -> None: item_config.update({'value': result}) item(result, self.get_shortname()) - def handle_onchange(self, updated_item: Item, value: float) -> None: + def handle_onchange(self, updated_item: Item, value: float = None) -> None: """ Get item and item value for which an update has been detected, fill cache dicts and set item value. @@ -791,7 +802,8 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: if self.onchange_debug: self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") - relevant_item_list = self.get_item_list('database_item', updated_item) + relevant_item_list = set(self.get_item_list('database_item', updated_item)) & set(self.get_item_list('cycle', 'on-change')) + if self.onchange_debug: self.logger.debug(f"Following items where identified for update: {relevant_item_list}.") @@ -808,48 +820,62 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: _func = _var[2] _cache_dict = self.current_values[_timeframe] if not _timeframe: - return + continue if self.onchange_debug: self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.path()} with {_func=} detected. Check for update of _cache_dicts and item value.") - _initial_value = False + init = False _new_value = None # make sure, that database item is in cache dict if _database_item not in _cache_dict: _cache_dict[_database_item] = {} - if _cache_dict[_database_item].get(_func) is None: + + # get _recent_value; if not already cached, create cache + _recent_value = _cache_dict[_database_item].get(_func) + if _recent_value is None: _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value_list': _ignore_value_list} - _cached_value = self._query_item(**_query_params)[0][1] - _initial_value = True + _db_value = self._query_item(**_query_params)[0][1] + if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_cached_value}.") - else: - _cached_value = _cache_dict[_database_item][_func] + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. 
recent value={_db_value}.") + + if _db_value is not None: + _recent_value = _db_value + init = True + elif value is not None: + _recent_value = value + else: + if self.onchange_debug: + self.logger.debug(f"handle_onchange: continue due to {_db_value=}, {value}.") + continue + + # if value not given -> read at startup + if value is None: + _new_value = _recent_value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") - if _cached_value: - # check value for update of cache dict - if _func == 'min' and value < _cached_value: + # check value for update of cache dict + else: + if _func == 'min' and value < _recent_value: _new_value = value if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} lower then current min_value={_cached_value}. _cache_dict will be updated") - elif _func == 'max' and value > _cached_value: + self.logger.debug(f"handle_onchange: new value={_new_value} lower then current min_value={_recent_value}. _cache_dict will be updated") + elif _func == 'max' and value > _recent_value: _new_value = value if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} higher then current max_value={_cached_value}. _cache_dict will be updated") + self.logger.debug(f"handle_onchange: new value={_new_value} higher then current max_value={_recent_value}. _cache_dict will be updated") + elif init: + _new_value = _recent_value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") else: if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} will not change max/min for period.") - else: - _cached_value = value - - if _initial_value and not _new_value: - _new_value = _cached_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") + self.logger.debug(f"handle_onchange: new value={value} will not change max/min for period={_timeframe}.") - if _new_value: + if _new_value is not None: _cache_dict[_database_item][_func] = _new_value self.logger.info(f"Item value for '{item.path()}' with func={_func} will be set to {_new_value}") item_config = self.get_item_config(item) @@ -861,29 +887,34 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: # handle verbrauch on-change items ending with heute, woche, monat, jahr elif len(_var) == 2 and _var[0] == 'verbrauch' and _var[1] in ['heute', 'woche', 'monat', 'jahr']: _timeframe = convert_timeframe(_var[1]) + _cache_dict = self.previous_values[_timeframe] if _timeframe is None: - return + continue # make sure, that database item is in cache dict - _cache_dict = self.previous_values[_timeframe] - if _database_item not in _cache_dict: + _cached_value = _cache_dict.get(_database_item) + if _cached_value is None: _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value_list': _ignore_value_list} - _cached_value = self._query_item(**_query_params)[0][1] - _cache_dict[_database_item] = _cached_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. 
Value {_cached_value} has been added.") - else: - _cached_value = _cache_dict[_database_item] + _db_value = self._query_item(**_query_params)[0][1] + + if _db_value is not None: + _cache_dict[_database_item] = _db_value + _cached_value = _db_value + if self.onchange_debug: + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. Value={_cached_value} has been added.") + else: + self.logger.info(f"Value for end of last {_timeframe} not available. No item value will be set.") + continue # calculate value, set item value, put data into plugin_item_dict - if _cached_value is not None: - _new_value = round(value - _cached_value, 1) - self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") - item_config = self.get_item_config(item) - item_config.update({'value': _new_value}) - item(_new_value, self.get_shortname()) - else: - self.logger.info(f"Value for end of last {_timeframe} not available. No item value will be set.") + _new_value = round(value - _cached_value, 1) + self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") + item_config = self.get_item_config(item) + item_config.update({'value': _new_value}) + item(_new_value, self.get_shortname()) + + else: + self.logger.warning(f"{_db_addon_fct} given at item {item.path()} not defined in plugin. Skipped.") def _update_database_items(self): for item in self._database_item_path_items(): diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 4f1231700..5c3dc1c71 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.1.2 # Plugin version (must match the version specified in __init__.py) + version: 1.1.3 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -594,6 +594,7 @@ item_structs: verbrauch_heute: name: Verbrauch heute db_addon_fct: verbrauch_heute + db_addon_startup: yes type: num visu_acl: ro cache: yes @@ -601,6 +602,7 @@ item_structs: verbrauch_woche: name: Verbrauch seit Wochenbeginn db_addon_fct: verbrauch_woche + db_addon_startup: yes type: num visu_acl: ro cache: yes @@ -608,6 +610,7 @@ item_structs: verbrauch_monat: name: Verbrauch seit Monatsbeginn db_addon_fct: verbrauch_monat + db_addon_startup: yes type: num visu_acl: ro cache: yes @@ -615,6 +618,7 @@ item_structs: verbrauch_jahr: name: Verbrauch seit Jahresbeginn db_addon_fct: verbrauch_jahr + db_addon_startup: yes type: num visu_acl: ro cache: yes @@ -622,6 +626,7 @@ item_structs: verbrauch_rolling_12m: name: Verbrauch innerhalb der letzten 12 Monate ausgehend von gestern db_addon_fct: verbrauch_rolling_12m_heute_minus1 + db_addon_startup: yes type: num visu_acl: ro cache: yes @@ -818,60 +823,70 @@ item_structs: name: Minimaler Wert seit Tagesbeginn db_addon_fct: minmax_heute_min db_addon_ignore_value: 0 + db_addon_startup: yes type: num cache: yes heute_max: name: Maximaler Wert seit Tagesbeginn db_addon_fct: minmax_heute_max + db_addon_startup: yes type: num cache: yes last24h_min: name: Minimaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_min + 
db_addon_startup: yes type: num cache: yes last24h_max: name: Maximaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_max + db_addon_startup: yes type: num cache: yes woche_min: name: Minimaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_min + db_addon_startup: yes type: num cache: yes woche_max: name: Maximaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_max + db_addon_startup: yes type: num cache: yes monat_min: name: Minimaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_min + db_addon_startup: yes type: num cache: yes monat_max: name: Maximaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_max + db_addon_startup: yes type: num cache: yes jahr_min: name: Minimaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_min + db_addon_startup: yes type: num cache: yes jahr_max: name: Maximaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_max + db_addon_startup: yes type: num cache: yes From d1de5c4528c5a832bc4c4b8318d15fba42d76022 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 18 Jun 2023 09:42:44 +0200 Subject: [PATCH 137/775] DB_ADDON Plugin: enable on-change attributes to be run on startup - Bump to 1.1.3 - enable on-change attributes to be run on startup - fix in WebIf --- db_addon/webif/templates/index.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index a2f60cc4b..9228c5919 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -69,7 +69,7 @@ value.substring(0, length - 3) + " ..." : value; - shngInsertText(item+'_value', new_value, 'maintable', 5); + shngInsertText(item+'_value', round(new_value, 2), 'maintable', 5); shngInsertText(item+'_last_update', objResponse['items'][item]['last_update'], 'maintable'); shngInsertText(item+'_last_change', objResponse['items'][item]['last_change'], 'maintable'); } @@ -283,7 +283,7 @@ - + From 6ec929c1dbec0d165a3b6b9f3cbb32c3b03183a3 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 18 Jun 2023 14:26:47 +0200 Subject: [PATCH 138/775] DB_ADDON Plugin: Bugfix in handle_onchange --- db_addon/__init__.py | 59 +++++++++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index aae20ce0c..944bb4cd9 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -351,7 +351,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte return # create standard items config - item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list} + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list, 'query_params': query_params} if isinstance(database_item, str): item_config_data_dict.update({'database_item_path': True}) else: @@ -835,7 +835,7 @@ def handle_onchange(self, updated_item: Item, value: float = None) -> None: # get _recent_value; if not already cached, create cache _recent_value = _cache_dict[_database_item].get(_func) if _recent_value is None: - _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value_list': _ignore_value_list} + _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value_list': _ignore_value_list, 'use_oldest_entry': True} 
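As a hedged illustration of what the new 'use_oldest_entry' flag is intended to do (the actual handling is in the _query_item change further below), a minimal sketch with illustrative names, not plugin code:

    def clamp_start(ts_start, oldest_log, use_oldest_entry):
        # requested window already lies within the logged history
        if ts_start >= oldest_log:
            return ts_start
        # start lies before the oldest log entry: either shrink the window ...
        if use_oldest_entry:
            return oldest_log
        # ... or signal the caller to cancel the query
        return None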
_db_value = self._query_item(**_query_params)[0][1] if self.onchange_debug: @@ -855,7 +855,7 @@ def handle_onchange(self, updated_item: Item, value: float = None) -> None: if value is None: _new_value = _recent_value if self.onchange_debug: - self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") + self.logger.debug(f"handle_onchange: initial {_func} value for item {item.path()} will be set to {_new_value}") # check value for update of cache dict else: @@ -887,23 +887,24 @@ def handle_onchange(self, updated_item: Item, value: float = None) -> None: # handle verbrauch on-change items ending with heute, woche, monat, jahr elif len(_var) == 2 and _var[0] == 'verbrauch' and _var[1] in ['heute', 'woche', 'monat', 'jahr']: _timeframe = convert_timeframe(_var[1]) - _cache_dict = self.previous_values[_timeframe] if _timeframe is None: continue - # make sure, that database item is in cache dict + _cache_dict = self.previous_values[_timeframe] + + # get _cached_value for value at end of last period; if not already cached, create cache _cached_value = _cache_dict.get(_database_item) if _cached_value is None: - _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value_list': _ignore_value_list} + _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value_list': _ignore_value_list, 'use_oldest_entry': True} _db_value = self._query_item(**_query_params)[0][1] if _db_value is not None: _cache_dict[_database_item] = _db_value _cached_value = _db_value if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. Value={_cached_value} has been added.") + self.logger.debug(f"handle_onchange: Value for Item={updated_item.path()} at end of last {_timeframe} not in cache dict. Value={_cached_value} has been added.") else: - self.logger.info(f"Value for end of last {_timeframe} not available. No item value will be set.") + self.logger.info(f"Value for end of last {_timeframe} not available from database. Request skipped.") continue # calculate value, set item value, put data into plugin_item_dict @@ -1203,7 +1204,7 @@ def _handle_min_max(self, database_item: Item, db_addon_fct: str, ignore_value_l start = to_int(_var[4][:-1]) end = 0 group = convert_timeframe(_var[4][len(_var[4]) - 1]) - log_text = 'serie_min/max/avg' + log_text = 'serie_minmax' if timeframe is None or start is None or group is None: return else: @@ -1252,7 +1253,7 @@ def _handle_zaehlerstand(self, database_item: Item, db_addon_fct: str, ignore_va start = to_int(_var[3][:-1]) end = 0 group = convert_timeframe(_var[3][len(_var[3]) - 1]) - log_text = 'serie_min/max/avg' + log_text = 'serie_zaehlerstand' if timeframe is None or start is None or group is None: return else: @@ -1355,7 +1356,7 @@ def consumption_calc(c_start, c_end) -> Union[float, None]: if self.execute_debug: self.logger.debug(f"_handle_verbrauch: '{func}' function detected. 
{window=}, {timeframe=}, {timedelta=}") - if window_dur in ['day', 'week', 'month', 'year']: + if window_dur in ALLOWED_QUERY_TIMEFRAMES: starttime = convert_duration(timeframe, window_dur) * window_inc return consumption_calc(c_start=starttime, c_end=endtime) @@ -2168,7 +2169,7 @@ def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None] item_id = None return item_id - def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value_list=None) -> list: + def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value_list=None, use_oldest_entry: bool = False) -> list: """ Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. @@ -2180,6 +2181,7 @@ def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, :param group: first grouping parameter (default = None, possible values: day, week, month, year) :param group2: second grouping parameter (default = None, possible values: day, week, month, year) :param ignore_value_list: list of comparison operators for val_num, which will be applied during query + :param use_oldest_entry: if start is prior to oldest entry, oldest entry will be used :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for """ @@ -2255,12 +2257,12 @@ def _handle_query_result(query_result) -> list: return result if ts_start < oldest_log: - if not self.use_oldest_entry: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return result - else: + if self.use_oldest_entry or use_oldest_entry: self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") ts_start = oldest_log + else: + self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. 
Query cancelled.") + return result query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} result = _handle_query_result(self._query_log_timestamp(**query_params)) @@ -2702,13 +2704,10 @@ def convert_timeframe(timeframe: str) -> str: return convertion.get(timeframe) - def convert_duration(timeframe: str, window_dur: str) -> int: - """ - Convert duration - - """ + """Convert duration""" + _h_in_d = 24 _d_in_y = 365 _d_in_w = 7 _m_in_y = 12 @@ -2717,22 +2716,32 @@ def convert_duration(timeframe: str, window_dur: str) -> int: _d_in_m = _d_in_y / _m_in_y conversion = { - 'day': {'day': 1, + 'hour': {'hour': 1, + 'day': _h_in_d, + 'week': _h_in_d * _d_in_w, + 'month': _h_in_d * _d_in_m, + 'year': _h_in_d * _d_in_y, + }, + 'day': {'hour': 1 / _h_in_d, + 'day': 1, 'week': _d_in_w, 'month': _d_in_m, 'year': _d_in_y, }, - 'week': {'day': 1 / _d_in_w, + 'week': {'hour': 1 / (_h_in_d * _d_in_w), + 'day': 1 / _d_in_w, 'week': 1, 'month': _w_in_m, 'year': _w_in_y }, - 'month': {'day': 1 / _d_in_m, + 'month': {'hour': 1 / (_h_in_d * _d_in_m), + 'day': 1 / _d_in_m, 'week': 1 / _w_in_m, 'month': 1, 'year': _m_in_y }, - 'year': {'day': 1 / _d_in_y, + 'year': {'hour': 1 / (_h_in_d * _d_in_y), + 'day': 1 / _d_in_y, 'week': 1 / _w_in_y, 'month': 1 / _m_in_y, 'year': 1 From aaee6507a8e6de03bdf026e841c76819d35ac610 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 18 Jun 2023 14:32:44 +0200 Subject: [PATCH 139/775] DB_ADDON Plugin: Bugfix in handle_onchange --- db_addon/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 944bb4cd9..f3abf7152 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -351,7 +351,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte return # create standard items config - item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list, 'query_params': query_params} + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list} if isinstance(database_item, str): item_config_data_dict.update({'database_item_path': True}) else: From 0247b4e8ec2957e6947d25daa12e14c2f163f841 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 23 Jun 2023 17:57:18 +0200 Subject: [PATCH 140/775] DB_ADDON Plugin: Code rework to enable more flexible attributes - bump to version 1.1.4 - rework code to create query parameter during parse_item - harmonize parameters to ease handling of queries - enable onchange items also to be calculated during start - bugfixes --- db_addon/__init__.py | 1428 +++++++++++++--------------- db_addon/item_attributes.py | 28 +- db_addon/item_attributes_master.py | 248 ++--- db_addon/plugin.yaml | 26 +- 4 files changed, 825 insertions(+), 905 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index f3abf7152..90e2bce66 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -54,8 +54,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. 
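A small self-contained illustration of the duration factors the extended convert_duration() above is built on (constants as in the diff: 24 h per day, 7 days per week, 365 days per year, 12 months per year); the snippet is illustrative only, not plugin code:

    H_IN_D, D_IN_W, D_IN_Y, M_IN_Y = 24, 7, 365, 12
    D_IN_M = D_IN_Y / M_IN_Y                  # ~30.42 days per month
    W_IN_M = D_IN_M / D_IN_W                  # ~4.35 weeks per month

    # e.g. a rolling 12-month window expressed in weeks (as used for
    # verbrauch_rolling_12m_* items): convert_duration('week', 'month') * 12
    print(round(W_IN_M * 12, 1))              # -> 52.1 weeks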
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.1.3' - # ToDo: cache temperatureseries raw data + PLUGIN_VERSION = '1.1.4' def __init__(self, sh): """ @@ -91,11 +90,12 @@ def __init__(self, sh): self.active_queue_item: str = '-' # String holding item path of currently executed item # define debug logs - self.parse_debug = True # Enable / Disable debug logging for method 'parse item' - self.execute_debug = False # Enable / Disable debug logging for method 'execute items' - self.sql_debug = False # Enable / Disable debug logging for sql stuff - self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = False # Enable / Disable debug logging for query preparation + self.parse_debug = True # Enable / Disable debug logging for method 'parse item' + self.execute_debug = True # Enable / Disable debug logging for method 'execute items' + self.sql_debug = True # Enable / Disable debug logging for sql stuff + self.ondemand_debug = True # Enable / Disable debug logging for method 'handle_ondemand' + self.onchange_debug = True # Enable / Disable debug logging for method 'handle_onchange' + self.prepare_debug = True # Enable / Disable debug logging for query preparation # define default mysql settings self.default_connect_timeout = 60 @@ -112,14 +112,6 @@ def __init__(self, sh): # init cache dicts self._init_cache_dicts() - # activate debug logger - if self.log_level == 10: # info: 20 debug: 10 - self.parse_debug = True - self.execute_debug = True - self.sql_debug = True - self.onchange_debug = True - self.prepare_debug = True - # init webinterface self.init_webinterface(WebInterface) @@ -191,16 +183,261 @@ def parse_item(self, item: Item): can be sent to the knx with a knx write function within the knx plugin. """ - def get_database_item_path() -> Item: + def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: + + # get parameter + db_addon_fct_vars = db_addon_fct.split('_') + func = timeframe = timedelta = start = end = group = group2 = method = log_text = None + required_params = None + + if db_addon_fct in HISTORIE_ATTRIBUTES_ONCHANGE: + # handle functions 'minmax on-change' in format 'minmax_timeframe_func' items like 'minmax_heute_max', 'minmax_heute_min', 'minmax_woche_max', 'minmax_woche_min' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + func = db_addon_fct_vars[2] if db_addon_fct_vars[2] in ALLOWED_MINMAX_FUNCS else None + log_text = 'minmax_timeframe_func' + required_params = [func, timeframe] + + elif db_addon_fct in HISTORIE_ATTRIBUTES_LAST: + # handle functions 'minmax_last' in format 'minmax_last_timedelta|timeframe_function' like 'minmax_last_24h_max' + func = db_addon_fct_vars[3] + timeframe = convert_timeframe(db_addon_fct_vars[2][-1:]) + start = to_int(db_addon_fct_vars[2][:-1]) + end = 0 + log_text = 'minmax_last_timedelta|timeframe_function' + required_params = [func, timeframe, start, end] + + elif db_addon_fct in HISTORIE_ATTRIBUTES_TIMEFRAME: + # handle functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' + func = db_addon_fct_vars[3] # min, max, avg + timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year + start = to_int(db_addon_fct_vars[2][-1]) # 1, 2, 3, ... 
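For orientation, a hedged example of how the name-based parsing above decomposes one of these attributes; the values shown follow the assignments in the code above, but the snippet itself is illustrative and not part of the patch:

    # 'minmax_heute_minus2_max' -> ['minmax', 'heute', 'minus2', 'max']
    db_addon_fct_vars = 'minmax_heute_minus2_max'.split('_')
    func      = db_addon_fct_vars[3]            # 'max'
    timeframe = db_addon_fct_vars[1]            # 'heute', via convert_timeframe() -> 'day'
    start     = int(db_addon_fct_vars[2][-1])   # 2 (two days back)
    end       = start                           # single-day window
    # resulting query parameters: {'func': 'max', 'timeframe': 'day', 'start': 2, 'end': 2}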
+ end = start + log_text = 'minmax_timeframe_timedelta_func' + required_params = [func, timeframe, start] + + elif db_addon_fct in ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME: + # handle functions 'zaehlerstand' in format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_heute_minus1' + func = 'max' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + start = to_int(db_addon_fct_vars[2][-1]) + end = start + log_text = 'zaehlerstand_timeframe_timedelta' + required_params = [timeframe, start] + + elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ONCHANGE: + # handle functions 'verbrauch on-change' items in format 'verbrauch_timeframe' like 'verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + func = 'max' + log_text = 'verbrauch_timeframe' + required_params = [timeframe] + + elif db_addon_fct in VERBRAUCH_ATTRIBUTES_TIMEFRAME: + # handle functions 'verbrauch on-demand' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + start = to_int(db_addon_fct_vars[2][-1]) + 1 + end = to_int(db_addon_fct_vars[2][-1]) + log_text = 'verbrauch_timeframe_timedelta' + required_params = [timeframe, start, end] + + elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ROLLING: + # handle functions 'verbrauch_on-demand' in format 'verbrauch_rolling_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' + func = db_addon_fct_vars[1] + window_inc = to_int(db_addon_fct_vars[2][:-1]) # 12 + window_dur = convert_timeframe(db_addon_fct_vars[2][-1]) # day, week, month, year + timeframe = convert_timeframe(db_addon_fct_vars[3]) # day, week, month, year + if window_dur in ALLOWED_QUERY_TIMEFRAMES and window_inc and timeframe: + start = convert_duration(timeframe, window_dur) * window_inc + end = to_int(db_addon_fct_vars[4][-1]) # 1 + log_text = 'verbrauch_rolling_window_timeframe_timedelta' + required_params = [func, timeframe, start, end] + + elif db_addon_fct in VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM: + # handle functions of format 'verbrauch_jahreszeitraum_timedelta' like 'verbrauch_jahreszeitraum_minus1' + timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year + timedelta = to_int(db_addon_fct_vars[2][-1]) # 1 oder 2 oder 3 + log_text = 'verbrauch_jahreszeitraum_timedelta' + required_params = [timeframe, timedelta] + + elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_ONCHANGE: + # handle functions 'tagesmitteltemperatur on-change' items in format 'tagesmitteltemperatur_timeframe' like 'tagesmitteltemperatur_heute', 'tagesmitteltemperatur_woche', 'tagesmitteltemperatur_monat', 'tagesmitteltemperatur_jahr' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + func = 'max' + log_text = 'tagesmitteltemperatur_timeframe' + required_params = [timeframe] + + elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: + # handle 'tagesmitteltemperatur_timeframe_timedelta' like 'tagesmitteltemperatur_heute_minus1' + func = 'max' + timeframe = convert_timeframe(db_addon_fct_vars[1]) + start = to_int(db_addon_fct_vars[2][-1]) + end = start + log_text = 'tagesmitteltemperatur_timeframe_timedelta' + required_params = [func, timeframe, start, end] + + elif db_addon_fct in SERIE_ATTRIBUTES_MINMAX: + # handle functions 'serie_minmax' in format 'serie_minmax_timeframe_func_start|group' like 'serie_minmax_monat_min_15m' + func = db_addon_fct_vars[3] + timeframe = convert_timeframe(db_addon_fct_vars[2]) + start = to_int(db_addon_fct_vars[4][:-1]) + end = 0 + group = 
convert_timeframe(db_addon_fct_vars[4][len(db_addon_fct_vars[4]) - 1]) + log_text = 'serie_minmax_timeframe_func_start|group' + required_params = [func, timeframe, start, group] + + elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: + # handle functions 'serie_zaehlerstand' in format 'serie_zaehlerstand_timeframe_start|group' like 'serie_zaehlerstand_tag_30d' + func = 'max' + timeframe = convert_timeframe(db_addon_fct_vars[2]) + start = to_int(db_addon_fct_vars[3][:-1]) + group = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + log_text = 'serie_zaehlerstand_timeframe_start|group' + required_params = [timeframe, start, group] + + elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: + # handle all functions of format 'serie_verbrauch_timeframe_start|group' like 'serie_verbrauch_tag_30d' + func = 'diff_max' + timeframe = convert_timeframe(db_addon_fct_vars[2]) + start = to_int(db_addon_fct_vars[3][:-1]) + group = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + log_text = 'serie_verbrauch_timeframe_start|group' + required_params = [timeframe, start, group] + + elif db_addon_fct in SERIE_ATTRIBUTES_SUMME: + # handle all summe in format 'serie_xxsumme_timeframe_count|group' like serie_waermesumme_monat_24m + func = 'sum_max' + timeframe = 'month' + start = to_int(db_addon_fct_vars[3][:-1]) + end = 0 + group = 'day', + group2 = 'month' + log_text = 'serie_xxsumme_timeframe_count|group' + required_params = [start] + + elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_D: + # handle 'serie_tagesmittelwert_count|group' like 'serie_tagesmittelwert_0d' => Tagesmittelwert der letzten 0 Tage (also heute) + func = 'max' + timeframe = 'year' + start = to_int(db_addon_fct_vars[2][:-1]) + end = 0 + group = convert_timeframe(db_addon_fct_vars[2][len(db_addon_fct_vars[2]) - 1]) + log_text = 'serie_tagesmittelwert_count|group' + required_params = [func, timeframe, start, end, group] + + elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H: + # handle 'serie_tagesmittelwert_group2_count|group' like 'serie_tagesmittelwert_stunde_0d' => Stundenmittelwerte der letzten 0 Tage (also heute) + func = 'avg1' + timeframe = 'day' + start = to_int(db_addon_fct_vars[3][:-1]) + end = 0 + group = 'hour' + group2 = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + log_text = 'serie_tagesmittelwert_group2_count|group' + required_params = [func, timeframe, start, end, group, group2] + + elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1: + # handle 'serie_tagesmittelwert_stunde_start_end|group' like 'serie_tagesmittelwert_stunde_30_0d' => Stundenmittelwerte von vor 30 Tage bis vor 0 Tagen (also heute) + timeframe = 'day' + method = 'raw' + start = to_int(db_addon_fct_vars[3]) + end = to_int(db_addon_fct_vars[4][:-1]) + log_text = 'serie_tagesmittelwert_stunde_start_end|group' + required_params = [timeframe, method, start, end] + + elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_D_H: + # handle 'serie_tagesmittelwert_tag_stunde_end|group' like 'serie_tagesmittelwert_tag_stunde_30d' => Tagesmittelwert auf Basis des Mittelwerts pro Stunden für die letzten 30 Tage + timeframe = 'day' + method = 'raw' + start = to_int(db_addon_fct_vars[4][:-1]) + end = 0 + log_text = 'serie_tagesmittelwert_tag_stunde_end|group' + required_params = [timeframe, method, start, end] + + elif db_addon_fct in ALL_GEN_ATTRIBUTES: + log_text = 'all_gen_attributes' + required_params = [] + + if required_params is None: + self.logger.warning(f"ERROR: For calculating '{db_addon_fct}' at Item '{item.path()}' 
no mandatory parameters given.") + return + + if required_params and None in required_params: + self.logger.warning(f"ERROR: For calculating '{db_addon_fct}' at Item '{item.path()}' not all mandatory parameters given. Definitions are: {func=}, {timeframe=}, {timedelta=}, {start=}, {end=}, {group=}, {group2=}, {method=}") + return + + # create dict and reduce dict to keys with value != None + param_dict = {'func': func, 'timeframe': timeframe, 'timedelta': timedelta, 'start': start, 'end': end, 'group': group, 'group2': group2, 'method': method} + + # return reduced dict w keys with value != None + return {k: v for k, v in param_dict.items() if v is not None} + + def get_query_parameters_from_db_addon_params() -> Union[dict, None]: + """get query parameters from item attribute db_addon_params""" + + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + new_db_addon_params = {} + possible_params = required_params = [] + + if db_addon_params is None: + self.logger.warning(f"ERROR: Definition for Item '{item.path()}' with db_addon_fct={db_addon_fct} incomplete, since parameters via 'db_addon_params' not given. Item will be ignored.") + return + + # create item config for all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme + if 'summe' in db_addon_fct: + possible_params = ['year', 'month'] + + # create item config for wachstumsgradtage function + elif db_addon_fct == 'wachstumsgradtage': + possible_params = ['year', 'method', 'threshold'] + + # create item config for tagesmitteltemperatur + elif db_addon_fct == 'tagesmitteltemperatur': + possible_params = ['timeframe', 'count'] + + # create item config for minmax + elif db_addon_fct == 'minmax': + required_params = ['func', 'timeframe', 'start'] + + # create item config for minmax_last + elif db_addon_fct == 'minmax_last': + required_params = ['func', 'timeframe', 'start', 'end'] + + # create item config for verbrauch + elif db_addon_fct == 'verbrauch': + required_params = ['timeframe', 'start', 'end'] + + # create item config for zaehlerstand + elif db_addon_fct == 'zaehlerstand': + required_params = ['timeframe', 'start'] + + # create item config for db_request and everything else (get_query_parameters_from_db_addon_fct) + else: + required_params = ['func', 'timeframe'] + possible_params = ['start', 'end', 'group', 'group2', 'ignore_value_list', 'use_oldest_entry'] + + if required_params and not any(param in db_addon_params for param in required_params): + self.logger.warning(f"ERROR: Item '{item.path()}' with {db_addon_fct=} ignored, since not all mandatory parameters in {db_addon_params=} are given. 
Item will be ignored.") + return + + # reduce dict to possible keys + required_params + for key in possible_params + required_params: + value = db_addon_params.get(key) + if value: + new_db_addon_params[key] = value + + if new_db_addon_params: + return new_db_addon_params + + def get_database_item_path() -> tuple: """ - Returns item from shNG config which is an item with database attribut valid for current db_addon item + Returns item_path from shNG config which is an item with database attribut valid for current db_addon item """ _lookup_item = item for i in range(3): if self.has_iattr(_lookup_item.conf, 'db_addon_database_item'): - self.logger.debug(f"Attribut 'db_addon_database_item' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") + if self.parse_debug: + self.logger.debug(f"Attribut 'db_addon_database_item' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") _database_item_path = self.get_iattr_value(_lookup_item.conf, 'db_addon_database_item') _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) return _database_item_path, _startup @@ -218,9 +455,9 @@ def get_database_item() -> Item: for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): - self.logger.debug(f"Attribut '{self.item_attribute_search_str}' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") - _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) - return _lookup_item, _startup + if self.parse_debug: + self.logger.debug(f"Attribut '{self.item_attribute_search_str}' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") + return _lookup_item else: _lookup_item = _lookup_item.return_parent() @@ -304,7 +541,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte db_addon_ignore_value_list_optimized.append(f"{upper_end[0]} {upper_end[1]}") if max_values['!=']: for v in max_values['!=']: - if (lower_end[0] and v >= lower_end[1]) or (upper_end[0] and v <= upper_end[1]): + if (not lower_end[0] or (lower_end[0] and v >= lower_end[1])) or (not upper_end[0] or (upper_end[0] and v <= upper_end[1])): db_addon_ignore_value_list_optimized.append(f'!= {v}') self.logger.info(f"Optimized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_optimized}") @@ -319,39 +556,49 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte # get db_addon_fct attribute value db_addon_fct = self.get_iattr_value(item.conf, 'db_addon_fct').lower() + # get query parameters from db_addon_fct or db_addon_params + if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES: + query_params = get_query_parameters_from_db_addon_params() + else: + query_params = get_query_parameters_from_db_addon_fct() + if not query_params: + return + # get database item (and attribute value if item should be calculated at plugin startup) and return if not available database_item, db_addon_startup = get_database_item_path() if database_item is None: - database_item, db_addon_startup = get_database_item() + database_item = get_database_item() + db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) if database_item is None: self.logger.warning(f"No database item found for {item.path()}: Item ignored. 
Maybe you should check instance of database plugin.") return # get/create list of comparison operators and check it - db_addon_ignore_value_list = self.get_iattr_value(item.conf, 'db_addon_ignore_value_list') - if self.has_iattr(item.conf, 'db_addon_ignore_value'): - db_addon_ignore_value = self.get_iattr_value(item.conf, 'db_addon_ignore_value') - if not db_addon_ignore_value_list: - db_addon_ignore_value_list = [] + db_addon_ignore_value_list = self.get_iattr_value(item.conf, 'db_addon_ignore_value_list') # ['> 0', '< 35'] + db_addon_ignore_value = self.get_iattr_value(item.conf, 'db_addon_ignore_value') # num + + if not db_addon_ignore_value_list: + db_addon_ignore_value_list = [] + + if db_addon_ignore_value: db_addon_ignore_value_list.append(f"!= {db_addon_ignore_value}") + if any(x in str(item.path()) for x in self.ignore_0): - if not db_addon_ignore_value_list: - db_addon_ignore_value_list = [] db_addon_ignore_value_list.append("!= 0") + if self.value_filter: for entry in list(self.value_filter.keys()): if entry in str(item.path()): db_addon_ignore_value_list.extend(self.value_filter[entry]) - if db_addon_ignore_value_list: - db_addon_ignore_value_list = format_db_addon_ignore_value_list() - # check if mandatory params for ad_addon_fct are given - if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES and not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") - return + if db_addon_ignore_value_list: + db_addon_ignore_value_list_final = format_db_addon_ignore_value_list() + if self.parse_debug: + self.logger.debug(f"{db_addon_ignore_value_list_final=}") + query_params.update({'ignore_value_list': db_addon_ignore_value_list_final}) # create standard items config - item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value_list': db_addon_ignore_value_list} + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'query_params': query_params} if isinstance(database_item, str): item_config_data_dict.update({'database_item_path': True}) else: @@ -374,74 +621,14 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte item_config_data_dict.update({'cycle': 'static'}) elif db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: item_config_data_dict.update({'cycle': 'on-change'}) - - # create item config for all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme - if 'summe' in db_addon_fct: - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None or 'year' not in db_addon_params: - self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") - db_addon_params = {'year': 'current'} - item_config_data_dict.update({'params': db_addon_params}) - - # create item config for wachstumsgradtage function - elif db_addon_fct == 'wachstumsgradtage': - DEFAULT_THRESHOLD = 10 - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None or 'year' not in db_addon_params: - self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. 
Default with 'current year' will be used.") - db_addon_params = {'year': 'current'} - if 'threshold' not in db_addon_params: - self.logger.info(f"No 'threshold' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with {DEFAULT_THRESHOLD} will be used.") - db_addon_params.update({'threshold': DEFAULT_THRESHOLD}) - if not isinstance(db_addon_params['threshold'], int): - threshold = to_int(db_addon_params['threshold']) - db_addon_params['threshold'] = DEFAULT_THRESHOLD if threshold is None else threshold - item_config_data_dict.update({'params': db_addon_params}) - - # create item config for tagesmitteltemperatur - elif db_addon_fct == 'tagesmitteltemperatur': - if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") - return - - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None: - self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") - return - item_config_data_dict.update({'params': db_addon_params}) - - # create item config for db_request elif db_addon_fct == 'db_request': - if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored") - return - - db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if db_addon_params is None: - self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") - return - - if self.parse_debug: - self.logger.debug(f"parse_item: {db_addon_fct=} for item={item.path()}, {db_addon_params=}") - - if not any(param in db_addon_params for param in ('func', 'timeframe')): - self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored, not all mandatory parameters in {db_addon_params=} given. Item will be ignored.") - return - - TIMEFRAMES_2_UPDATECYCLE = {'day': 'daily', - 'week': 'weekly', - 'month': 'monthly', - 'year': 'yearly'} - - _timeframe = db_addon_params.get('group', None) - if not _timeframe: - _timeframe = db_addon_params.get('timeframe', None) - update_cycle = TIMEFRAMES_2_UPDATECYCLE.get(_timeframe) - if update_cycle is None: - self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored. 
Not able to detect update cycle.") - return - - item_config_data_dict.update({'params': db_addon_params, 'cycle': update_cycle}) + cycle = item_config_data_dict['query_params'].get('group') + if not cycle: + cycle = item_config_data_dict['query_params'].get('timeframe') + item_config_data_dict.update({'cycle': f"{timeframe_to_updatecyle(cycle)}"}) + elif db_addon_fct == 'minmax': + cycle = item_config_data_dict['query_params']['timeframe'] + item_config_data_dict.update({'cycle': f"{timeframe_to_updatecyle(cycle)}"}) # do logging if self.parse_debug: @@ -471,7 +658,8 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen elif self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): - self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") + if self.parse_debug: + self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") self.add_item(item, config_data_dict={'db_addon': 'database'}) return self.update_item @@ -490,7 +678,6 @@ def update_item(self, item, caller=None, source=None, dest=None): if self.alive and caller != self.get_shortname(): # handle database items if item in self._database_items(): - # self.logger.debug(f"update_item was called with item {item.property.path} with value {item()} from caller {caller}, source {source} and dest {dest}") if not self.startup_finished: self.logger.info(f"Handling of 'on-change' is paused for startup. No updated will be processed.") elif self.suspended: @@ -546,7 +733,7 @@ def execute_startup_items(self) -> None: # handle on-change items if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - self.item_queue.put((item, None)) + self.item_queue.put((item_config['database_item'], None)) # handle on-demand items else: self.item_queue.put(item) @@ -620,166 +807,73 @@ def handle_ondemand(self, item: Item) -> None: :param item: Item for which value will be calculated """ - # set/get parameters + # get parameters item_config = self.get_item_config(item) - db_addon = item_config['db_addon'] + if self.ondemand_debug: + self.logger.debug(f"Item={item.path()} with {item_config=}") db_addon_fct = item_config['db_addon_fct'] database_item = item_config['database_item'] - ignore_value_list = item_config.get('ignore_value_list') - result = None - self.logger.debug(f"handle_ondemand: Item={item.path()} with {item_config=}") - - # handle info functions - if db_addon == 'info': - # handle info_db_version - if db_addon_fct == 'info_db_version': - result = self._get_db_version() - self.logger.debug(f"handle_ondemand: info_db_version {result=}") - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") - - # handle general functions - elif db_addon_fct in ALL_GEN_ATTRIBUTES: - # handle oldest_value - if db_addon_fct == 'general_oldest_value': - result = self._get_oldest_value(database_item) + query_params = item_config.get('query_params') + if query_params: + params = dict(query_params) + params.update({'database_item': database_item}) + else: + params = {} - # handle oldest_log - elif db_addon_fct == 'general_oldest_log': - result = self._get_oldest_log(database_item) + if self.ondemand_debug: + self.logger.debug(f"{db_addon_fct=} will _query_item with {params=}.") - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + # handle all 
on_change functions + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.ondemand_debug: + self.logger.debug(f"on-change function detected; will be calculated by next change of database item") + return # handle item starting with 'verbrauch_' - elif db_addon_fct in ALL_VERBRAUCH_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'verbrauch' detected.") - - result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value_list) + if db_addon_fct in ALL_VERBRAUCH_ATTRIBUTES: + result = self._handle_verbrauch(params) if result and result < 0: self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. Something seems to be wrong.") - # handle item starting with 'zaehlerstand_' of format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_woche_minus1' - elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'zaehlerstand' detected.") - - result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value_list)[0][1] - - # handle item starting with 'minmax_' - elif db_addon_fct in ALL_HISTORIE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'minmax' detected.") - - result = self._handle_min_max(database_item, db_addon_fct, ignore_value_list)[0][1] - - # handle item starting with 'tagesmitteltemperatur_' - elif db_addon_fct in ALL_TAGESMITTEL_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'tagesmitteltemperatur' detected.") - - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value_list)[0][1] - - # handle item starting with 'serie_' - elif db_addon_fct in ALL_SERIE_ATTRIBUTES: - if 'minmax' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_minmax' detected.") - - result = self._handle_min_max(database_item, db_addon_fct, ignore_value_list) + # handle 'serie_tagesmittelwert_stunde_30_0d' and 'serie_tagesmittelwert_tag_stunde_30d' + elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: + result = self._prepare_temperature_list(**params) - elif 'verbrauch' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_verbrauch' detected.") - - result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value_list) - - elif 'zaehlerstand' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_zaehlerstand' detected.") - - result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value_list) + # handle info functions + elif db_addon_fct == 'info_db_version': + result = self._get_db_version() - elif 'tagesmitteltemperatur' in db_addon_fct: - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie_tagesmittelwert' detected.") + # handle general functions + elif db_addon_fct == 'general_oldest_value': + result = self._get_oldest_value(database_item) - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value_list) - else: - self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + # handle oldest_log + elif db_addon_fct == 'general_oldest_log': + result = self._get_oldest_log(database_item) # handle kaeltesumme elif db_addon_fct == 'kaeltesumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - 
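The rework moves parameter building into parse_item, so handle_ondemand only merges the database item into the stored query parameters; a hedged sketch of that data flow (item path and values are assumptions, not taken from the patch):

    item_config = {
        'db_addon': 'function',
        'db_addon_fct': 'minmax_heute_minus2_max',
        'database_item': 'my.database.item',                               # illustrative
        'query_params': {'func': 'max', 'timeframe': 'day', 'start': 2, 'end': 2},
    }
    params = dict(item_config['query_params'])
    params.update({'database_item': item_config['database_item']})
    # result = self._query_item(**params)[0][1]    # as done in handle_ondemand above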
db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_kaeltesumme(**db_addon_params) + result = self._handle_kaeltesumme(database_item=database_item, year=params.get('year'), month=params.get('month')) # handle waermesumme elif db_addon_fct == 'waermesumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_waermesumme(**db_addon_params) + result = self._handle_waermesumme(database_item=database_item, year=params.get('year'), month=params.get('month')) # handle gruenlandtempsumme elif db_addon_fct == 'gruenlandtempsumme': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_gruenlandtemperatursumme(**db_addon_params) + result = self._handle_gruenlandtemperatursumme(database_item=database_item, year=params.get('year')) # handle wachstumsgradtage elif db_addon_fct == 'wachstumsgradtage': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - result = self._handle_wachstumsgradtage(**db_addon_params) - - # handle tagesmitteltemperatur - elif db_addon_fct == 'tagesmitteltemperatur': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - - if db_addon_params: - result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value_list, db_addon_params) - - # handle db_request - elif db_addon_fct == 'db_request': - db_addon_params = item_config.get('params') - if self.execute_debug: - self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected with {db_addon_params=}") - - if db_addon_params: - db_addon_params.update({'database_item': item_config['database_item']}) - if db_addon_params.keys() & {'func', 'item', 'timeframe'}: - result = self._query_item(**db_addon_params) - else: - self.logger.error(f"Attribute 'db_addon_params' not containing needed params for Item {item.path()} with {db_addon_fct=}.") + result = self._handle_wachstumsgradtage(database_item=database_item, year=params.get('year')) - # handle everything else else: - self.logger.warning(f"handle_ondemand: Function '{db_addon_fct}' for item {item.path()} not defined or found.") - return + result = self._query_item(**params)[0][1] # log result - if self.execute_debug: - self.logger.debug(f"handle_ondemand: result is {result} for item '{item.path()}' with '{db_addon_fct=}'") + if self.ondemand_debug: + self.logger.debug(f"result is {result} for item '{item.path()}' with '{db_addon_fct=}'") if result is None: self.logger.info(f" Result was None; No item value will be set.") @@ -799,6 +893,114 @@ def handle_onchange(self, updated_item: Item, value: float = None) -> None: :param value: Value of updated item """ + def handle_minmax(): + cache_dict = self.current_values[timeframe] + init = False + + if self.onchange_debug: + self.logger.debug(f"'minmax' Item={updated_item.path()} with {func=} and {timeframe=} detected. 
Check for update of cache_dicts {cache_dict=} and item value.") + + # make sure, that database item is in cache dict + if database_item not in cache_dict: + cache_dict[database_item] = {} + + # get _recent_value; if not already cached, create cache + cached_value = cache_dict[database_item].get(func) + if cached_value is None: + if self.onchange_debug: + self.logger.debug(f"Item={updated_item.path()} with {func=} and {timeframe=} not in cache dict. Query database.") + + query_params = {'func': func, 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} + db_value = self._query_item(**query_params)[0][1] + + if db_value is not None: # Wenn Werte aus DB vorliegt, nutze den + cached_value = db_value + init = True + elif value is not None: # Wenn kein Wert aus DB vorliegt, aber ein aktueller Wert, nutze den (ggf. bei ganz neuen Items, bei denen noch kein Eintrag in der DB ist) + cached_value = value + else: # Wenn gar kein Wert verfügbar ist, Abbruch + if self.onchange_debug: + self.logger.debug(f"no values available:{db_value=}, {value}. Abort...") + return + + # if value not given -> read at startup + if value is None or init: + if self.onchange_debug: + self.logger.debug(f"initial {func} value for {timeframe=} of Item={item.path()} with will be set to {cached_value}") + cache_dict[database_item][func] = cached_value + return cached_value + + # check value for update of cache dict min + elif func == 'min' and value < cached_value: + if self.onchange_debug: + self.logger.debug(f"new value={value} lower then current min_value={cached_value} for {timeframe=}. cache_dict will be updated") + cache_dict[database_item][func] = value + return value + + # check value for update of cache dict max + elif func == 'max' and value > cached_value: + if self.onchange_debug: + self.logger.debug(f"new value={value} higher then current max_value={cached_value} for {timeframe=}. cache_dict will be updated") + cache_dict[database_item][func] = value + return value + + # no impact + if self.onchange_debug: + self.logger.debug(f"new value={value} will not change max/min for period={timeframe}.") + return None + + def handle_verbrauch(): + cache_dict = self.previous_values[timeframe] + _value = value + + if self.onchange_debug: + self.logger.debug(f"'verbrauch' item {updated_item.path()} with {func=} and {value=} detected. Check for update of cache_dicts {cache_dict=} and item value.") + + # get _cached_value for value at end of last period; if not already cached, create cache + cached_value = cache_dict.get(database_item) + if cached_value is None: + if self.onchange_debug: + self.logger.debug(f"Item={updated_item.path()} with _func={func} and timeframe={timeframe} not in cache dict. recent value={cached_value}.") + + # try to get max value of last timeframe, assuming that this is the value at end of timeframe + query_params = {'func': 'max', 'database_item': database_item, 'timeframe': timeframe, 'start': 1, 'end': 1, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} + db_value = self._query_item(**query_params)[0][1] + + if db_value is None: + self.logger.info(f"Value max value for last {timeframe} available from database. 
Try to get min value of current {timeframe}.") + + # try to get min value of current timeframe, assuming that this is the value at end of timeframe + query_params = {'func': 'min', 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} + db_value = self._query_item(**query_params)[0][1] + + if db_value is None: + self.logger.info(f"min value for current {timeframe} not available from database. Abort calculation.") + return + + cache_dict[database_item] = db_value + cached_value = db_value + if self.onchange_debug: + self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. Value={cached_value} has been added.") + + # get last value from db, if now updated value is given (init) + if _value is None: + # try to get max value of current timeframe + query_params = {'func': 'max', 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} + _value = self._query_item(**query_params)[0][1] + + if _value is None: + self.logger.info(f"max value for current {timeframe} not available from database. Abort calculation.") + return + + # calculate value, set item value, put data into plugin_item_dict + _new_value = _value - cached_value + return _new_value if isinstance(_new_value, int) else round(_new_value, 1) + + def handle_tagesmitteltemp(): + self.logger.info(f"Onchange handling of 'tagesmitteltemperatur' not implemented, yet.") + # ToDo: Implement tagesmitteltemperatur onchange + return + if self.onchange_debug: self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") @@ -809,115 +1011,41 @@ def handle_onchange(self, updated_item: Item, value: float = None) -> None: for item in relevant_item_list: item_config = self.get_item_config(item) - _database_item = item_config['database_item'] - _db_addon_fct = item_config['db_addon_fct'] - _ignore_value_list = item_config['ignore_value_list'] - _var = _db_addon_fct.split('_') - - # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... - if _db_addon_fct.startswith('minmax') and len(_var) == 3 and _var[2] in ['min', 'max']: - _timeframe = convert_timeframe(_var[1]) - _func = _var[2] - _cache_dict = self.current_values[_timeframe] - if not _timeframe: - continue - + self.logger.debug(f"handle_onchange: Item={item.path()} with {item_config=}") + db_addon_fct = item_config['db_addon_fct'] + database_item = item_config['database_item'] + timeframe = item_config['query_params']['timeframe'] + func = item_config['query_params']['func'] + ignore_value_list = item_config['query_params'].get('ignore_value_list') + new_value = None + + # handle all on_change functions + if db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES: if self.onchange_debug: - self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.path()} with {_func=} detected. Check for update of _cache_dicts and item value.") + self.logger.debug(f"non on-change function detected. 
Skip update.") + continue - init = False - _new_value = None - - # make sure, that database item is in cache dict - if _database_item not in _cache_dict: - _cache_dict[_database_item] = {} - - # get _recent_value; if not already cached, create cache - _recent_value = _cache_dict[_database_item].get(_func) - if _recent_value is None: - _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value_list': _ignore_value_list, 'use_oldest_entry': True} - _db_value = self._query_item(**_query_params)[0][1] + # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... + if db_addon_fct.startswith('minmax'): + new_value = handle_minmax() - if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_db_value}.") - - if _db_value is not None: - _recent_value = _db_value - init = True - elif value is not None: - _recent_value = value - else: - if self.onchange_debug: - self.logger.debug(f"handle_onchange: continue due to {_db_value=}, {value}.") - continue - - # if value not given -> read at startup - if value is None: - _new_value = _recent_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: initial {_func} value for item {item.path()} will be set to {_new_value}") + # handle verbrauch on-change items ending with heute, woche, monat, jahr + elif db_addon_fct.startswith('verbrauch'): + new_value = handle_verbrauch() - # check value for update of cache dict - else: - if _func == 'min' and value < _recent_value: - _new_value = value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} lower then current min_value={_recent_value}. _cache_dict will be updated") - elif _func == 'max' and value > _recent_value: - _new_value = value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={_new_value} higher then current max_value={_recent_value}. _cache_dict will be updated") - elif init: - _new_value = _recent_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: initial value for item will be set with value {_new_value}") - else: - if self.onchange_debug: - self.logger.debug(f"handle_onchange: new value={value} will not change max/min for period={_timeframe}.") - - if _new_value is not None: - _cache_dict[_database_item][_func] = _new_value - self.logger.info(f"Item value for '{item.path()}' with func={_func} will be set to {_new_value}") - item_config = self.get_item_config(item) - item_config.update({'value': _new_value}) - item(_new_value, self.get_shortname()) - else: - self.logger.info(f"Received value={value} is not influencing min / max value. 
Therefore item {item.path()} will not be changed.") + # handle tagesmitteltemperatur on-change items ending with heute, woche, monat, jahr + elif db_addon_fct.startswith('tagesmitteltemperatur'): + new_value = handle_tagesmitteltemp() - # handle verbrauch on-change items ending with heute, woche, monat, jahr - elif len(_var) == 2 and _var[0] == 'verbrauch' and _var[1] in ['heute', 'woche', 'monat', 'jahr']: - _timeframe = convert_timeframe(_var[1]) - if _timeframe is None: - continue - - _cache_dict = self.previous_values[_timeframe] - - # get _cached_value for value at end of last period; if not already cached, create cache - _cached_value = _cache_dict.get(_database_item) - if _cached_value is None: - _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value_list': _ignore_value_list, 'use_oldest_entry': True} - _db_value = self._query_item(**_query_params)[0][1] - - if _db_value is not None: - _cache_dict[_database_item] = _db_value - _cached_value = _db_value - if self.onchange_debug: - self.logger.debug(f"handle_onchange: Value for Item={updated_item.path()} at end of last {_timeframe} not in cache dict. Value={_cached_value} has been added.") - else: - self.logger.info(f"Value for end of last {_timeframe} not available from database. Request skipped.") - continue - - # calculate value, set item value, put data into plugin_item_dict - _new_value = round(value - _cached_value, 1) - self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") - item_config = self.get_item_config(item) - item_config.update({'value': _new_value}) - item(_new_value, self.get_shortname()) + if new_value is None: + continue - else: - self.logger.warning(f"{_db_addon_fct} given at item {item.path()} not defined in plugin. 
Skipped.") + self.logger.info(f" Item value for '{item.path()}' with func={func} will be set to {new_value}") + item_config = self.get_item_config(item) + item_config.update({'value': new_value}) + item(new_value, self.get_shortname()) - def _update_database_items(self): + def _update_database_items(self) -> None: for item in self._database_item_path_items(): item_config = self.get_item_config(item) database_item_path = item_config.get('database_item') @@ -936,10 +1064,10 @@ def _update_database_items(self): def log_level(self): return self.logger.getEffectiveLevel() - def queue_backlog(self): + def queue_backlog(self) -> int: return self.item_queue.qsize() - def db_version(self): + def db_version(self) -> str: return self._get_db_version() def _startup_items(self) -> list: @@ -978,11 +1106,11 @@ def _database_item_path_items(self) -> list: def _ondemand_items(self) -> list: return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() - ############################## + ######################################### # Public functions / Using item_path - ############################## + ######################################### - def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str]) -> Union[int, None]: + def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str] = None) -> Union[int, None]: """ Query database for gruenlandtemperatursumme for given year or year https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme @@ -1000,7 +1128,7 @@ def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str]) -> Uni if item: return self._handle_gruenlandtemperatursumme(item, year) - def waermesumme(self, item_path: str, year, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: + def waermesumme(self, item_path: str, year: Union[int, str] = None, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: """ Query database for waermesumme for given year or year/month https://de.wikipedia.org/wiki/W%C3%A4rmesumme @@ -1016,7 +1144,7 @@ def waermesumme(self, item_path: str, year, month: Union[int, str] = None, thres if item: return self._handle_waermesumme(item, year, month, threshold) - def kaeltesumme(self, item_path: str, year, month: Union[int, str] = None) -> Union[int, None]: + def kaeltesumme(self, item_path: str, year: Union[int, str] = None, month: Union[int, str] = None) -> Union[int, None]: """ Query database for kaeltesumme for given year or year/month https://de.wikipedia.org/wiki/K%C3%A4ltesumme @@ -1037,7 +1165,7 @@ def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: in https://www.dwd.de/DE/leistungen/klimadatendeutschland/beschreibung_tagesmonatswerte.html :param item_path: item object or item_id for which the query should be done - :param timeframe: timeincrement for determination + :param timeframe: time increment for determination :param count: number of time increments starting from now to the left (into the past) :return: tagesmitteltemperatur """ @@ -1050,24 +1178,28 @@ def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: in item = self.items.return_item(item_path) if item: - return self._handle_tagesmitteltemperatur(database_item=item, db_addon_fct='tagesmitteltemperatur', params={'timeframe': timeframe, 'count': count}) + count = to_int(count) + start, end = count_to_start(count) + query_params = {'database_item': item, 'func': 'max', 'timeframe': convert_timeframe(timeframe), 'start': start, 
'end': end} + return self._handle_tagesmitteltemperatur(**query_params) - def wachstumsgradtage(self, item_path: str, year: Union[int, str], threshold: int) -> Union[int, None]: + def wachstumsgradtage(self, item_path: str, year: Union[int, str] = None, method: int = 0, threshold: int = 10) -> Union[int, None]: """ Query database for wachstumsgradtage https://de.wikipedia.org/wiki/Wachstumsgradtag :param item_path: item object or item_id for which the query should be done :param year: year the wachstumsgradtage should be calculated for + :param method: method to be used :param threshold: Temperature in °C as threshold: Ein Tage mit einer Tagesdurchschnittstemperatur oberhalb des Schwellenwertes gilt als Wachstumsgradtag :return: wachstumsgradtage """ item = self.items.return_item(item_path) if item: - return self._handle_wachstumsgradtage(item, year, threshold) + return self._handle_wachstumsgradtage(database_item=item, year=year, method=method, threshold=threshold) - def temperaturserie(self, item_path: str, year: Union[int, str], method: str) -> Union[list, None]: + def temperaturserie(self, item_path: str, year: Union[int, str] = None, method: str = 'raw') -> Union[list, None]: """ Query database for wachstumsgradtage https://de.wikipedia.org/wiki/Wachstumsgradtag @@ -1111,7 +1243,7 @@ def fetch_log(self, func: str, item_path: str, timeframe: str, start: int = None start, end = count_to_start(count) if item and start and end: - return self._query_item(func=func, item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value_list=ignore_value_list) + return self._query_item(func=func, database_item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value_list=ignore_value_list) else: return [] @@ -1159,344 +1291,66 @@ def suspend(self, state: bool = False) -> bool: return self.suspended - ############################## + ############################################## # Calculation methods / Using Item Object - ############################## - - def _handle_min_max(self, database_item: Item, db_addon_fct: str, ignore_value_list=None) -> Union[list, None]: - """ - Handle execution of min/max calculation - """ - # handle all on_change functions of format 'minmax_timeframe_function' like 'minmax_heute_max' - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on-change function with 'min/max' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - group = None - group2 = None - - # handle all 'last' functions in format 'minmax_last_window_function' like 'minmax_last_24h_max' - if len(_var) == 4 and _var[1] == 'last': - func = _var[3] - timeframe = convert_timeframe(_var[2][-1:]) - start = to_int(_var[2][:-1]) - end = 0 - log_text = 'minmax_last' - if timeframe is None or start is None: - return + ############################################## - # handle all functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' - elif len(_var) == 4 and _var[2].startswith('minus'): - func = _var[3] # min, max, avg - timeframe = convert_timeframe(_var[1]) # day, week, month, year - start = to_int(_var[2][-1]) # 1, 2, 3, ... 
- end = start - log_text = 'minmax' - if timeframe is None or start is None: - return - - # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_minmax_monat_min_15m' - elif _var[0] == 'serie' and _var[1] == 'minmax': - timeframe = convert_timeframe(_var[2]) - func = _var[3] - start = to_int(_var[4][:-1]) - end = 0 - group = convert_timeframe(_var[4][len(_var[4]) - 1]) - log_text = 'serie_minmax' - if timeframe is None or start is None or group is None: - return - else: - self.logger.info(f"_handle_min_max: No adequate function for {db_addon_fct=} found.") - return - - if func not in ALLOWED_MINMAX_FUNCS: - self.logger.info(f"_handle_min_max: Called {func=} not in allowed functions={ALLOWED_MINMAX_FUNCS}.") - return - - query_params = {'item': database_item, 'ignore_value_list': ignore_value_list, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - - if self.execute_debug: - self.logger.debug(f"_handle_min_max: db_addon_fct={log_text} function detected. {query_params=}") - - return self._query_item(**query_params) - - def _handle_zaehlerstand(self, database_item: Item, db_addon_fct: str, ignore_value_list=None) -> Union[list, None]: - """ - Handle execution of Zaehlerstand calculation - """ - # handle all on_change functions - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on-change function with 'zaehlerstand' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - group = None - group2 = None - - # handle functions starting with 'zaehlerstand' like 'zaehlerstand_heute_minus1' - if len(_var) == 3 and _var[0] == 'zaehlerstand': - func = 'max' - timeframe = convert_timeframe(_var[1]) - start = to_int(_var[2][-1]) - end = start - log_text = 'zaehlerstand' - if timeframe is None or start is None: - return - - # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_zaehlerstand_tag_30d' - elif len(_var) == 4 and _var[0] == 'serie' and _var[1] == 'zaehlerstand': - func = 'max' - timeframe = convert_timeframe(_var[2]) - start = to_int(_var[3][:-1]) - end = 0 - group = convert_timeframe(_var[3][len(_var[3]) - 1]) - log_text = 'serie_zaehlerstand' - if timeframe is None or start is None or group is None: - return - else: - self.logger.info(f"_handle_zaehlerstand: No adequate function for {db_addon_fct=} found.") - return - - query_params = {'item': database_item, 'ignore_value_list': ignore_value_list, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - - if self.execute_debug: - self.logger.debug(f"_handle_zaehlerstand: db_addon_fct={log_text} function detected. 
{query_params=}") - - return self._query_item(**query_params) - - def _handle_verbrauch(self, database_item: Item, db_addon_fct: str, ignore_value_list=None): - """ - Handle execution of verbrauch calculation - """ - - self.logger.debug(f"_handle_verbrauch called with {database_item=} and {db_addon_fct=}") - - def consumption_calc(c_start, c_end) -> Union[float, None]: - """ - Handle query for Verbrauch - - :param c_start: beginning of timeframe - :param c_end: end of timeframe - """ - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc called with {database_item=}, {timeframe=}, {c_start=}, {c_end=}") - - _result = None - _query_params = {'item': database_item, 'timeframe': timeframe} - - # get value for end and check it; - _query_params.update({'func': 'max', 'start': c_end, 'end': c_end}) - value_end = self._query_item(**_query_params)[0][1] - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc {value_end=}") - - if value_end is None: # if None (Error) return - return - elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null - return 0 - - # get value for start and check it; - _query_params.update({'func': 'min', 'start': c_end, 'end': c_end}) - value_start = self._query_item(**_query_params)[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc {value_start=}") - - if value_start is None: # if None (Error) return - return - - if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. - self.logger.info(f"No DB Entry found for requested start date. Looking for next DB entry.") - _query_params.update({'func': 'next', 'start': c_start, 'end': c_end}) - value_start = self._query_item(**_query_params)[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc: next available value is {value_start=}") - - # calculate result - if value_start is not None: - return round(value_end - value_start, 1) - - # handle all on_change functions of format 'verbrauch_timeframe' like 'verbrauch_heute' - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on_change function with 'verbrauch' detected; will be calculated by next change of database item") - return - - _var = db_addon_fct.split('_') - - # handle all functions 'verbrauch' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' - if len(_var) == 3 and _var[1] in ['heute', 'woche', 'monat', 'jahr'] and _var[2].startswith('minus'): - timeframe = convert_timeframe(_var[1]) - timedelta = to_int(_var[2][-1]) - if timedelta is None or timeframe is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. 
{timeframe=}, {timedelta=}") - - return consumption_calc(c_start=timedelta + 1, c_end=timedelta) - - # handle all functions of format 'verbrauch_function_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' - elif len(_var) == 5 and _var[1] == 'rolling' and _var[4].startswith('minus'): - func = _var[1] - window = _var[2] # 12m - window_inc = to_int(window[:-1]) # 12 - window_dur = convert_timeframe(window[-1]) # day, week, month, year - timeframe = convert_timeframe(_var[3]) # day, week, month, year - timedelta = to_int(_var[4][-1]) # 1 - endtime = timedelta - - if window_inc is None or window_dur is None or timeframe is None or timedelta is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{func}' function detected. {window=}, {timeframe=}, {timedelta=}") - - if window_dur in ALLOWED_QUERY_TIMEFRAMES: - starttime = convert_duration(timeframe, window_dur) * window_inc - return consumption_calc(c_start=starttime, c_end=endtime) - - # handle all functions of format 'verbrauch_timeframe_timedelta' like 'verbrauch_jahreszeitraum_minus1' - elif len(_var) == 3 and _var[1] == 'jahreszeitraum' and _var[2].startswith('minus'): - timeframe = convert_timeframe(_var[1]) # day, week, month, year - timedelta = to_int(_var[2][-1]) # 1 oder 2 oder 3 - if timedelta is None or timeframe is None: - return - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") + def _handle_verbrauch(self, query_params: dict): + """Handle execution of verbrauch calculation""" + # define start, end for verbrauch_jahreszeitraum_timedelta + if 'timedelta' in query_params: + timedelta = query_params.pop('timedelta') today = datetime.date.today() year = today.year - timedelta start_date = datetime.date(year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 00:00:00 ergibt - end_date = today - relativedelta(years=timedelta) + end_date = today - relativedelta(timedelta) start = (today - start_date).days end = (today - end_date).days - - return consumption_calc(c_start=start, c_end=end) - - # handle all functions of format 'serie_verbrauch_timeframe_countgroup' like 'serie_verbrauch_tag_30d' - elif db_addon_fct.startswith('serie_') and len(_var) == 4: - self.logger.debug(f"_handle_verbrauch serie reached") - func = 'diff_max' - timeframe = convert_timeframe(_var[2]) - start = to_int(_var[3][:-1]) - group = convert_timeframe(_var[3][len(_var[3]) - 1]) - group2 = None - if timeframe is None or start is None or group is None: - self.logger.warning(f"For calculating '{db_addon_fct}' not all mandatory parameters given. {timeframe=}, {start=}, {group=}") - return - - query_params = {'func': func, 'item': database_item, 'timeframe': timeframe, 'start': start, 'end': 0, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} - - if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: 'serie_verbrauch_timeframe_countgroup' function detected. 
{query_params=}") - - return self._query_item(**query_params) - else: - self.logger.info(f"_handle_verbrauch: No adequate function for {db_addon_fct=} found.") - return - - def _handle_tagesmitteltemperatur(self, database_item: Item, db_addon_fct: str, ignore_value_list=None, params: dict = None) -> list: - """ - Query database for tagesmitteltemperatur - - :param database_item: item object or item_id for which the query should be done - :param db_addon_fct: - :param ignore_value_list: list of comparison operators for val_num, which will be applied during query - :param params: - :return: tagesmitteltemperatur - """ - - # handle all on_change functions - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.execute_debug: - self.logger.debug(f"on_change function with 'tagesmitteltemperatur' detected; will be calculated by next change of database item") - return [] + start = query_params['start'] + end = query_params['end'] - _var = db_addon_fct.split('_') - group = None - group2 = None + # calculate consumption + if self.prepare_debug: + self.logger.debug(f"called with {query_params=}") - # handle tagesmitteltemperatur - if db_addon_fct == 'tagesmitteltemperatur': - if not params: - return [] + _result = None - func = 'max' - timeframe = convert_timeframe(params.get('timeframe')) - log_text = 'tagesmitteltemperatur' - count = to_int(params.get('count')) - if timeframe is None or not count: - return [] + # get value for end and check it; + query_params.update({'func': 'max', 'start': end, 'end': end}) + value_end = self._query_item(**query_params)[0][1] - start, end = count_to_start(count) - - # handle 'tagesmittelwert_timeframe_timedelta' like 'tagesmittelwert_heute_minus1' - elif len(_var) == 3 and _var[2].startswith('minus'): - func = 'max' - timeframe = convert_timeframe(_var[1]) - start = to_int(_var[2][-1]) - end = start - log_text = 'tagesmittelwert_timeframe_timedelta' - if timeframe is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_countgroup' like 'serie_tagesmittelwert_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 3: - # 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, - func = 'max' - timeframe = 'year' - log_text = 'serie_tagesmittelwert_countgroup' - start = to_int(_var[2][:-1]) - end = 0 - group = convert_timeframe(_var[2][len(_var[2]) - 1]) - if group is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_group2_count_group' like 'serie_tagesmittelwert_stunde_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 4: - # 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, - # 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, - func = 'avg1' - timeframe = 'day' - log_text = 'serie_tagesmittelwert_group2_countgroup' - start = to_int(_var[3][:-1]) - end = 0 - group = 'hour' - group2 = convert_timeframe(_var[3][len(_var[3]) - 1]) - if group2 is None or start is None: - return [] - - # handle 'serie_tagesmittelwert_group2_start_endgroup' like 'serie_tagesmittelwert_stunde_30_0d' - elif db_addon_fct.startswith('serie_') and len(_var) == 5: - timeframe = 'day' - method = 'raw' - start = to_int(_var[3]) - end = to_int(_var[4][:-1]) - if start is None or end is None: - return [] + if self.prepare_debug: + self.logger.debug(f"{value_end=}") - return self._prepare_temperature_list(database_item=database_item, 
timeframe=timeframe, start=start, end=end, method=method) + if value_end is None: # if None (Error) return + return + elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null + return 0 - # handle everything else - else: - self.logger.info(f"_handle_tagesmitteltemperatur: No adequate function for {db_addon_fct=} found.") - return [] + # get value for start and check it; + query_params.update({'func': 'min'}) + value_start = self._query_item(**query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"{value_start=}") - query_params = {'item': database_item, 'ignore_value_list': ignore_value_list, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} + if value_start is None: # if None (Error) return + return - if self.execute_debug: - self.logger.debug(f"_handle_tagesmitteltemperatur: db_addon_fct={log_text} function detected. {query_params=}") + if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. + self.logger.info(f"No DB Entry found for requested start date. Looking for next DB entry.") + query_params.update({'func': 'next', 'start': start}) + value_start = self._query_item(**query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"next available value is {value_start=}") - return self._query_item(**query_params) + # calculate result + if value_start is not None: + _new_value = value_end - value_start + return _new_value if isinstance(_new_value, int) else round(_new_value, 1) - def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: + def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, month: Union[int, str] = None) -> Union[int, None]: """ Query database for kaeltesumme for given year or year/month https://de.wikipedia.org/wiki/K%C3%A4ltesumme @@ -1507,13 +1361,18 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: :return: kaeltesumme """ - self.logger.debug(f"_handle_kaeltesumme called with {database_item=}, {year=}, {month=}") + if self.prepare_debug: + self.logger.debug(f"called with {database_item=}, {year=}, {month=}") # check validity of given year if not valid_year(year): - self.logger.error(f"_handle_kaeltesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # set default year + if not year: + year = 'current' + # define year if year == 'current': if datetime.date.today() < datetime.date(int(datetime.date.today().year), 9, 21): @@ -1529,26 +1388,27 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: start_date = datetime.date(int(year), int(month), 1) end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) else: - self.logger.error(f"_handle_kaeltesumme: Month for item={database_item.path()} was {month}. This is not a valid month. Query cancelled.") + self.logger.error(f"Month for item={database_item.path()} was {month}. This is not a valid month. 
Query cancelled.") return # define start / end today = datetime.date.today() if start_date > today: - self.logger.error(f"_handle_kaeltesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") + self.logger.error(f"Start time for query of item={database_item.path()} is in future. Query cancelled.") return start = (today - start_date).days end = (today - end_date).days if end_date < today else 0 if start < end: - self.logger.error(f"_handle_kaeltesumme: End time for query of item={database_item.path()} is before start time. Query cancelled.") + self.logger.error(f"End time for query of item={database_item.path()} is before start time. Query cancelled.") return # get raw data as list - self.logger.debug("_handle_kaeltesumme: Try to get raw data") + if self.prepare_debug: + self.logger.debug("Try to get raw data") raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') if self.execute_debug: - self.logger.debug(f"_handle_kaeltesumme: raw_value_list={raw_data=}") + self.logger.debug(f"raw_value_list={raw_data=}") # calculate value if raw_data is None: @@ -1561,7 +1421,7 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: ks -= entry[1] return int(round(ks, 0)) - def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: + def _handle_waermesumme(self, database_item: Item, year: Union[int, str] = None, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: """ Query database for waermesumme for given year or year/month https://de.wikipedia.org/wiki/W%C3%A4rmesumme @@ -1575,7 +1435,7 @@ def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: # get raw data as list raw_data = self._prepare_waermesumme(database_item=database_item, year=year, month=month) if self.execute_debug: - self.logger.debug(f"_handle_waermesumme: raw_value_list={raw_data=}") + self.logger.debug(f"raw_value_list={raw_data=}") # set threshold to min 0 threshold = max(0, threshold) @@ -1591,7 +1451,7 @@ def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: ws += entry[1] return int(round(ws, 0)) - def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, str]) -> Union[int, None]: + def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, str] = None) -> Union[int, None]: """ Query database for gruenlandtemperatursumme for given year or year/month https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme @@ -1604,7 +1464,7 @@ def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, # get raw data as list raw_data = self._prepare_waermesumme(database_item=database_item, year=year) if self.execute_debug: - self.logger.debug(f"_handle_gruenlandtemperatursumme: raw_data={raw_data}") + self.logger.debug(f"raw_data={raw_data}") # calculate value if raw_data is None: @@ -1624,7 +1484,7 @@ def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, gts += value return int(round(gts, 0)) - def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], method: int = 0, threshold: int = 10): + def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str] = None, method: int = 0, threshold: int = 10) -> Union[list, float, None]: """ Calculate "wachstumsgradtage" for given year with temperature threshold 
https://de.wikipedia.org/wiki/Wachstumsgradtag @@ -1636,8 +1496,12 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], :return: wachstumsgradtage """ + # set default year + if not year: + year = 'current' + if not valid_year(year): - self.logger.error(f"_handle_wachstumsgradtage: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return # define year @@ -1651,7 +1515,7 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], # check start_date today = datetime.date.today() if start_date > today: - self.logger.info(f"_handle_wachstumsgradtage: Start time for query of item={database_item.path()} is in future. Query cancelled.") + self.logger.info(f"Start time for query of item={database_item.path()} is in future. Query cancelled.") return # define start / end @@ -1660,19 +1524,19 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], # check end if start < end: - self.logger.error(f"_handle_wachstumsgradtage: End time for query of item={database_item.path()} is before start time. Query cancelled.") + self.logger.error(f"End time for query of item={database_item.path()} is before start time. Query cancelled.") return # get raw data as list raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax') if self.execute_debug: - self.logger.debug(f"_handle_wachstumsgradtage: raw_value_list={raw_data}") + self.logger.debug(f"raw_value_list={raw_data}") # calculate value if raw_data is None: return - elif isinstance(raw_data, list): + if isinstance(raw_data, list): # Die Berechnung des einfachen Durchschnitts // akkumuliere positive Differenz aus Mittelwert aus Tagesminimaltemperatur und Tagesmaximaltemperatur limitiert auf 30°C und Schwellenwert wgte = 0 wgte_list = [] @@ -1720,7 +1584,7 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], else: self.logger.info(f"Method for 'Wachstumsgradtag' calculation not defined.'") - def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], method: str = 'raw'): + def _handle_temperaturserie(self, database_item: Item, year: Union[int, str] = None, method: str = 'raw') -> Union[list, None]: """ provide list of lists having timestamp and temperature(s) per day @@ -1730,8 +1594,12 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], me :return: list of temperatures """ + # set default year + if not year: + year = 'current' + if not valid_year(year): - self.logger.error(f"_handle_temperaturserie: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return # define year @@ -1745,7 +1613,7 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], me # check start_date today = datetime.date.today() if start_date > today: - self.logger.info(f"_handle_temperaturserie: Start time for query of item={database_item.path()} is in future. Query cancelled.") + self.logger.info(f"Start time for query of item={database_item.path()} is in future. 
Query cancelled.") return # define start / end @@ -1754,22 +1622,22 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], me # check end if start < end: - self.logger.error(f"_handle_temperaturserie: End time for query of item={database_item.path()} is before start time. Query cancelled.") + self.logger.error(f"End time for query of item={database_item.path()} is before start time. Query cancelled.") return # check method if method not in ['hour', 'raw', 'minmax']: - self.logger.error(f"_handle_temperaturserie: Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. Query cancelled.") + self.logger.error(f"Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. Query cancelled.") return # get raw data as list temp_list = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) if self.execute_debug: - self.logger.debug(f"_handle_temperaturserie: {temp_list=}") + self.logger.debug(f"{temp_list=}") return temp_list - def _prepare_waermesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None): + def _prepare_waermesumme(self, database_item: Item, year: Union[int, str] = None, month: Union[int, str] = None) -> Union[list, None]: """Prepares raw data for waermesumme""" # check validity of given year @@ -1777,6 +1645,10 @@ def _prepare_waermesumme(self, database_item: Item, year: Union[int, str], month self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # set default year + if not year: + year = 'current' + # define year if year == 'current': year = datetime.date.today().year @@ -1884,8 +1756,9 @@ def _create_list_timestamp_minmaxtemp() -> list: # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt if method == 'hour': - raw_data = self._query_item(func='avg', item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value_list=ignore_value_list) - self.logger.debug(f"{raw_data=}") + raw_data = self._query_item(func='avg', database_item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value_list=ignore_value_list) + if self.prepare_debug: + self.logger.debug(f"{raw_data=}") if raw_data and isinstance(raw_data, list): if raw_data == [[None, None]]: @@ -1896,13 +1769,13 @@ def _create_list_timestamp_minmaxtemp() -> list: # create list of list like database query response temp_list = _create_list_timestamp_avgtemp() - self.logger.debug(f"{temp_list=}") + if self.prepare_debug: + self.logger.debug(f"{temp_list=}") return temp_list # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] 
Tagesmitteltemperatur pro Stunde wird hier im Plugin ermittelt ermittelt elif method == 'raw': - raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) - self.logger.debug(f"{raw_data=}") + raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) if raw_data and isinstance(raw_data, list): if raw_data == [[None, None]]: @@ -1910,21 +1783,21 @@ def _create_list_timestamp_minmaxtemp() -> list: # create nested dict with temps temp_dict = _create_temp_dict() - self.logger.debug(f"raw: {temp_dict=}") # calculate 'tagesdurchschnitt' and create list of list like database query response _calculate_hourly_average() - self.logger.debug(f"raw: {temp_dict=}") + if self.prepare_debug: + self.logger.debug(f"raw: {temp_dict=}") # create list of list like database query response temp_list = _create_list_timestamp_avgtemp() - self.logger.debug(f"{temp_list=}") + if self.prepare_debug: + self.logger.debug(f"{temp_list=}") return temp_list # temp_list = [[timestamp1, min-value1, max-value1], [timestamp2, min-value2, max-value2], [timestamp3, min-value3, max-value3], ...] elif method == 'minmax': - raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) - self.logger.debug(f"{raw_data=}") + raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) if raw_data and isinstance(raw_data, list): if raw_data == [[None, None]]: @@ -1932,16 +1805,18 @@ def _create_list_timestamp_minmaxtemp() -> list: # create nested dict with temps temp_dict = _create_temp_dict() - self.logger.debug(f"raw: {temp_dict=}") + if self.prepare_debug: + self.logger.debug(f"raw: {temp_dict=}") # create list of list like database query response temp_list = _create_list_timestamp_minmaxtemp() - self.logger.debug(f"{temp_list=}") + if self.prepare_debug: + self.logger.debug(f"{temp_list=}") return temp_list - ############################## + #################### # Support stuff - ############################## + #################### def _create_due_items(self) -> list: """ @@ -2042,7 +1917,6 @@ def _initialize_db(self) -> bool: # limit connection requests to 20 seconds. 
current_time = time.time() time_delta_last_connect = current_time - self.last_connect_time - # self.logger.debug(f"DEBUG: delta {time_delta_last_connect}") if time_delta_last_connect > 20: self.last_connect_time = time.time() self._db.connect() @@ -2138,7 +2012,6 @@ def _get_itemid(self, item: Item) -> int: :return: id of the item within the database """ - # self.logger.debug(f"_get_itemid called with item={item.path()}") _item_id = self.item_cache.get(item, {}).get('id', None) if _item_id is None: @@ -2169,12 +2042,12 @@ def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None] item_id = None return item_id - def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value_list=None, use_oldest_entry: bool = False) -> list: + def _query_item(self, func: str, database_item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value_list=None, use_oldest_entry: bool = False) -> list: """ Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. :param func: function to be used at query - :param item: item object or item_id for which the query should be done + :param database_item: item object or item_id for which the query should be done :param timeframe: time increment für definition of start, end (day, week, month, year) :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning end of today, end of last week, end of last month, end of last year) @@ -2212,14 +2085,14 @@ def _handle_query_result(query_result) -> list: return _result if self.prepare_debug: - self.logger.debug(f"_query_item called with {func=}, item={item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") + self.logger.debug(f"called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") # set default result result = [[None, None]] # check correctness of timeframe if timeframe not in ALLOWED_QUERY_TIMEFRAMES: - self.logger.error(f"_query_item: Requested {timeframe=} for item={item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") + self.logger.error(f"Requested {timeframe=} for item={database_item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") return result # check start / end for being int @@ -2232,43 +2105,43 @@ def _handle_query_result(query_result) -> list: # check correctness of start / end if start < end: - self.logger.warning(f"_query_item: Requested {start=} for item={item.path()} is not valid since {start=} < {end=}. Query cancelled.") + self.logger.warning(f"Requested {start=} for item={database_item.path()} is not valid since {start=} < {end=}. Query cancelled.") return result # define item_id - item_id = self._get_itemid(item) + item_id = self._get_itemid(database_item) if not item_id: - self.logger.error(f"_query_item: ItemId for item={item.path()} not found. Query cancelled.") + self.logger.error(f"ItemId for item={database_item.path()} not found. 
Query cancelled.") return result # define start and end of query as timestamp in microseconds ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) - oldest_log = int(self._get_oldest_log(item)) + oldest_log = int(self._get_oldest_log(database_item)) if start is None: ts_start = oldest_log if self.prepare_debug: - self.logger.debug(f"_query_item: Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") + self.logger.debug(f"Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") # check if values for end time and start time are in database if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) - self.logger.info(f"_query_item: Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + self.logger.info(f"Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") return result if ts_start < oldest_log: if self.use_oldest_entry or use_oldest_entry: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") + self.logger.info(f"Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") ts_start = oldest_log else: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + self.logger.info(f"Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. 
Query cancelled.") return result query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} result = _handle_query_result(self._query_log_timestamp(**query_params)) if self.prepare_debug: - self.logger.debug(f"_query_item: value for item={item.path()} with {query_params=}: {result}") + self.logger.debug(f"value for item={database_item.path()} with {query_params=}: {result}") return result @@ -2331,9 +2204,9 @@ def _work_item_queue_thread_shutdown(self): self.logger.info("Thread 'work_item_queue_thread' has been terminated.") self.work_item_queue_thread = None - ############################## + ################################# # Database Query Preparation - ############################## + ################################# def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = None, group2: str = None, ignore_value_list=None) -> Union[list, None]: """ @@ -2353,7 +2226,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i # do debug log if self.prepare_debug: - self.logger.debug(f"_query_log_timestamp: Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value_list=}") + self.logger.debug(f"Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value_list=}") # define query parts _select = { @@ -2425,15 +2298,15 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i # check correctness of func if func not in _select: - self.logger.error(f"_query_log_timestamp: Requested {func=} for {item_id=} not defined. Query cancelled.") + self.logger.error(f"Requested {func=} for {item_id=} not defined. Query cancelled.") return # check correctness of group and group2 if group not in _group_by: - self.logger.error(f"_query_log_timestamp: Requested {group=} for item={item_id=} not defined. Query cancelled.") + self.logger.error(f"Requested {group=} for item={item_id=} not defined. Query cancelled.") return if group2 not in _group_by: - self.logger.error(f"_query_log_timestamp: Requested {group2=} for item={item_id=} not defined. Query cancelled.") + self.logger.error(f"Requested {group2=} for item={item_id=} not defined. 
Query cancelled.") return # handle ignore values @@ -2456,7 +2329,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i # do debug log if self.prepare_debug: - self.logger.debug(f"_query_log_timestamp: {query=}, {params=}") + self.logger.debug(f"{query=}, {params=}") # request database and return result return self._fetchall(query, params) @@ -2470,7 +2343,7 @@ def _read_log_all(self, item_id: int): """ if self.prepare_debug: - self.logger.debug(f"_read_log_all: Called for {item_id=}") + self.logger.debug(f"called for {item_id=}") query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" params = {'item_id': item_id} @@ -2557,9 +2430,9 @@ def _get_db_net_read_timeout(self) -> list: query = "SHOW GLOBAL VARIABLES LIKE 'net_read_timeout'" return self._fetchone(query) - ############################## + ####################### # Database Queries - ############################## + ####################### def _execute(self, query: str, params: dict = None, cur=None) -> list: if params is None: @@ -2584,17 +2457,17 @@ def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None params = {} if self.sql_debug: - self.logger.debug(f"_query: Called with {query=}, {params=}, {cur=}") + self.logger.debug(f"Called with {query=}, {params=}, {cur=}") if not self._initialize_db(): return None if cur is None: if self._db.verify(5) == 0: - self.logger.error("_query: Connection to database not recovered.") + self.logger.error("Connection to database not recovered.") return None if not self._db.lock(300): - self.logger.error("_query: Can't query due to fail to acquire lock.") + self.logger.error("Can't query due to fail to acquire lock.") return None query_readable = re.sub(r':([a-z_]+)', r'{\1}', query).format(**params) @@ -2602,25 +2475,23 @@ def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None try: tuples = fetch(query, params, cur=cur) except Exception as e: - self.logger.error(f"_query: Error for query '{query_readable}': {e}") + self.logger.error(f"Error for query '{query_readable}': {e}") else: if self.sql_debug: - self.logger.debug(f"_query: Result of '{query_readable}': {tuples}") + self.logger.debug(f"Result of '{query_readable}': {tuples}") return tuples finally: if cur is None: self._db.release() -############################## +####################### # Helper functions -############################## +####################### def params_to_dict(string: str) -> Union[dict, None]: - """ - Parse a string with named arguments and comma separation to dict; (e.g. string = 'year=2022, month=12') - """ + """Parse a string with named arguments and comma separation to dict; (e.g. 
string = 'year=2022, month=12')""" try: res_dict = dict((a.strip(), b.strip()) for a, b in (element.split('=') for element in string.split(', '))) @@ -2651,9 +2522,7 @@ def params_to_dict(string: str) -> Union[dict, None]: def valid_year(year: Union[int, str]) -> bool: - """ - Check if given year is digit and within allowed range - """ + """Check if given year is digit and within allowed range""" if ((isinstance(year, int) or (isinstance(year, str) and year.isdigit())) and ( 1980 <= int(year) <= datetime.date.today().year)) or (isinstance(year, str) and year == 'current'): @@ -2663,9 +2532,7 @@ def valid_year(year: Union[int, str]) -> bool: def valid_month(month: Union[int, str]) -> bool: - """ - Check if given month is digit and within allowed range - """ + """Check if given month is digit and within allowed range""" if (isinstance(month, int) or (isinstance(month, str) and month.isdigit())) and (1 <= int(month) <= 12): return True @@ -2674,18 +2541,13 @@ def valid_month(month: Union[int, str]) -> bool: def timestamp_to_timestring(timestamp: int) -> str: - """ - Parse timestamp from db query to string representing date and time - """ + """Parse timestamp from db query to string representing date and time""" return datetime.datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') def convert_timeframe(timeframe: str) -> str: - """ - Convert timeframe - - """ + """Convert timeframe""" convertion = { 'tag': 'day', @@ -2704,6 +2566,7 @@ def convert_timeframe(timeframe: str) -> str: return convertion.get(timeframe) + def convert_duration(timeframe: str, window_dur: str) -> int: """Convert duration""" @@ -2752,9 +2615,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: def count_to_start(count: int = 0, end: int = 0): - """ - Converts given count and end ot start and end - """ + """Converts given count and end ot start and end""" return end + count, end @@ -2820,76 +2681,58 @@ def get_end(timeframe: str, end: int) -> datetime: def year_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of year of today minus x years - """ + """provides datetime of beginning of year of today minus x years""" _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) return _dt.replace(month=1, day=1) - relativedelta(years=delta) def year_end(delta: int = 0) -> datetime: - """ - provides datetime of end of year of today minus x years - """ + """provides datetime of end of year of today minus x years""" return year_beginning(delta) + relativedelta(years=1) def month_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of month minus x month - """ + """provides datetime of beginning of month minus x month""" _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) return _dt.replace(day=1) - relativedelta(months=delta) def month_end(delta: int = 0) -> datetime: - """ - provides datetime of end of month minus x month - """ + """provides datetime of end of month minus x month""" return month_beginning(delta) + relativedelta(months=1) def week_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of week minus x weeks - """ + """provides datetime of beginning of week minus x weeks""" _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) def week_end(delta: int = 0) -> datetime: - """ - provides datetime of end of week minus x weeks - """ + 
"""provides datetime of end of week minus x weeks""" return week_beginning(delta) + relativedelta(days=6) def day_beginning(delta: int = 0) -> datetime: - """ - provides datetime of beginning of today minus x days - """ + """provides datetime of beginning of today minus x days""" return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) def day_end(delta: int = 0) -> datetime: - """ - provides datetime of end of today minus x days - """ + """provides datetime of end of today minus x days""" return day_beginning(delta) + relativedelta(days=1) def datetime_to_timestamp(dt: datetime) -> int: - """ - Provides timestamp from given datetime - """ + """Provides timestamp from given datetime""" return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp()) @@ -2907,6 +2750,7 @@ def to_float(arg) -> Union[float, None]: except (ValueError, TypeError): return None + def to_int_float(arg): try: return int(arg) @@ -2914,6 +2758,16 @@ def to_int_float(arg): return to_float(arg) +def timeframe_to_updatecyle(timeframe): + + lookup = {'day': 'daily', + 'week': 'weekly', + 'month': 'monthly', + 'year': 'yearly'} + + return lookup.get(timeframe) + + ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] diff --git a/db_addon/item_attributes.py b/db_addon/item_attributes.py index 860c0435d..75c44316b 100644 --- a/db_addon/item_attributes.py +++ b/db_addon/item_attributes.py @@ -23,21 +23,39 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributs_master.py +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributes_master.py # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] -ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] +ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 
'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_0d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_stunde_30_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] +ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request', 'minmax', 'minmax_last', 'verbrauch', 'zaehlerstand'] ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 
'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] +VERBRAUCH_ATTRIBUTES_ONCHANGE = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr'] +VERBRAUCH_ATTRIBUTES_TIMEFRAME = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2'] +VERBRAUCH_ATTRIBUTES_ROLLING = ['verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 'verbrauch_rolling_12m_jahr_minus1'] +VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM = ['verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] +ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +HISTORIE_ATTRIBUTES_ONCHANGE = ['minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max'] +HISTORIE_ATTRIBUTES_LAST = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg'] +HISTORIE_ATTRIBUTES_TIMEFRAME = ['minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 
'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] -ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] +TAGESMITTEL_ATTRIBUTES_ONCHANGE = ['tagesmitteltemperatur_heute'] +TAGESMITTEL_ATTRIBUTES_TIMEFRAME = ['tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] +ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_0d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_stunde_30_0d', 'serie_tagesmittelwert_tag_stunde_30d'] +SERIE_ATTRIBUTES_MINMAX = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d'] +SERIE_ATTRIBUTES_ZAEHLERSTAND = ['serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m'] +SERIE_ATTRIBUTES_VERBRAUCH = ['serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m'] +SERIE_ATTRIBUTES_SUMME = ['serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] +SERIE_ATTRIBUTES_MITTEL_D = ['serie_tagesmittelwert_0d'] +SERIE_ATTRIBUTES_MITTEL_H = ['serie_tagesmittelwert_stunde_0d'] +SERIE_ATTRIBUTES_MITTEL_H1 = ['serie_tagesmittelwert_stunde_30_0d'] +SERIE_ATTRIBUTES_MITTEL_D_H = ['serie_tagesmittelwert_tag_stunde_30d'] ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] -ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] +ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request', 'minmax', 'minmax_last', 'verbrauch', 'zaehlerstand'] diff --git a/db_addon/item_attributes_master.py 
b/db_addon/item_attributes_master.py index 5bcd34761..4c08b54ec 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -27,116 +27,122 @@ ITEM_ATTRIBUTES = { 'db_addon_fct': { - 'verbrauch_heute': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, - 'verbrauch_woche': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, - 'verbrauch_monat': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, - 'verbrauch_jahr': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, - 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)'}, - 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, - 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, - 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, - 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, - 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, - 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, - 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, - 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, - 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, - 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, - 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, - 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, - 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, - 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, - 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, - 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': 
False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, - 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, - 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, - 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, - 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, - 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, - 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, - 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren'}, - 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, - 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, - 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, - 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, - 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, - 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, - 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, - 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, - 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, - 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, - 'zaehlerstand_jahr_minus2': 
{'cat': 'zaehler', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, - 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, - 'minmax_last_24h_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, - 'minmax_last_24h_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, - 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, - 'minmax_last_7d_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, - 'minmax_heute_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, - 'minmax_heute_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, - 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, - 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, - 'minmax_woche_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, - 'minmax_woche_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, - 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, - 
'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, - 'minmax_monat_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, - 'minmax_monat_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, - 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, - 'minmax_jahr_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, - 'minmax_jahr_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, - 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, - 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, - 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, - 
'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, - 'serie_minmax_monat_min_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_max_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_woche_min_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_max_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_tag_min_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_max_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, - 'serie_verbrauch_tag_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, - 'serie_verbrauch_woche_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, - 'serie_verbrauch_monat_18m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, - 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, - 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, - 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, - 'serie_waermesumme_monat_24m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, - 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, - 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. 
zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, - 'general_oldest_value': {'cat': 'gen', 'item_type': 'num', 'calc': 'no', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, - 'general_oldest_log': {'cat': 'gen', 'item_type': 'list', 'calc': 'no', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, - 'kaeltesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, - 'waermesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, - 'gruenlandtempsumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, - 'tagesmitteltemperatur': {'cat': 'complex', 'item_type': 'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, - 'wachstumsgradtage': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, - 'db_request': {'cat': 'complex', 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, + 'verbrauch_heute': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, + 'verbrauch_woche': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, + 'verbrauch_monat': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, + 'verbrauch_jahr': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, + 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)'}, + 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, + 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, + 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, + 'verbrauch_heute_minus5': 
{'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, + 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, + 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, + 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, + 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, + 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, + 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, + 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, + 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, + 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, + 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, + 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, + 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, + 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, + 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, + 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, + 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, + 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 
'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, + 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, + 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren'}, + 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, + 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, + 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, + 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, + 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, + 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, + 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, + 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, + 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, + 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, + 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, + 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, + 'minmax_last_24h_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, + 'minmax_last_24h_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, + 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 
'description': 'durchschnittlicher Wert der letzten 24h'}, + 'minmax_last_7d_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, + 'minmax_heute_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, + 'minmax_heute_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, + 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, + 'minmax_woche_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, + 'minmax_woche_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, + 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche 
(aktuelle Woche -1)'}, + 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, + 'minmax_monat_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, + 'minmax_monat_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, + 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, + 'minmax_jahr_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, + 'minmax_jahr_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, + 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, + 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, + 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 
'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, + 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, + 'serie_minmax_monat_min_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_max_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_woche_min_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_max_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_tag_min_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_max_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, + 'serie_verbrauch_tag_30d': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, + 'serie_verbrauch_woche_30w': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, + 'serie_verbrauch_monat_18m': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, + 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, + 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, + 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, + 'serie_waermesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, + 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'sub_cat': 
'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, + 'serie_tagesmittelwert_0d': {'cat': 'serie', 'sub_cat': 'mittel_d', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Tagesmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'sub_cat': 'mittel_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_30_0d': {'cat': 'serie', 'sub_cat': 'mittel_h1', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'sub_cat': 'mittel_d_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, + 'general_oldest_value': {'cat': 'gen', 'sub_cat': None, 'item_type': 'num', 'calc': 'no', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_log': {'cat': 'gen', 'sub_cat': None, 'item_type': 'list', 'calc': 'no', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, + 'kaeltesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'waermesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'gruenlandtempsumme': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, + 'tagesmitteltemperatur': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, + 'wachstumsgradtage': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, + 'db_request': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, + 'minmax': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)'}, + 'minmax_last': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, 
start=mandatory, end=mandatory)'}, + 'verbrauch': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory end=mandatory)'}, + 'zaehlerstand': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)'}, }, 'db_addon_info': { 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': 'no', 'params': False, 'description': 'Version der verbundenen Datenbank'}, @@ -198,10 +204,28 @@ def export_item_attributes_py(): ATTRS['ALL_YEARLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'yearly'}) ATTRS['ALL_NEED_PARAMS_ATTRIBUTES'] = get_attrs(sub_dict={'params': True}) ATTRS['ALL_VERBRAUCH_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'verbrauch'}) + ATTRS['VERBRAUCH_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'onchange'}) + ATTRS['VERBRAUCH_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'timeframe'}) + ATTRS['VERBRAUCH_ATTRIBUTES_ROLLING'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'rolling'}) + ATTRS['VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'jahrzeit'}) ATTRS['ALL_ZAEHLERSTAND_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'zaehler'}) + ATTRS['ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'zaehler', 'sub_cat': 'timeframe'}) ATTRS['ALL_HISTORIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'wertehistorie'}) + ATTRS['HISTORIE_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'onchange'}) + ATTRS['HISTORIE_ATTRIBUTES_LAST'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'last'}) + ATTRS['HISTORIE_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'timeframe'}) ATTRS['ALL_TAGESMITTEL_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'tagesmittel'}) + ATTRS['TAGESMITTEL_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'onchange'}) + ATTRS['TAGESMITTEL_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'timeframe'}) ATTRS['ALL_SERIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'serie'}) + ATTRS['SERIE_ATTRIBUTES_MINMAX'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'minmax'}) + ATTRS['SERIE_ATTRIBUTES_ZAEHLERSTAND'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'zaehler'}) + ATTRS['SERIE_ATTRIBUTES_VERBRAUCH'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'verbrauch'}) + ATTRS['SERIE_ATTRIBUTES_SUMME'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'summe'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_D'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_d'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_H'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_H1'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h1'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_D_H'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_d_h'}) ATTRS['ALL_GEN_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'gen'}) ATTRS['ALL_COMPLEX_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'complex'}) @@ -213,7 +237,7 @@ def export_item_attributes_py(): # write avm_data_types for attr, alist in ATTRS.items(): with open(FILENAME_ATTRIBUTES, "a") as f: - print (f'{attr} = {alist!r}', file=f) + print(f'{attr} = {alist!r}', file=f) 
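# Illustrative sketch: get_attrs() is called above but its body is not part of this hunk.
# Judging from the sub_dict filters and the generated lists in item_attributes.py, it
# presumably collects all attribute names from ITEM_ATTRIBUTES['db_addon_fct'] whose
# definition contains every key/value pair of sub_dict. A minimal version under that
# assumption could look like:
#
#   def get_attrs(sub_dict: dict) -> list:
#       return [attr for attr, definition in ITEM_ATTRIBUTES['db_addon_fct'].items()
#               if all(definition.get(key) == value for key, value in sub_dict.items())]
#
# e.g. get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'rolling'}) would then yield the
# four 'verbrauch_rolling_12m_*' names exported to VERBRAUCH_ATTRIBUTES_ROLLING above.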
print('item_attributes.py successfully created!') @@ -271,4 +295,4 @@ def update_plugin_yaml_item_attributes(): if __name__ == '__main__': export_item_attributes_py() for attribute in ITEM_ATTRIBUTES: - update_plugin_yaml_item_attributes() \ No newline at end of file + update_plugin_yaml_item_attributes() diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 5c3dc1c71..662e2aab2 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.1.3 # Plugin version (must match the version specified in __init__.py) + version: 1.1.4 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -170,7 +170,9 @@ item_attributes: - serie_zaehlerstand_monat_18m - serie_waermesumme_monat_24m - serie_kaeltesumme_monat_24m + - serie_tagesmittelwert_0d - serie_tagesmittelwert_stunde_0d + - serie_tagesmittelwert_stunde_30_0d - serie_tagesmittelwert_tag_stunde_30d - general_oldest_value - general_oldest_log @@ -180,6 +182,10 @@ item_attributes: - tagesmitteltemperatur - wachstumsgradtage - db_request + - minmax + - minmax_last + - verbrauch + - zaehlerstand valid_list_description: # NOTE: valid_list_description is automatically created by using item_attributes_master.py - Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages) @@ -282,6 +288,8 @@ item_attributes: - Zählerstand am Monatsende der letzten 18 Monate - monatliche Wärmesumme der letzten 24 Monate - monatliche Kältesumme der letzten 24 Monate + - Tagesmittelwert für den aktuellen Tag + - Stundenmittelwert für den aktuellen Tag - Stundenmittelwert für den aktuellen Tag - Stundenmittelwert pro Tag der letzten 30 Tage (bspw. 
zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde - Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut @@ -292,6 +300,10 @@ item_attributes: - Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer) - Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur) - 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)' + - 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)' + - 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory, end=mandatory)' + - 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory end=mandatory)' + - 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)' valid_list_item_type: # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py - num @@ -396,6 +408,8 @@ item_attributes: - list - list - list + - list + - list - num - list - num @@ -404,6 +418,10 @@ item_attributes: - list - num - list + - num + - num + - num + - num valid_list_calculation: # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py - onchange @@ -508,6 +526,8 @@ item_attributes: - monthly - daily - daily + - daily + - daily - no - no - daily @@ -516,6 +536,10 @@ item_attributes: - daily - daily - group + - timeframe + - timeframe + - timeframe + - timeframe db_addon_info: type: str description: From 14be08c6afa92106879571bb3af3aff035e9932f Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sat, 24 Jun 2023 08:34:10 +0200 Subject: [PATCH 141/775] AVM Plugin: Improve Fritz_Home - bump to version 2.0.7 - improve login; if sid has expired a new one will be generated automatically - improve get requests - deactivate scheduler for check_sid; Plugin will re-login of sid has expired - improve check if request response is string prior to parse element from string - update handle_updated_item in FritzHome to handle all avm_data_types - implement new method to update all items connected to ain - restructure, simplify, harmonize coding in FritzHome - bugfix in debug_log in cyclic_item_update --- avm/__init__.py | 905 ++++++++++++++++++++++++++---------------------- avm/plugin.yaml | 2 +- 2 files changed, 491 insertions(+), 416 deletions(-) mode change 100755 => 100644 avm/__init__.py mode change 100755 => 100644 avm/plugin.yaml diff --git a/avm/__init__.py b/avm/__init__.py old mode 100755 new mode 100644 index 17bd74d6f..bcbaf2cdc --- a/avm/__init__.py +++ b/avm/__init__.py @@ -32,7 +32,6 @@ from abc import ABC from enum import IntFlag -from json.decoder import JSONDecodeError from typing import Dict from typing import Union from xml.etree import ElementTree @@ -46,6 +45,7 @@ ERROR_COUNT_TO_BE_BLACKLISTED = 2 + def NoAttributeError(func): @functools.wraps(func) def wrapper(*args, **kwargs): @@ -80,6 +80,28 @@ def to_int(arg) -> int: return 0 +def to_float(arg) -> float: + try: + return float(arg) + except (ValueError, TypeError): + 
return 0 + + +def to_int_to_bool(arg) -> bool: + arg = to_int(arg) + try: + return bool(arg) + except (ValueError, TypeError): + return False + + +def clamp(n, min_n, max_n): + try: + return max(min(max_n, n), min_n) + except (ValueError, TypeError): + return 0 + + def walk_nodes(root, nodes: list): data = root for atype, arg in nodes: @@ -102,13 +124,13 @@ class AVM(SmartPlugin): """ Main class of the Plugin. Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.6' + PLUGIN_VERSION = '2.0.7' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_saturation' # ToDo: FritzHome.handle_updated_item: implement 'hsv' - # ToDo: FritzHome.handle_updated_item: implement 'hs' + # ToDo: FritzHome.handle_updated_item: implement 'color' def __init__(self, sh): """Initializes the plugin.""" @@ -208,8 +230,6 @@ def create_cyclic_scheduler(target: str, items: list, fct, offset: int) -> bool: # add scheduler for updating items create_cyclic_scheduler(target='aha', items=self.get_aha_items(), fct=self.fritz_home.cyclic_item_update, offset=4) self.fritz_home.cyclic_item_update(read_all=True) - # add scheduler for checking validity of session id - self.scheduler_add('check_sid', self.fritz_home.check_sid, prio=5, cycle=900, offset=30) if self.monitoring_service: self.monitoring_service.set_callmonitor_item_values_initially() @@ -232,7 +252,6 @@ def stop(self): if self.fritz_home: self.scheduler_remove('poll_aha') - self.scheduler_remove('check_sid') self.fritz_home.logout() self.fritz_home = None @@ -801,7 +820,7 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): cmd, args, wlan_index = _dispatcher[avm_data_type] self._set_fritz_device(cmd, args, wlan_index) if self.debug_log: - self.logger.debug(f"Setting AVM Device with successful.") + self.logger.debug(f"Setting command successfully sent.") # handle readafterwrite if readafterwrite: @@ -1824,8 +1843,8 @@ def get_hosts_list(self, identifier_list: list = None, filter_dict: dict = None) # process identifier list identifier_list_checked = [] for identifier in identifier_list: - if identifier.lower() in identifiers: - identifier_list_checked.append(identifier.lower()) + if identifier.lower() in identifiers: + identifier_list_checked.append(identifier.lower()) if not identifier_list_checked: return hosts @@ -1887,6 +1906,14 @@ class FritzHome: HOMEAUTO_ROUTE = '/webservices/homeautoswitch.lua' INTERNET_STATUS_ROUTE = '/internet/inetstat_monitor.lua?sid=' + # Definition of valid value ranges + HUE_RANGE = {'min': 0, 'max': 359} + SATURATION_RANGE = {'min': 0, 'max': 255} + LEVEL_RANGE = {'min': 0, 'max': 255} + LEVEL_PERCENTAGE_RANGE = {'min': 0, 'max': 100} + COLOR_TEMP_RANGE = {'min': 2700, 'max': 6500} + HKR_TEMP_RANGE = {'min': 8, 'max': 28, 'discrete': {0: 253, 100: 254}} + def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_instance): """ Init the Class FritzHome @@ -1895,9 +1922,8 @@ def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_in self.logger = self._plugin_instance.logger self.debug_log = self._plugin_instance.debug_log self.logger.debug("Init Fritzhome") - - self.host = host self.ssl = ssl + self.prefixed_host = self._get_prefixed_host(host) self.verify = verify self.user = user self.password = password @@ -1905,33 +1931,29 @@ def __init__(self, host, ssl, verify, user, password, log_entry_count, plugin_in self._sid = None 
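# Illustrative sketch: the clamp() helper and the *_RANGE class constants introduced in this
# commit suggest that incoming set-values are bounded to the valid device range before being
# sent. Assuming that intent (the actual call sites are outside this hunk), a requested HKR
# target temperature could be normalised roughly like this:
#
#   def normalise_hkr_temperature(temperature) -> float:
#       # to_float() turns non-numeric input into 0; clamp() bounds the result to 8..28
#       return clamp(to_float(temperature),
#                    FritzHome.HKR_TEMP_RANGE['min'],
#                    FritzHome.HKR_TEMP_RANGE['max'])
#
# The 'discrete' mapping of HKR_TEMP_RANGE (0 -> 253, 100 -> 254) would need separate
# handling, since those encoded values lie outside the continuous range.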
self._devices: Dict[str, FritzHome.FritzhomeDevice] = {} self._templates: Dict[str, FritzHome.FritzhomeTemplate] = {} - self._logged_in = False self._session = requests.Session() self._timeout = 10 - self.connected = False self.last_request = None self.log_entry_count = log_entry_count - # Login + # Login to test, if login is possible and get first sid self.login() - if not self._logged_in: - raise IOError("Error 'Login failed'") + + # item-related methods def cyclic_item_update(self, read_all: bool = False): - """ - Update smarthome item values using information from dict '_aha_devices' - """ - if not self._logged_in: - self.logger.warning("No connection to FritzDevice via AHA-HTTP-Interface. No update of item values possible.") - return + """Update aha item values using information""" + + start_time = int(time.time()) # first update aha device data if not self.update_devices(): self.logger.warning("Update of AHA-Devices not successful. No update of item values possible.") return - # get current_time - current_time = int(time.time()) + update_time = int(time.time()) + self.logger.debug(f"Update of AHA-Device data took {update_time - start_time}s") + item_count = 0 # iterate over items and get data for item in self.item_list(): @@ -1948,7 +1970,7 @@ def cyclic_item_update(self, read_all: bool = False): continue # check if item is already due - if next_time > current_time: + if next_time > update_time: # self.logger.debug(f"Item={item.path()} is not due, yet.") continue @@ -1965,6 +1987,7 @@ def cyclic_item_update(self, read_all: bool = False): avm_data_type = avm_data_type[len('set_'):] # get value + item_count += 1 value = getattr(self.get_devices_as_dict().get(ain), avm_data_type, None) if value is None: self.logger.debug(f'Value for attribute={avm_data_type} at device with AIN={ain} to set Item={item.path()} is not available/None.') @@ -1974,55 +1997,99 @@ def cyclic_item_update(self, read_all: bool = False): item(value, self._plugin_instance.get_fullname()) # set next due date - item_config['next_update'] = current_time + cycle + item_config['next_update'] = update_time + cycle + + self.logger.debug(f"Update of {item_count} AHA-Items took {int(time.time()) - update_time}s") + + def update_items_of_ain(self, ain): + """Update all items connected to an ain""" + + # get relevant items + items = self._plugin_instance.get_item_list(filter_key='index', filter_value=ain) + + # update aha device data + if not self.update_devices(): + self.logger.warning("Update of AHA-Devices not successful. No update of item values possible.") + return + + # iterate over items and get data + for item in items: + # get item config + item_config = self._plugin_instance.get_item_config(item) + avm_data_type = item_config['avm_data_type'] + + self.logger.debug(f"Item={item.path()} with avm_data_type={avm_data_type} and ain={ain} will be updated") + + # Attributes that are write-only commands with no corresponding read commands are excluded from status updates via update black list: + if avm_data_type in ALL_ATTRIBUTES_WRITEONLY: + self.logger.info(f"avm_data_type '{avm_data_type}' is in update blacklist. 
Item will not be updated") + continue + + # Remove "set_" prefix to set corresponding r/o or r/w item to returned value: + if avm_data_type.startswith('set_'): + avm_data_type = avm_data_type[len('set_'):] + + # get value + value = getattr(self.get_devices_as_dict().get(ain), avm_data_type, None) + if value is None: + self.logger.debug(f'Value for attribute={avm_data_type} at device with AIN={ain} to set Item={item.path()} is not available/None.') + continue + + # set item + item(value, self._plugin_instance.get_fullname()) def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): """ Updated Item will be processed and value communicated to AVM Device """ - # define set method per avm_data_type - _dispatcher = {'window_open': (self.set_window_open, self.get_window_open), - 'target_temperature': (self.set_target_temperature, self.get_target_temperature), - 'hkr_boost': (self.set_boost, self.get_boost), - 'simpleonoff': (self.set_state, self.get_state), - 'level': (self.set_level, self.get_level), - 'levelpercentage': (self.set_level_percentage, self.get_level_percentage), - 'switch_state': (self.set_switch_state, self.get_switch_state), - 'switch_toggle': (self.set_switch_state_toggle, self.get_switch_state), - 'colortemperature': (self.set_color_temp, self.get_color_temp), - 'hue': (self.set_color_discrete, self.get_hue), - 'unmapped_saturation':(self.set_unmapped_saturation, self.get_unmapped_saturation), - 'unmapped_hue': (self.set_unmapped_hue, self.get_unmapped_hue) - } - # get AIN - _ain = self._plugin_instance.get_item_config(item)['index'] + # define set method per avm_data_type // all avm_data_types of AHA_WO_ATTRIBUTES + AHA_RW_ATTRIBUTES must be defined here + _dispatcher = {'window_open': (self.set_window_open, {'seconds': item()}, self.get_window_open), + 'target_temperature': (self.set_target_temperature, {'temperature': item()}, self.get_target_temperature), + 'hkr_boost': (self.set_boost, {'seconds': item()}, self.get_boost), + 'simpleonoff': (self.set_state, {'state': item()}, self.get_state), + 'level': (self.set_level, {'level': item()}, self.get_level), + 'levelpercentage': (self.set_level_percentage, {'level': item()}, self.get_level_percentage), + 'switch_state': (self.set_switch_state, {'state': item()}, self.get_switch_state), + 'switch_toggle': (self.set_switch_state_toggle, {}, self.get_switch_state), + 'colortemperature': (self.set_color_temp, {'temperature': item(), 'duration': 1}, self.get_color_temp), + 'hue': (self.set_hue, {'hue': int(), 'duration': 1}, self.get_hue), + 'saturation': (self.set_saturation, {'hue': int(), 'duration': 1}, self.get_saturation), + 'unmapped_hue': (self.set_unmapped_hue, {'hue': item()}, self.get_unmapped_hue), + 'unmapped_saturation': (self.set_unmapped_saturation, {'saturation': item()}, self.get_unmapped_saturation), + 'color': (self.set_color, {'hs': item(), 'duration': 1, 'mapped': False}, self.get_color), + } - # adapt avm_data_type by removing 'set_' + # Remove "set_" prefix of AHA_WO_ATTRIBUTES Items: if avm_data_type.startswith('set_'): - avm_data_type = avm_data_type[4:] - - # logs message for upcoming/limited functionality - if avm_data_type == 'hue' or avm_data_type == 'saturation': - # Full RGB hue will be supported by Fritzbox approximately from Q2 2022 on: - # Currently, only use default RGB colors that are supported by default (getcolordefaults) - # These default colors have given saturation values. - self.logger.info("Full RGB hue will be supported by Fritzbox approximately from Q2 2022. 
Limited functionality.") + avm_data_type = avm_data_type[len('set_'):] - # Call set method per avm_data_type + setter, setter_params, getter = _dispatcher[avm_data_type] to_be_set_value = item() + + # get AIN + ain = self._plugin_instance.get_item_config(item)['index'] + + # add ain to setter_params + setter_params.update({'ain': ain}) + try: - _dispatcher[avm_data_type][0](_ain, to_be_set_value) + result = setter(**setter_params) except KeyError: self.logger.error(f"{avm_data_type} is not defined to be updated.") + result = False + + self.logger.debug(f"handle_updated_item: result={result}") + + # Call update of all items connected to that ain + self.update_items_of_ain(ain) # handle readafterwrite if readafterwrite: wait = float(readafterwrite) time.sleep(wait) try: - set_value = _dispatcher[avm_data_type][1](_ain) - # only handle avm_data_type not present in _dispatcher + set_value = getter(ain) except KeyError: self.logger.error(f"{avm_data_type} is not defined to be read.") else: @@ -2033,236 +2100,224 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): if self.debug_log: self.logger.debug(f"Setting AVM Device defined in Item={item.path()} with avm_data_type={avm_data_type} to value={to_be_set_value} successful!") - def get_value_by_ain_and_avm_data_type(self, ain, avm_data_type): - """ - get value for given ain and avm_data_type - """ - - # get device sub-dict from dict - device = self.get_device_by_ain(ain) - # device = self._devices.get(ain, None) - - if device is None: - self.logger.warning(f'No values for device with AIN={ain} available.') - return - - # return value - return getattr(device, avm_data_type, None) - def item_list(self): return self._plugin_instance.get_aha_items() - def _request(self, url: str, params=None, result: str = 'text'): + # request-related methods + + def _request(self, url: str, params: dict = None): """ - Send a request with parameters. + Send a get request with parameters and return response as tuple with (content_type, response) :param url: URL to be requested :param params: params for request - :param result: type of result - :return: request response + :return: tuple with content_type, response (as text or json depending on content_type) """ - try: - rsp = self._session.get(url, params=params, timeout=self._timeout, verify=self.verify) - except requests.exceptions.Timeout: - if self._timeout < 31: - self._timeout += 5 - self.logger.info(f"request timed out. timeout extended by 5s to {self._timeout}") - else: - self.logger.debug(f"get request timeout.") - return - except Exception as e: - self.logger.error(f"Error during GET request {e} occurred.") - else: - status_code = rsp.status_code - if status_code == 200: - if self.debug_log: - self.logger.debug("Sending HTTP request successful") - if result == 'json': - try: - data = rsp.json() - except JSONDecodeError: - self.logger.error('Error occurred during parsing request response to json') - else: - return data - else: - return rsp.text.strip() - elif status_code == 403: - if self.debug_log: - self.logger.debug("HTTP access denied. Try to get new Session ID.") - else: - self.logger.error(f"HTTP request error code: {status_code}") - rsp.raise_for_status() - if self.debug_log: - self.logger.debug(f"Url: {url}") - self.logger.debug(f"Params: {params}") - def _login_request(self, username=None, challenge_response=None): - """ - Send a login request with parameters. 
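For reference, this is how the reworked three-part dispatcher above is consumed: each avm_data_type maps to a (setter, keyword-arguments, getter) triple, the item's AIN is merged into the keyword arguments, and the getter is only used for the optional read-after-write check. A condensed sketch, assuming an item bound to avm_data_type 'target_temperature' (the AIN is a placeholder):

    setter, setter_params, getter = _dispatcher['target_temperature']
    # -> (self.set_target_temperature, {'temperature': item()}, self.get_target_temperature)
    setter_params.update({'ain': '09995 0000123'})
    result = setter(**setter_params)        # i.e. set_target_temperature(temperature=..., ain=...)
    set_value = getter('09995 0000123')     # re-read when readafterwrite is configured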
- """ - url = self._get_prefixed_host() + self.LOGIN_ROUTE - params = {} - if username: - params["username"] = username - if challenge_response: - params["response"] = challenge_response - plain = self._request(url, params) - dom = ElementTree.fromstring(to_str(plain)) - sid = dom.findtext("SID") - challenge = dom.findtext("Challenge") - blocktime = to_int(dom.findtext("BlockTime")) + def get_sid(): + """ + Generator to provide the sid two times in case the first try failed. + This can happen on an invalide or expired sid. In this case the sid gets regenerated for the second try. + """ + yield self._sid + self.login() + yield self._sid - return sid, challenge, blocktime + for sid in get_sid(): + params['sid'] = sid - def _logout_request(self): - """ - Send a logout request. - """ - url = self._get_prefixed_host() + self.LOGIN_ROUTE - params = {"logout": "1", "sid": self._sid} + try: + response = self._session.get(url=url, params=params, verify=self.verify) + except requests.exceptions.Timeout: + if self._timeout < 31: + self._timeout += 5 + msg = f"HTTP request timed out. Timeout extended by 5s to {self._timeout}" + else: + msg = "HTTP request timed out." + self.logger.info(msg) + raise IOError(msg) + except requests.exceptions.ConnectionError: + raise IOError("ConnectionError during HTTP request.") + + if response.status_code == 200: + content_type = response.headers.get('content-type') + if 'json' in content_type: + return content_type, response.json() + return content_type, response.text + + elif response.status_code == 403: + msg = f"{response.status_code!r} Forbidden: 'Session-ID ungültig oder Benutzer nicht autorisiert'" + self.logger.info(msg) + raise IOError(msg) + + elif response.status_code == 400: + msg = f"{response.status_code!r} HTTP Request fehlerhaft, Parameter sind ungültig, nicht vorhanden oder Wertebereich überschritten" + self.logger.info(f"Error {msg}, params: {params}") + raise IOError(msg) - self._request(url, params) + else: + msg = f"Error {response.status_code!r} Internal Server Error: 'Interner Fehler'" + self.logger.info(f"{msg}, params: {params}") + raise IOError(msg) - @staticmethod - def _calculate_md5_response(challenge: str, password: str) -> str: - """ - Calculate the response for a challenge using legacy MD5 - """ - response = challenge + "-" + password - # the legacy response needs utf_16_le encoding - response = response.encode("utf_16_le") - md5_sum = hashlib.md5() - md5_sum.update(response) - response = challenge + "-" + md5_sum.hexdigest() - return response + def aha_request(self, cmd: str, ain: str = None, param: dict = None, result_type: str = None): + """Send an AHA request. - @staticmethod - def _calculate_pbkdf2_response(challenge: str, password: str) -> str: - """ - Calculate the response for a given challenge via PBKDF2 - """ - challenge_parts = challenge.split("$") - # Extract all necessary values encoded into the challenge - iter1 = int(challenge_parts[1]) - salt1 = bytes.fromhex(challenge_parts[2]) - iter2 = int(challenge_parts[3]) - salt2 = bytes.fromhex(challenge_parts[4]) - # Hash twice, once with static salt... - hash1 = hashlib.pbkdf2_hmac("sha256", password.encode(), salt1, iter1) - # Once with dynamic salt. 
- hash2 = hashlib.pbkdf2_hmac("sha256", hash1, salt2, iter2) - return f"{challenge_parts[4]}${hash2.hex()}" + :param cmd: CMD to be sent + :param ain: AktorIdentifikationsNummer + :param param: Dict having needed params + :param result_type: type the return should be transformed to; implemented 'bool', 'int', 'float', None = default resulting in str + :return: returns transformed result if request was successful else None - def _aha_request(self, cmd, ain=None, param=None, rf='str'): """ - Send an AHA request. - """ - url = self._get_prefixed_host() + self.HOMEAUTO_ROUTE - params = {"switchcmd": cmd, "sid": self._sid} + url = f"{self.prefixed_host}{self.HOMEAUTO_ROUTE}" + + params = {"switchcmd": cmd, 'ain': ain} if param: params.update(param) - if ain: - params["ain"] = ain - plain = self._request(url, params) + try: + header, content = self._request(url=url, params=params) + except IOError as e: + self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") + return None - if plain == "inval": - self.logger.error("InvalidError") - return + content_type, charset = [item.strip() for item in header.split(";")] + # encoding = charset.split("=")[-1].strip() - if plain is None: - self.logger.debug("Plain is None") - return + if content_type == 'text/xml': + self.last_request = content + return ElementTree.fromstring(content) - if rf == 'bool': - return bool(plain) - elif rf == 'str': - return str(plain) - elif rf == 'int': - return int(plain) - elif rf == 'float': - return float(plain) - else: - return plain + elif content_type == 'text/plain': + if content == "inval": + self.logger.error(f"InvalidError for params={params}") + return None + + if result_type == 'bool': + return to_int_to_bool(content) + elif result_type == 'int': + return to_int(content) + elif result_type == 'float': + return to_float(content) + else: + return content - def login(self): + def login(self) -> None: """Login and get a valid session ID.""" + + def login_request(username=None): + """Send a login request with parameters.""" + + url = f"{self.prefixed_host}{self.LOGIN_ROUTE}" + + params = {} + if username: + params["username"] = username + if challenge_hash: + params["response"] = challenge_hash + + with self._session.get(url, params=params, verify=self.verify) as response: + dom = ElementTree.fromstring(response.text) + + return dom.findtext("SID"), dom.findtext("Challenge"), to_int(dom.findtext("BlockTime")) + + def get_pbkdf2_hash(): + """Returns the vendor-recommended pbkdf2 challenge hash.""" + _, iterations_1, salt_1, iterations_2, salt_2 = challenge.split('$') + static_hash = hashlib.pbkdf2_hmac("sha256", self.password.encode(), bytes.fromhex(salt_1), int(iterations_1)) + dynamic_hash = hashlib.pbkdf2_hmac("sha256", static_hash, bytes.fromhex(salt_2), int(iterations_2)) + return f"{salt_2}${dynamic_hash.hex()}" + + def get_md5_hash() -> str: + """Returns the legathy md5 challenge hash.""" + md5_sum = hashlib.md5(f"{challenge}-{self.password}".encode("utf_16_le")) + return f"{challenge}-{md5_sum.hexdigest()}" + self.logger.debug("AHA login called") - try: - (sid, challenge, blocktime) = self._login_request() - if blocktime > 0: - self.logger.debug(f"Waiting for {blocktime} seconds...") - time.sleep(blocktime) - - if sid == "0000000000000000": - if challenge.startswith('2$'): - self.logger.debug("PBKDF2 supported") - challenge_response = self._calculate_pbkdf2_response(challenge, self.password) - else: - self.logger.debug("Falling back to MD5") - challenge_response = 
self._calculate_md5_response(challenge, self.password) - (sid2, challenge, blocktime) = self._login_request(username=self.user, challenge_response=challenge_response) - if sid2 == "0000000000000000": - self.logger.debug(f"Login failed for sid2={sid2}") - self.logger.warning(f"Login failed for user '{self.user}'") - return - self._sid = sid2 - except Exception as e: - self.logger.error(f"LoginError {e!r} occurred for user {self.user}") - else: - self._logged_in = True - def logout(self): - """ - Logout. - """ + challenge_hash = None + sid, challenge, blocktime = login_request() + + if blocktime > 0: + self.logger.debug(f"Waiting for {blocktime} seconds...") + time.sleep(blocktime) + + if sid == "0000000000000000": + if challenge.startswith('2$'): + self.logger.debug("AHA Login: PBKDF2 supported") + challenge_hash = get_pbkdf2_hash() + else: + self.logger.debug("AHA Login: Falling back to MD5") + challenge_hash = get_md5_hash() + + sid2, challenge, blocktime = login_request(username=self.user) + + if sid2 == "0000000000000000": + self.logger.warning(f"Login failed for user '{self.user}'") + raise IOError(f"Error 'AHA Login failed for user '{self.user}''") + + self._sid = sid2 + + def logout(self) -> None: + """Logout.""" + self.logger.debug("AHA logout called") - self._logout_request() + + url = f"{self.prefixed_host}{self.LOGIN_ROUTE}" + params = {"logout": "1", "sid": self._sid} + + with self._session.get(url, params=params, verify=self.verify) as response: + if response.status_code == 200: + self.logger.info('AHA logout successful') + else: + self.logger.info('AHA logout NOT successful') + self._sid = None - self._logged_in = False - def check_sid(self): + def check_sid(self) -> bool: """ Check if known Session ID is still valid """ self.logger.debug("check_sid called") - url = self._get_prefixed_host() + self.LOGIN_ROUTE + + url = f"{self.prefixed_host}{self.LOGIN_ROUTE}" params = {"sid": self._sid} - plain = self._request(url, params) - dom = ElementTree.fromstring(to_str(plain)) - sid = dom.findtext("SID") + + with self._session.get(url, params=params, verify=self.verify) as response: + sid = ElementTree.fromstring(response.text).findtext("SID") if sid == "0000000000000000": self.logger.warning("Session ID is invalid. Try to generate new one.") - self.login() + return False else: self.logger.info("Session ID is still valid.") + return True - def _get_prefixed_host(self): + def _get_prefixed_host(self, host): """ Choose the correct protocol prefix for the host. 
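Both challenge-response flavours used by login() above can be exercised standalone; the challenge string and password below are made up, but the hashing steps mirror the ones in get_pbkdf2_hash() and get_md5_hash():

    import hashlib

    def pbkdf2_response(challenge: str, password: str) -> str:
        _, iter1, salt1, iter2, salt2 = challenge.split('$')
        static = hashlib.pbkdf2_hmac('sha256', password.encode(), bytes.fromhex(salt1), int(iter1))
        dynamic = hashlib.pbkdf2_hmac('sha256', static, bytes.fromhex(salt2), int(iter2))
        return f'{salt2}${dynamic.hex()}'

    def md5_response(challenge: str, password: str) -> str:
        digest = hashlib.md5(f'{challenge}-{password}'.encode('utf_16_le')).hexdigest()
        return f'{challenge}-{digest}'

    print(pbkdf2_response('2$60000$0a1b2c3d$6000$4e5f6a7b', 'secret'))
    print(md5_response('1a2b3c4d', 'secret'))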
Supports three input formats: - https://(requests use strict certificate validation by default) - - http:// (unecrypted) + - http:// (unencrypted) - (unencrypted) """ - host = self.host if not host.startswith("https://") and not host.startswith("http://"): if self.ssl: - host = "https://" + host + host = f"https://{host}" else: - host = "http://" + host + host = f"http://{host}" return host - # device-related commands + # device-related methods def update_devices(self): """ Updating AHA Devices respective dictionary """ - self.logger.info("Updating AHA Devices ...") + self.logger.info("Updating Data of AHA Devices ...") elements = self.get_device_elements() if elements is None: @@ -2270,10 +2325,10 @@ def update_devices(self): for element in elements: if element.attrib["identifier"] in self._devices.keys(): - self.logger.debug("Updating already existing Device " + element.attrib["identifier"]) + self.logger.debug(f"Updating already existing Device '{element.attrib['identifier']}'") self._devices[element.attrib["identifier"]].update_from_node(element) else: - self.logger.info("Adding new Device " + element.attrib["identifier"]) + self.logger.info(f"Adding new Device '{element.attrib['identifier']}'") device = FritzHome.FritzhomeDevice(self, node=element) self._devices[device.ain] = device return True @@ -2282,13 +2337,9 @@ def _get_listinfo_elements(self, entity_type): """ Get the DOM elements for the entity list. """ - plain = self._aha_request("get" + entity_type + "listinfos") - - if plain is None: - return - self.last_request = to_str(plain) - dom = ElementTree.fromstring(to_str(plain)) - return dom.findall(entity_type) + result = self.aha_request(f"get{entity_type}listinfos") + if result: + return result.findall(entity_type) def get_device_elements(self): """ @@ -2296,7 +2347,7 @@ def get_device_elements(self): """ return self._get_listinfo_elements("device") - def get_device_element(self, ain): + def get_device_element(self, ain: str): """ Get the DOM element for the specified device. """ @@ -2315,60 +2366,59 @@ def get_devices(self): """ return list(self.get_devices_as_dict().values()) - def get_devices_as_dict(self): + def get_devices_as_dict(self, enforce_update: bool = False): """ Get the list of all known devices. """ - self.logger.debug("get_devices_as_dict called and forces update_devices") - if not self._devices: + if enforce_update or not self._devices: self.update_devices() return self._devices - def get_device_by_ain(self, ain): + def get_device_by_ain(self, ain: str): """ Return a device specified by the AIN. """ return self.get_devices_as_dict().get(ain) - def get_device_present(self, ain): + def get_device_present(self, ain: str): """ Get the device presence. """ - return self._aha_request("getswitchpresent", ain=ain, rf='bool') + return self.aha_request("getswitchpresent", ain=ain, result_type='bool') - def get_device_name(self, ain): + def get_device_name(self, ain: str): """ Get the device name. """ - return self._aha_request("getswitchname", ain=ain) + return self.aha_request("getswitchname", ain=ain) # switch-related commands - def get_switch_state(self, ain): + def get_switch_state(self, ain: str): """ Get the switch state. """ - return self._aha_request("getswitchstate", ain=ain, rf='bool') + return self.aha_request("getswitchstate", ain=ain, result_type='bool') - def set_switch_state_on(self, ain): + def set_switch_state_on(self, ain: str): """ Set the switch to on state. 
""" - return self._aha_request("setswitchon", ain=ain, rf='bool') + return self.aha_request("setswitchon", ain=ain, result_type='bool') - def set_switch_state_off(self, ain): + def set_switch_state_off(self, ain: str): """ Set the switch to off state. """ - return self._aha_request("setswitchoff", ain=ain, rf='bool') + return self.aha_request("setswitchoff", ain=ain, result_type='bool') - def set_switch_state_toggle(self, ain): + def set_switch_state_toggle(self, ain: str): """ Toggle the switch state. """ - return self._aha_request("setswitchtoggle", ain=ain, rf='bool') + return self.aha_request("setswitchtoggle", ain=ain, result_type='bool') - def set_switch_state(self, ain, state): + def set_switch_state(self, ain: str, state): """ Set the switch to on state. """ @@ -2377,39 +2427,43 @@ def set_switch_state(self, ain, state): else: return self.set_switch_state_off(ain) - def get_switch_power(self, ain): + def get_switch_power(self, ain: str): """ Get the switch power consumption in W. """ - value = self._aha_request("getswitchpower", ain=ain, rf='int') - try: + value = self.aha_request("getswitchpower", ain=ain, result_type='int') + + if isinstance(value, int): return value / 1000 # value in 0.001W - except TypeError: - pass - def get_switch_energy(self, ain): + def get_switch_energy(self, ain: str): """ Get the switch energy in Wh. """ - return self._aha_request("getswitchenergy", ain=ain, rf='int') + return self.aha_request("getswitchenergy", ain=ain, result_type='int') - # thermostat-related commands + # thermostat-related methods - def get_temperature(self, ain): + def get_temperature(self, ain: str): """ Get the device temperature sensor value. + + Temperatur-Wert in 0,1 °C, negative und positive Werte möglich, Bsp. „200“ bedeutet 20°C """ - value = self._aha_request("gettemperature", ain=ain, rf='int') - try: - return value / 10.0 - except TypeError: - pass + value = self.aha_request("gettemperature", ain=ain, result_type='int') + + if isinstance(value, int): + return value / 10 - def _get_temperature(self, ain, name): + def _get_temperature(self, ain: str, cmd: str): """ Get temperature raw value + + Temperatur-Wert in 0,5 °C: + Wertebereich: 16 – 56 mit 8 bis 28°C, 16 <= 8°C, 17 = 8,5°C...... 56 >= 28°C + 254 = ON , 253 = OFF """ - plain = to_int(self._aha_request(name, ain=ain, rf='int')) + plain = self.aha_request(cmd=cmd, ain=ain, result_type='int') return (plain - 16) / 2 + 8 def get_target_temperature(self, ain): @@ -2418,7 +2472,7 @@ def get_target_temperature(self, ain): """ return self._get_temperature(ain, "gethkrtsoll") - def set_target_temperature(self, ain, temperature): + def set_target_temperature(self, ain: str, temperature: float): """ Set the thermostate target temperature. """ @@ -2429,7 +2483,7 @@ def set_target_temperature(self, ain, temperature): elif (temp > max(range(16, 56))) and (temp != 253): temp = 254 - self._aha_request("sethkrtsoll", ain=ain, param={'param': temp}) + self.aha_request("sethkrtsoll", ain=ain, param={'param': temp}) def set_window_open(self, ain, seconds): """ @@ -2445,16 +2499,16 @@ def set_window_open(self, ain, seconds): if seconds > 0: endtimestamp = int(time.time() + seconds) if endtimestamp >= 0: - self._aha_request("sethkrwindowopen", ain=ain, param={'endtimestamp': endtimestamp}) + return self.aha_request("sethkrwindowopen", ain=ain, param={'endtimestamp': endtimestamp}, result_type='int') @NoAttributeError - def get_window_open(self, ain): + def get_window_open(self, ain: str): """ Get windows open. 
""" return self.get_devices_as_dict()[ain].window_open - def set_boost(self, ain, seconds): + def set_boost(self, ain: str, seconds): """ Set the thermostate to boost. """ @@ -2468,22 +2522,22 @@ def set_boost(self, ain, seconds): if seconds > 0: endtimestamp = int(time.time() + seconds) if endtimestamp >= 0: - self._aha_request("sethkrboost", ain=ain, param={'endtimestamp': endtimestamp}) + return self.aha_request(cmd="sethkrboost", ain=ain, param={'endtimestamp': endtimestamp}, result_type='int') @NoKeyOrAttributeError - def get_boost(self, ain): + def get_boost(self, ain: str): """ Get boost status. """ return self.get_devices_as_dict()[ain].hkr_boost - def get_comfort_temperature(self, ain): + def get_comfort_temperature(self, ain: str): """ Get the thermostate comfort temperature. """ return self._get_temperature(ain, "gethkrkomfort") - def get_eco_temperature(self, ain): + def get_eco_temperature(self, ain: str): """ Get the thermostate eco temperature. """ @@ -2493,7 +2547,7 @@ def get_device_statistics(self, ain): """ Get device statistics. """ - return self._aha_request("getbasicdevicestats", ain=ain) + return self.aha_request("getbasicdevicestats", ain=ain) # Switch-related commands @@ -2501,165 +2555,179 @@ def set_state_off(self, ain): """ Set the switch/actuator/lightbulb to on state. """ - self._aha_request("setsimpleonoff", ain=ain, param={'onoff': 0}) + return self.aha_request("setsimpleonoff", ain=ain, param={'onoff': 0}, result_type='bool') def set_state_on(self, ain): """ Set the switch/actuator/lightbulb to on state. """ - self._aha_request("setsimpleonoff", ain=ain, param={'onoff': 1}) + return self.aha_request("setsimpleonoff", ain=ain, param={'onoff': 1}, result_type='bool') - def set_state_toggle(self, ain): + def set_state_toggle(self, ain: str): """ Toggle the switch/actuator/lightbulb state. """ - self._aha_request("setsimpleonoff", ain=ain, param={'onoff': 2}) + return self.aha_request("setsimpleonoff", ain=ain, param={'onoff': 2}, result_type='bool') - def set_state(self, ain, state): + def set_state(self, ain: str, state): """ Set the switch/actuator/lightbulb to a state. """ if state: - self.set_state_on(ain) + return self.set_state_on(ain) else: - self.set_state_off(ain) + return self.set_state_off(ain) @NoKeyOrAttributeError - def get_state(self, ain): + def get_state(self, ain: str): """ Get the switch/actuator/lightbulb to a state. """ return self.get_devices_as_dict()[ain].switch_state - # Level/Dimmer-related commands + # Level/Dimmer-related methods - def set_level(self, ain, level): + def set_level(self, ain: str, level: int): """ Set level/brightness/height in interval [0,255]. """ - if level < 0: - level = 0 # 0% - elif level > 255: - level = 255 # 100 % - self._aha_request("setlevel", ain=ain, param={'level': int(level)}) + if not self.LEVEL_RANGE['min'] <= level <= self.LEVEL_RANGE['max']: + level = clamp(level, self.LEVEL_RANGE['min'], self.LEVEL_RANGE['max']) + self.logger.warning(f"set_level: level value must be between {self.LEVEL_RANGE['min']} and {self.LEVEL_RANGE['max']}; hue will be set to {level}") + else: + level = int(level) + + return self.aha_request("setlevel", ain=ain, param={'level': level}, result_type='int') @NoKeyOrAttributeError - def get_level(self, ain): + def get_level(self, ain: str): """ get level/brightness/height in interval [0,255]. 
""" return self.get_devices_as_dict()[ain].level - def set_level_percentage(self, ain, level): + def set_level_percentage(self, ain: str, level: int): """ Set level/brightness/height in interval [0,100]. """ - # Scale percentage to [0,255] interval - self.set_level(ain, int(level * 2.55)) + if not self.LEVEL_PERCENTAGE_RANGE['min'] <= level <= self.LEVEL_PERCENTAGE_RANGE['max']: + level = clamp(level, self.LEVEL_PERCENTAGE_RANGE['min'], self.LEVEL_PERCENTAGE_RANGE['max']) + self.logger.warning(f"set_level_percentage: level value must be between {self.LEVEL_PERCENTAGE_RANGE['min']} and {self.LEVEL_PERCENTAGE_RANGE['max']}; hue will be set to {level}") + else: + level = int(level) + + return self.aha_request("setlevelpercentage", ain=ain, param={'level': level}, result_type='int') @NoKeyOrAttributeError - def get_level_percentage(self, ain): + def get_level_percentage(self, ain: str): """ get level/brightness/height in interval [0,100]. """ return self.get_devices_as_dict()[ain].levelpercentage - # Color-related commands + # Color-related methods - def _get_colordefaults(self, ain): + def _get_colordefaults(self, ain: str): """ Get colour defaults """ - plain = self._aha_request("getcolordefaults", ain=ain) - return ElementTree.fromstring(to_str(plain)) + return self.aha_request("getcolordefaults", ain=ain) - def get_unmapped_hue(self, ain): - """ - get unmapped hue value represented in hsv domain as integer value between [0,359]. - """ - self.logger.warning("Debug: get_unmapped_hue called.") - try: - value = self.get_devices_as_dict()[ain].hue - self.logger.warning(f"Debug: get_unmapped_hue is {value}.") - return value - except AttributeError: - self.logger.warning("Debug: get_unmapped_hue attribute error exception") - pass - except Exception as e: - self.logger.warning(f"get_unmapped_hue: exception: {e}") + @NoKeyOrAttributeError + def get_hue(self, ain: str) -> int: + """get hue value represented in hsv domain as integer value between [0,359].""" + return self.get_devices_as_dict()[ain].hue + def set_hue(self, ain: str, hue: int) -> bool: + """set hue value (0-359)""" + self.logger.debug(f"set_unmapped_hue called with value={hue}") + + if (hue < 0) or hue > 359: + self.logger.error(f"set_unmapped_hue, hue value must be between 0 and 359") + return False - def set_unmapped_hue(self, ain, hue): + saturation = getattr(self.get_devices_as_dict()[ain], 'saturation', None) + if saturation: + self.logger.debug(f"set_hue: set_unmapped_hue, hue {hue}, saturation is {saturation}") + # saturation variable is scaled to 0-100. Scale to 0-255 for AVM AHA interface + self.set_color(ain, [hue, saturation], mapped=False) + return True + + @NoKeyOrAttributeError + def get_saturation(self, ain: str) -> int: + """get saturation as integer value between 0-100.""" + return self.get_devices_as_dict()[ain].saturation + + def set_saturation(self, ain: str, saturation: int) -> bool: """ - set hue value (0-359) + set saturation value + saturation defined in range (0-100) """ - self.logger.warning(f"Debug: set_unmapped_hue called with hue {hue}") + self.logger.debug(f" set_unmapped_saturation is called with value={saturation} defined in range 0-100") + + if (saturation < 0) or saturation > 100: + self.logger.error(f"set_unmapped_saturation: value must be between 0 and 100") + return False + + hue = getattr(self.get_devices_as_dict()[ain], 'hue', None) + if hue: + self.logger.debug(f"success: set_saturation: value is {saturation} (0-100), hue {hue}") + # Plugin handels saturation value in the range of 0-100. 
AVM function expect saturation to be within 0-255. Therefore, scale value: + self.set_color(ain, [hue, int(saturation*2.55)], mapped=False) + + @NoKeyOrAttributeError + def get_unmapped_hue(self, ain: str) -> int: + """get unmapped hue value represented in hsv domain as integer value between [0,359].""" + self.logger.debug("get_unmapped_hue called.") + return self.get_devices_as_dict()[ain].unmapped_hue + + def set_unmapped_hue(self, ain: str, hue: int) -> bool: + """set hue value (0-359)""" + self.logger.debug(f"set_unmapped_hue called with value={hue}") if (hue < 0) or hue > 359: self.logger.error(f"set_unmapped_hue, hue value must be between 0 and 359") return False - try: - # saturation already scaled to 0-100: - saturation = self.get_devices_as_dict()[ain].saturation - self.get_devices_as_dict()[ain].hue = hue - except AttributeError: - self.logger.warning(f"set_unmapped_hue exception occurred") - pass - except Exception as e: - self.logger.warning(f"set_unmapped_hue: exception: {e}") + saturation = getattr(self.get_devices_as_dict()[ain], 'unmapped_saturation', None) + if not saturation: + self.logger.info(f"set_unmapped_hue: unable to get value for 'unmapped_saturation', try to use value for 'saturation'") + saturation = getattr(self.get_devices_as_dict()[ain], 'saturation', None) - else: - self.logger.warning(f"Debug: Success: set_unmapped_hue, hue {hue}, saturation is {saturation}") + if saturation: + self.logger.debug(f"set_unmapped_hue: set_unmapped_hue, hue {hue}, saturation is {saturation}") # saturation variable is scaled to 0-100. Scale to 0-255 for AVM AHA interface - self.set_color(ain, [hue, int(saturation*2.55)], duration=0, mapped=False) - + self.set_color(ain, [hue, saturation], mapped=False) + return True - def get_unmapped_saturation(self, ain): - """ - get saturation as integer value between 0-100. 
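Saturation is kept on a 0..100 scale inside the plugin, while the AHA interface works with 0..255; set_saturation() and set_unmapped_saturation() therefore multiply by 2.55 before sending, and the node parser further down divides by 2.55 when reading. A minimal sketch with made-up values:

    def saturation_to_aha(percent: int) -> int:
        return int(percent * 2.55)    # 0..100 -> 0..255, e.g. 50 -> 127

    def saturation_from_aha(raw: int) -> int:
        return int(raw / 2.55)        # 0..255 -> 0..100, e.g. 127 -> 49 (integer truncation)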
- """ + @NoKeyOrAttributeError + def get_unmapped_saturation(self, ain: str) -> int: + """get saturation as integer value between 0-100.""" self.logger.warning("Debug: get_unmapped_saturation called.") - try: - value = self.get_devices_as_dict()[ain].saturation - self.logger.warning(f"Debug: get_unmapped_saturation is {value} (range 0-100).") - return value - except AttributeError: - self.logger.warning("Debug: get_unmapped_saturation attribute error xception") - pass - except Exception as e: - self.logger.warning(f"get_unmapped_saturation, exception: {e}") - + return self.get_devices_as_dict()[ain].unmapped_saturation - def set_unmapped_saturation(self, ain, saturation): + def set_unmapped_saturation(self, ain: str, saturation: int) -> bool: """ set saturation value saturation defined in range (0-100) """ - self.logger.warning(f"Debug: set_unampped_saturation is called with value {saturation} defined in range 0-100") + self.logger.debug(f" set_unmapped_saturation is called with value={saturation} defined in range 0-100") if (saturation < 0) or saturation > 100: self.logger.error(f"set_unmapped_saturation: value must be between 0 and 100") return False - try: - hue = self.get_devices_as_dict()[ain].hue - self.get_devices_as_dict()[ain].saturation = saturation + hue = getattr(self.get_devices_as_dict()[ain], 'unmapped_hue', None) + if not hue: + self.logger.info(f"set_unmapped_saturation: unable to get value for 'unmapped_hue', try to use value for 'hue'") + hue = getattr(self.get_devices_as_dict()[ain], 'hue', None) + if hue: + self.logger.debug(f"success: set_unmapped_saturation: value is {saturation} (0-100), hue {hue}") + # Plugin handels saturation value in the range of 0-100. AVM function expect saturation to be within 0-255. Therefore, scale value: + self.set_color(ain, [hue, int(saturation*2.55)], mapped=False) - except AttributeError: - self.logger.warning(f"set_unamapped_saturation attribute error exception occurred") - pass - except Exception as e: - self.logger.warning(f"set_unmapped_saturation, exception: {e}") - - else: - self.logger.warning(f"Debug: success: set_unmapped_saturation: value is {saturation} (0-100), hue {hue}") - # Plugin handles saturation value in the range of 0-100. AVM function expect saturation to be within 0-255. Therefore, scale value: - self.logger.warning(f"Debug: set_unmapped_saturation, after scaling: saturation is {int(saturation*2.55)}, hue {hue}") - self.set_color(ain, [hue, int(saturation*2.55)], duration=0, mapped=False) - - def get_colors(self, ain): + def get_colors(self, ain: str) -> dict: """ Get colors (HSV-space) supported by this lightbulb. """ @@ -2679,6 +2747,11 @@ def get_colors(self, ain): colors[name] = values return colors + @NoKeyOrAttributeError + def get_color(self, ain: str) -> list: + """get hue, saturation value as list""" + return self.get_devices_as_dict()[ain].color + def set_color(self, ain, hsv, duration=0, mapped=True): """ Set hue and saturation. @@ -2694,7 +2767,7 @@ def set_color(self, ain, hsv, duration=0, mapped=True): by AVM firmwareversion since approximately Q2 2022. 
It supports every combination if hue/saturation/level """ - success = False + params = { 'hue': int(hsv[0]), 'saturation': int(hsv[1]), @@ -2719,10 +2792,10 @@ def set_color(self, ain, hsv, duration=0, mapped=True): return success if mapped: - success = self._aha_request("setcolor", ain=ain, param=params) + success = self.aha_request(cmd="setcolor", ain=ain, param=params) else: # undocumented API method for free color selection - success = self._aha_request("setunmappedcolor", ain=ain, param=params) + success = self.aha_request(cmd="setunmappedcolor", ain=ain, param=params) self.logger.warning(f"Debug set color in mapped {mapped} mode: success: {success}") return success @@ -2771,14 +2844,7 @@ def set_color_discrete(self, ain, hue, duration=0): self.logger.error(f'setcolor hue out of range (hue={hue})') return - return self._aha_request("setcolor", ain=ain, param=param, rf='bool') - - @NoKeyOrAttributeError - def get_hue(self, ain): - """ - Get Hue value. - """ - return self.get_devices_as_dict()[ain].hue + return self.aha_request("setcolor", ain=ain, param=param, result_type='int') def get_color_temps(self, ain): """ @@ -2790,26 +2856,32 @@ def get_color_temps(self, ain): temperatures.append(temp.get("value")) return temperatures - def set_color_temp(self, ain, temperature, duration=1): + @NoKeyOrAttributeError + def get_color_temp(self, ain: str) -> int: + """ + Get color temperature. + """ + return self.get_devices_as_dict()[ain].colortemperature + + def set_color_temp(self, ain: str, temperature: int, duration: int = 1): """ Set color temperature. temperature: temperature element obtained from get_temperatures() duration: Speed of change in seconds, 0 = instant """ - params = { + + if not self.COLOR_TEMP_RANGE['min'] <= temperature <= self.COLOR_TEMP_RANGE['max']: + temperature = clamp(temperature, self.COLOR_TEMP_RANGE['min'], self.COLOR_TEMP_RANGE['max']) + self.logger.warning(f"set_color_temp: temperature value must be between {self.COLOR_TEMP_RANGE['min']} and {self.COLOR_TEMP_RANGE['max']}; temperature will be set to {temperature}") + + param = { 'temperature': int(temperature), 'duration': int(duration) * 10 } - self._aha_request("setcolortemperature", ain=ain, param=params) - @NoKeyOrAttributeError - def get_color_temp(self, ain): - """ - Get color temperature. - """ - return self.get_devices_as_dict()[ain].colortemperature + return self.aha_request("setcolortemperature", ain=ain, param=param, result_type='int') - # Template-related commands + # Template-related methods def update_templates(self): """ @@ -2825,7 +2897,7 @@ def update_templates(self): self.logger.info(f"Updating already existing Template {element.attrib['identifier']}") self._templates[element.attrib["identifier"]]._update_from_node(element) else: - self.logger.info("Adding new Template " + element.attrib["identifier"]) + self.logger.info(f"Adding new Template {element.attrib['identifier']}") template = FritzHome.FritzhomeTemplate(self, node=element) self._templates[template.ain] = template except TypeError: @@ -2853,37 +2925,38 @@ def get_templates_as_dict(self): return self._templates @NoAttributeError - def get_template_by_ain(self, ain): + def get_template_by_ain(self, ain: str): """ Return a template specified by the AIN. """ return self.get_templates_as_dict()[ain] - def apply_template(self, ain): + def apply_template(self, ain: str): """ Applies a template. 
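The set_color_temp() change a few hunks above multiplies the duration (given in seconds) by 10 before sending, which suggests the AHA call expects tenths of a second; the Kelvin value is clamped to COLOR_TEMP_RANGE (2700..6500) first. Illustrative parameters for a 'setcolortemperature' request as aha_request() would assemble them (the AIN is a placeholder, and the sid is appended later by _request()):

    params = {
        'switchcmd': 'setcolortemperature',
        'ain': '13077 0000001',      # placeholder AIN
        'temperature': 4200,         # Kelvin, within COLOR_TEMP_RANGE
        'duration': 2 * 10,          # 2 s expressed in 1/10 s
    }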
""" - self._aha_request("applytemplate", ain=ain) + return self.aha_request("applytemplate", ain=ain) - # Log-related commands + # Log-related methods - def get_device_log_from_lua(self): + def get_device_log_from_lua(self) -> Union[None, list]: """ Gets the Device Log from the LUA HTTP Interface via LUA Scripts (more complete than the get_device_log TR-064 version). - :return: Array of Device Log Entries (text, type, category, timestamp, date, time) + :return: Array of Device Log Entries (text, type, category, timestamp, date, time) if response, else None """ - if not self._logged_in: - self.login() - url = self._get_prefixed_host() + self.LOG_ROUTE - params = {"sid": self._sid} + url = f"{self.prefixed_host}{self.LOG_ROUTE}" # get data - data = self._request(url, params, result='json') + try: + header, content = self._request(url=url, params={}) + except IOError as e: + self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") + return None - if isinstance(data, dict): - data = data.get('mq_log') + if isinstance(content, dict): + data = content.get('mq_log') if data and isinstance(data, list): # cut data if needed if self.log_entry_count: @@ -2901,23 +2974,24 @@ def get_device_log_from_lua(self): log_list.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) return log_list - def get_device_log_from_lua_separated(self): + def get_device_log_from_lua_separated(self) -> Union[None, list]: """ Gets the Device Log from the LUA HTTP Interface via LUA Scripts (more complete than the get_device_log TR-064 version). - :return: list of device logs list (datetime, log, type, category) + :return: list of device logs list (datetime, log, type, category) if response, else None """ - if not self._logged_in: - self.login() - url = self._get_prefixed_host() + self.LOG_SEPARATE_ROUTE - params = {"sid": self._sid} + url = f"{self.prefixed_host}{self.LOG_SEPARATE_ROUTE}" # get data - data = self._request(url, params, result='json') + try: + header, content = self._request(url=url, params={}) + except IOError as e: + self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") + return None - if isinstance(data, dict): - data = data.get('mq_log') + if isinstance(content, dict): + data = content.get('mq_log') if data and isinstance(data, list): if self.log_entry_count: data = data[:self.log_entry_count] @@ -3081,7 +3155,7 @@ def __repr__(self): def update(self): """Update the device values.""" - self.logger.warning("update @ FritzhomeDeviceBase called") + self.logger.debug("update @ FritzhomeDeviceBase called") self._fritz.update_devices() def _update_from_node(self, node): @@ -3580,6 +3654,7 @@ class FritzhomeDeviceColor(FritzhomeDeviceBase): unmapped_hue = None unmapped_saturation = None colortemperature = None + color = None logger = logging.getLogger(__name__) @@ -3623,20 +3698,20 @@ def _update_color_from_node(self, node): try: self.hue = get_node_value_as_int(colorcontrol_element, "hue") - self.logger.dbglow(f"received hue value {self.hue}") + self.logger.debug(f"received hue value {self.hue}") except ValueError: self.hue = 0 try: value = get_node_value_as_int(colorcontrol_element, "saturation") self.saturation = int(value/2.55) - self.logger.dbglow(f"received unmapped saturation value {value}, scaled to {self.saturation}") + self.logger.debug(f"received unmapped saturation value {value}, scaled to {self.saturation}") except ValueError: self.saturation = 0 try: self.unmapped_hue = get_node_value_as_int(colorcontrol_element, "unmapped_hue") - 
self.logger.dbglow(f"received unmapped hue value {self.unmapped_hue}") + self.logger.debug(f"received unmapped hue value {self.unmapped_hue}") except ValueError: self.logger.warning(f"exception in unmapped_hue extraction") self.unmapped_hue = 0 @@ -3644,7 +3719,7 @@ def _update_color_from_node(self, node): try: value = get_node_value_as_int(colorcontrol_element, "unmapped_saturation") self.unmapped_saturation = int(value/2.55) - self.logger.dbglow(f"received unmapped saturation value {value}, scaled to {self.unmapped_saturation}") + self.logger.debug(f"received unmapped saturation value {value}, scaled to {self.unmapped_saturation}") except ValueError: self.unmapped_saturation = 0 except Exception as e: diff --git a/avm/plugin.yaml b/avm/plugin.yaml old mode 100755 new mode 100644 index bdb75794b..25ff19001 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.6 # Plugin version (must match the version specified in __init__.py) + version: 2.0.7 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin From 40bbec8df0c756f3fced5c1a7d1c9d345d7d07d5 Mon Sep 17 00:00:00 2001 From: Bernd Meiners Date: Thu, 29 Jun 2023 12:58:34 +0200 Subject: [PATCH 142/775] knx plugin: prevent send actions to bus while plugin is not running --- knx/__init__.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/knx/__init__.py b/knx/__init__.py index 790d811a1..62c4288ef 100755 --- a/knx/__init__.py +++ b/knx/__init__.py @@ -157,6 +157,11 @@ def _check_projectfile_destination(self): self.logger.warning(self.translate("could not create directory {}").format(self.projectpath.parent)) def _send(self, data): + if not self.alive: + # do not send anything while plugin is not really running + self.logger.warning(self.translate('send called while self.alive is False, will NOT send anything to KNX')) + return + if len(data) < 2 or len(data) > 0xffff: if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(self.translate('Illegal data size: {}').format(repr(data))) @@ -257,6 +262,8 @@ def handle_connect(self, client): :param client: the calling client for adaption purposes :type client: TCP_client """ + if not self.alive: + self.logger.warning(self.translate('handle_connect called while self.alive is False')) # let the knxd use its group address cache enable_cache = bytearray([0, KNXD.CACHE_ENABLE]) From 30498efb842704b270f59268f7648b9046bc56fe Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Jul 2023 01:02:08 +0200 Subject: [PATCH 143/775] LMS Plugin: fix SDP_standalone issue --- lms/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lms/__init__.py b/lms/__init__.py index e3d5dfca4..7363d52d3 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -26,6 +26,7 @@ import sys if __name__ == '__main__': + builtins.SDP_standalone = True class SmartPlugin(): pass @@ -36,6 +37,8 @@ class SmartPluginWebIf(): BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) sys.path.insert(0, BASE) +else: + builtins.SDP_standalone = False from lib.model.sdp.globals import (CUSTOM_SEP, PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_RECURSIVE, PLUGIN_ATTR_CONN_TERMINATOR) from lib.model.smartdeviceplugin 
import SmartDevicePlugin, Standalone From 16e5f81e5bc7b97291394a96875111860a413fa4 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Jul 2023 01:02:57 +0200 Subject: [PATCH 144/775] LMS Plugin: add additional commands, add option for standby_item_path --- lms/__init__.py | 7 ++++ lms/commands.py | 8 ++++- lms/datatypes.py | 7 ++++ lms/plugin.yaml | 84 ++++++++++++++++++++++++++++++++++++++++++++++-- 4 files changed, 103 insertions(+), 3 deletions(-) diff --git a/lms/__init__.py b/lms/__init__.py index 7363d52d3..34f5cc040 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -82,6 +82,13 @@ def trigger_read(command): if not custom: return + if command == 'player.info.playlists.names': + self.logger.debug(f"Got command playlist names {command} data {data} value {value} custom {custom} by {by}") + trigger_read('player.playlist.id') + trigger_read('player.playlist.name') + + if command == 'playlist.rename': + trigger_read('info.playlists.names') # set alarm if command == 'player.control.alarms': # This does not really work currently. The created string is somehow correct. diff --git a/lms/commands.py b/lms/commands.py index 099448d56..ed1cfc582 100755 --- a/lms/commands.py +++ b/lms/commands.py @@ -13,7 +13,7 @@ 'database': { 'rescan': { 'start': {'read': False, 'write': True, 'write_cmd': 'rescan {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'cmd_settings': {'valid_list': ['playlists', 'onlinelibrary', 'external', 'full']}, 'item_attrs': {'attributes': {'remark': 'playlists|onlinelibrary|external|full|full file://some/path'}, 'custom1': ''}}, - 'running': {'read': True, 'write': False, 'read_cmd': 'rescan ?', 'item_type': 'bool', 'dev_datatype': 'LMSRescan', 'reply_pattern': 'rescan (.*)', 'item_attrs': {'cycle': '60', 'initial': True, 'custom1': ''}}, + 'running': {'read': True, 'write': False, 'read_cmd': 'rescan ?', 'item_type': 'bool', 'dev_datatype': 'LMSRescan', 'reply_pattern': 'rescan (.*)', 'item_attrs': {'cycle': '120', 'initial': True, 'custom1': ''}}, 'progress': {'read': True, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'scanner notify progress:(.*)', 'item_attrs': {'custom1': ''}}, 'runningtime': {'read': True, 'read_cmd': 'rescanprogress totaltime', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'rescanprogress totaltime .* rescan:([0-9]{2}:[0-9]{2}:[0-9]{2})', 'item_attrs': {'custom1': ''}}, 'fail': {'read': True, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'rescanprogress totaltime rescan:0 lastscanfailed:(.*)', 'item_attrs': {'custom1': ''}}, @@ -53,6 +53,7 @@ 'sleep': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} sleep ?', 'write_cmd': '{CUSTOM_ATTR1} sleep {VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} sleep (.*[^?])', 'item_attrs': {'initial': True}} }, 'playlist': { + 'rename': {'read': False, 'write': False, 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} playlists rename\s+(.*)'}, 'repeat': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist repeat ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist repeat {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist repeat {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist repeat:{LOOKUP}'], 'lookup': 'REPEAT', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Playlist'}, 'lookup_item': True}}, 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': 
'{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist shuffle:{LOOKUP}'], 'lookup': 'SHUFFLE', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', '{CUSTOM_PATTERN1} status(?:.*)playlist index:(\d*[^\s]+)', '{CUSTOM_PATTERN1} prefset server currentSong (\d+)$', '{CUSTOM_PATTERN1} playlist jump (\d*)', '{CUSTOM_PATTERN1} play (\d*)'], 'item_attrs': {'initial': True}}, @@ -78,6 +79,11 @@ 'customskip': {'read': False, 'write': True, 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} customskip setfilter filter{VALUE}.cs.xml', 'dev_datatype': 'str', 'item_attrs': {'attributes': {'cache': True}}} }, 'info': { + 'playlists': { + 'count': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} playlists', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} playlists\s+count:(\d+)', 'item_attrs': {'initial': True}}, + 'names': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} playlists name', 'item_type': 'dict', 'dev_datatype': 'LMSPlaylists', 'reply_pattern': r'{CUSTOM_PATTERN1} playlists name\s+(.*)\s+count:(?:\d+)', 'item_attrs': {'initial': True}}, + + }, 'status': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} status', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} status\s+(.*)', 'item_attrs': {'initial': True}}, 'connected': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} connected ?', 'item_type': 'bool', 'dev_datatype': 'LMSConnection', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:connected|client) (\d|disconnect|reconnect)', '{CUSTOM_PATTERN1} status(?:.*)player_connected:([^\s]+)']}, 'ip': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} ip ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} ip (.*)', '{CUSTOM_PATTERN1} status(?:.*)player_ip:([^:\s]+)']}, diff --git a/lms/datatypes.py b/lms/datatypes.py index e331daf50..10acc5d3d 100755 --- a/lms/datatypes.py +++ b/lms/datatypes.py @@ -2,6 +2,7 @@ # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab import lib.model.sdp.datatypes as DT +import re # handle feedback if rescan is running or not @@ -10,6 +11,12 @@ def get_shng_data(self, data, type=None, **kwargs): return True if data in ["1", "done"] else False +class DT_LMSPlaylists(DT.Datatype): + def get_shng_data(self, data, type=None, **kwargs): + _playlists = list(filter(None,re.split(r'id:|\sid:|\splaylist:', data))) + return dict(zip(*[iter(_playlists)]*2)) + + class DT_LMSConnection(DT.Datatype): def get_shng_data(self, data, type=None, **kwargs): return True if data in ["1", "reconnect"] else False diff --git a/lms/plugin.yaml b/lms/plugin.yaml index d6b17ef8b..9d28a304e 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -15,6 +15,14 @@ plugin: parameters: + standby_item_path: + type: str + default: '' + + description: + de: Item-Pfad für das Standby-Item + en: item path for standby switch item + host: type: str mandatory: true @@ -253,7 +261,7 @@ item_structs: - database - database.rescan sqb_read_initial: true - sqb_read_cycle: '60' + sqb_read_cycle: '120' sqb_custom1: '' progress: @@ -585,6 +593,12 @@ 
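The new DT_LMSPlaylists datatype turns the captured part of a 'playlists name' reply into an id-to-name dict: the string is split on the id:/playlist: tags and the remaining tokens are paired up. A self-contained sketch (the reply fragment is made up for illustration):

    import re

    data = 'id:11 playlist:Morning id:42 playlist:Jazz'
    tokens = list(filter(None, re.split(r'id:|\sid:|\splaylist:', data)))
    playlists = dict(zip(*[iter(tokens)] * 2))
    print(playlists)    # {'11': 'Morning', '42': 'Jazz'}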
item_structs: enforce_updates: true sqb_read_group_trigger: player.playlist + rename: + type: str + sqb_command: player.playlist.rename + sqb_read: false + sqb_write: false + repeat: type: str sqb_command: player.playlist.repeat @@ -779,6 +793,35 @@ item_structs: enforce_updates: true sqb_read_group_trigger: player.info + playlists: + + read: + type: bool + enforce_updates: true + sqb_read_group_trigger: player.info.playlists + + count: + type: num + sqb_command: player.info.playlists.count + sqb_read: true + sqb_write: false + sqb_read_group: + - player + - player.info + - player.info.playlists + sqb_read_initial: true + + names: + type: dict + sqb_command: player.info.playlists.names + sqb_read: true + sqb_write: false + sqb_read_group: + - player + - player.info + - player.info.playlists + sqb_read_initial: true + status: type: str sqb_command: player.info.status @@ -978,7 +1021,7 @@ item_structs: - ALL.database - ALL.database.rescan sqb_read_initial: true - sqb_read_cycle: '60' + sqb_read_cycle: '120' sqb_custom1: '' progress: @@ -1325,6 +1368,12 @@ item_structs: enforce_updates: true sqb_read_group_trigger: ALL.player.playlist + rename: + type: str + sqb_command: player.playlist.rename + sqb_read: false + sqb_write: false + repeat: type: str sqb_command: player.playlist.repeat @@ -1525,6 +1574,37 @@ item_structs: enforce_updates: true sqb_read_group_trigger: ALL.player.info + playlists: + + read: + type: bool + enforce_updates: true + sqb_read_group_trigger: ALL.player.info.playlists + + count: + type: num + sqb_command: player.info.playlists.count + sqb_read: true + sqb_write: false + sqb_read_group: + - ALL + - ALL.player + - ALL.player.info + - ALL.player.info.playlists + sqb_read_initial: true + + names: + type: dict + sqb_command: player.info.playlists.names + sqb_read: true + sqb_write: false + sqb_read_group: + - ALL + - ALL.player + - ALL.player.info + - ALL.player.info.playlists + sqb_read_initial: true + status: type: str sqb_command: player.info.status From e2a5389a86d744983f655e071faf3ffcf7ba16c0 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sat, 1 Jul 2023 01:05:23 +0200 Subject: [PATCH 145/775] knx plugin: fix issue with polling knx items on startup. All polls are put in a waiting list and polled on run only. --- knx/__init__.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/knx/__init__.py b/knx/__init__.py index 62c4288ef..461798095 100755 --- a/knx/__init__.py +++ b/knx/__init__.py @@ -89,6 +89,7 @@ def __init__(self, smarthome): self._send_time_do = self.get_parameter_value('send_time') self._bm_separatefile = False self._bm_format = "BM': {1} set {2} to {3}" + self._startup_polling = {} # following needed for statistics self.enable_stats = self.get_parameter_value('enable_stats') @@ -196,6 +197,8 @@ def groupwrite(self, ga, payload, dpt, flag='write'): pkt[5] = flag | pkt[5] if self.readonly: self.logger.info(self.translate("groupwrite telegram for: {} - Value: {} not sent. Plugin in READONLY mode.").format(ga, payload)) + elif not self.alive: + self.logger.info(self.translate("groupwrite telegram for: {} - Value: {} not sent. 
Plugin not alive.").format(ga, payload)) else: if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(self.translate("groupwrite telegram for: {} - Value: {} sent.").format(ga, payload)) @@ -209,9 +212,12 @@ def _cacheread(self, ga): self.logger.warning("_cacheread: " + self.translate('problem encoding ga: {}').format(ga)) return pkt.extend([0, 0]) - if self.logger.isEnabledFor(logging.DEBUG): - self.logger.debug(self.translate('reading knxd cache for ga: {}').format(ga)) - self._send(pkt) + if not self.alive: + self.logger.info(self.translate('not reading knxd cache for ga {} because plugin is not alive.').format(ga)) + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(self.translate('reading knxd cache for ga: {}').format(ga)) + self._send(pkt) def groupread(self, ga): pkt = bytearray([0, KNXD.GROUP_PACKET]) @@ -221,14 +227,18 @@ def groupread(self, ga): self.logger.warning("groupread: " + self.translate('problem encoding ga: {}').format(ga)) return pkt.extend([0, FLAG_KNXREAD]) - self._send(pkt) + if not self.alive: + self.logger.info(self.translate('not reading knxd group for ga {} because plugin is not alive.').format(ga)) + else: + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug(self.translate('reading knxd group for ga: {}').format(ga)) + self._send(pkt) def _poll(self, **kwargs): if ITEM in kwargs: item = kwargs[ITEM] else: item = 'unknown item' - if 'ga' in kwargs: self.groupread(kwargs['ga']) else: @@ -524,6 +534,13 @@ def run(self): self.alive = True self._client.connect() # moved from __init__() for proper restart behaviour + for item in self._startup_polling: + _ga = self._startup_polling[item].get('ga') + _interval = self._startup_polling[item].get('interval') + if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug("KNX Startup Poll for item '{}': ga {}, interval {}".format(item, _ga, _interval)) + self._poll(**{ITEM: item, 'ga':_ga, 'interval':_interval}) + if self._send_time_do: self._sh.scheduler.add('KNX[{0}] time'.format(self.get_instance_name()), self._send_time, prio=5, cycle=int(self._send_time_do)) @@ -642,8 +659,11 @@ def parse_item(self, item): "Item {} is polled on GA {} every {} seconds".format(item, poll_ga, poll_interval)) randomwait = random.randrange(15) next = self.shtime.now() + timedelta(seconds=poll_interval + randomwait) + self._startup_polling.update({item: {'ga': poll_ga, 'interval': poll_interval}}) + ''' self._sh.scheduler.add(f'KNX poll {item}', self._poll, value={ITEM: item, 'ga': poll_ga, 'interval': poll_interval}, next=next) + ''' else: self.logger.warning("Ignoring knx_poll for item {}: We need two parameters, one for the GA and one for the polling interval.".format(item)) pass From 3b9902e6b58a13c4b445df1a09c56ee87a29320b Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 2 Jul 2023 12:19:38 +0200 Subject: [PATCH 146/775] DB_ADDON: Bump to 1.2.0 - bump to version 1.2.0 due to significant change in code --- db_addon/__init__.py | 2 +- db_addon/plugin.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 90e2bce66..b35bea3df 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -54,7 +54,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. 
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.1.4' + PLUGIN_VERSION = '1.2.0' def __init__(self, sh): """ diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 662e2aab2..345ed21db 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.1.4 # Plugin version (must match the version specified in __init__.py) + version: 1.2.0 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin From 3be6658452b5c6146bbae4e94e6989769ac40d77 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 2 Jul 2023 20:44:04 +0200 Subject: [PATCH 147/775] AVM: Exception Classes & Minor Fixes - All: Introduction of Exception Classes - FritzDevice: add 'aha_device' to handle_updated_item in FritzDevice - FritzHome: Remove properties - WebIF: synchronize values between initial view and value updates - Bump to 2.0.8 --- avm/__init__.py | 121 +++++++++++++++++++-------------- avm/plugin.yaml | 2 +- avm/webif/templates/index.html | 30 +++++++- 3 files changed, 99 insertions(+), 54 deletions(-) mode change 100755 => 100644 avm/webif/templates/index.html diff --git a/avm/__init__.py b/avm/__init__.py index bcbaf2cdc..3e0701941 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -87,19 +87,8 @@ def to_float(arg) -> float: return 0 -def to_int_to_bool(arg) -> bool: - arg = to_int(arg) - try: - return bool(arg) - except (ValueError, TypeError): - return False - - def clamp(n, min_n, max_n): - try: - return max(min(max_n, n), min_n) - except (ValueError, TypeError): - return 0 + return max(min(max_n, n), min_n) def walk_nodes(root, nodes: list): @@ -124,7 +113,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.7' + PLUGIN_VERSION = '2.0.8' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -162,30 +151,30 @@ def __init__(self, sh): # init FritzDevice try: self.fritz_device = FritzDevice(_host, _port, ssl, _verify, _username, _passwort, _call_monitor_incoming_filter, _use_tr064_backlist, _log_entry_count, self) - except IOError as e: + except FritzAuthorizationError as e: self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via TR064-Interface. Not connected.") self.fritz_device = None else: - self.logger.debug("Connection to FritzDevice via TR064-Interface established.") + self.logger.info("Connection to FritzDevice via TR064-Interface established.") # init FritzHome try: self.fritz_home = FritzHome(_host, ssl, _verify, _username, _passwort, _log_entry_count, self) - except IOError as e: + except FritzAuthorizationError as e: self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via AHA-HTTP-Interface. 
Not connected.") self.fritz_home = None else: - self.logger.debug("Connection to FritzDevice via AHA-HTTP-Interface established.") + self.logger.info("Connection to FritzDevice via AHA-HTTP-Interface established.") # init Call Monitor if self._call_monitor and self.fritz_device and self.fritz_device.connected: try: self.monitoring_service = Callmonitor(_host, 1012, self.fritz_device.get_contact_name_by_phone_number, _call_monitor_incoming_filter, self) - except IOError as e: + except FritzAuthorizationError as e: self.logger.warning(f"{e} occurred during establishing connection to FritzDevice CallMonitor. Not connected.") self.monitoring_service = None else: - self.logger.debug("Connection to FritzDevice CallMonitor established.") + self.logger.info("Connection to FritzDevice CallMonitor established.") else: self.monitoring_service = None @@ -782,7 +771,7 @@ def __init__(self, host, port, ssl, verify, username, password, call_monitor_inc # check connection: conn_test_result = self.model_name() if isinstance(conn_test_result, int): - raise IOError(f"Error {conn_test_result}-'{self.ERROR_CODES.get(conn_test_result, 'unknown')}'") + raise FritzAuthorizationError(f"Error {conn_test_result}-'{self.ERROR_CODES.get(conn_test_result, 'unknown')}'") self.connected = True if self.is_fritzbox(): @@ -805,11 +794,12 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): # to be set value to_be_set_value = item() - # define command per avm_data_type - _dispatcher = {'wlanconfig': ('set_wlan', {'NewEnable': int(to_be_set_value)}, index), - 'wps_active': ('set_wps', {'NewX_AVM_DE_WPSEnable': int(to_be_set_value)}, index), - 'tam': ('set_tam', {'NewIndex': int(index), 'NewEnable': int(to_be_set_value)}, None), - 'deflection_enable': ('set_deflection', {'NewDeflectionId': int(index), 'NewEnable': int(to_be_set_value)}, None), + # define command per avm_data_type // all avm_data_type of TR064_RW_ATTRIBUTES must be defined here + _dispatcher = {'wlanconfig': ('set_wlan', {'NewEnable': int(to_be_set_value)}, index), + 'wps_active': ('set_wps', {'NewX_AVM_DE_WPSEnable': int(to_be_set_value)}, index), + 'tam': ('set_tam', {'NewIndex': int(index), 'NewEnable': int(to_be_set_value)}, None), + 'deflection_enable': ('set_deflection', {'NewDeflectionId': int(index), 'NewEnable': int(to_be_set_value)}, None), + 'aha_device': ('set_aha_device', {'NewAIN': index, 'NewSwitchState': 'ON' if to_be_set_value else 'OFF'}, None) } # do logging @@ -923,9 +913,13 @@ def wlan_devices_count(self): def cyclic_item_update(self, read_all: bool = False): """Updates Item Values""" + if not self._plugin_instance.alive: + return + current_time = int(time.time()) # iterate over items and get data + item_count = 0 for item in self.item_list(): if not self.connected: @@ -963,6 +957,7 @@ def cyclic_item_update(self, read_all: bool = False): self.logger.debug(f"Item={item.path()} with avm_data_type={avm_data_type} and index={index} will be updated") # get data and set item value + item_count += 1 if not self._update_item_value(item, avm_data_type, index) and self.use_tr064_blacklist: error_count += 1 self.logger.debug(f"{item.path()} caused error. New error_count: {error_count}. 
Item will be blacklisted after more than 2 errors.") @@ -974,6 +969,8 @@ def cyclic_item_update(self, read_all: bool = False): # clear data cache dict after update cycle self._clear_data_cache() + self.logger.debug(f"Update of {item_count} TR064-Items took {int(time.time()) - current_time}s") + def _update_item_value(self, item, avm_data_type: str, index: str) -> bool: """ Polls data and set item value; Return True if action was successful, else False""" @@ -2108,6 +2105,10 @@ def item_list(self): def _request(self, url: str, params: dict = None): """ Send a get request with parameters and return response as tuple with (content_type, response) + Raises FritzHttpTimeoutError on timeout + Raises a FritzHttpRequestError if the device does not support the command or arguments. + Raises FritzHttpInterfaceError on missing rights. + Raises a FritzAuthorizationError if server error occurred. :param url: URL to be requested :param params: params for request @@ -2135,9 +2136,7 @@ def get_sid(): else: msg = "HTTP request timed out." self.logger.info(msg) - raise IOError(msg) - except requests.exceptions.ConnectionError: - raise IOError("ConnectionError during HTTP request.") + raise FritzHttpTimeoutError(msg) if response.status_code == 200: content_type = response.headers.get('content-type') @@ -2148,17 +2147,17 @@ def get_sid(): elif response.status_code == 403: msg = f"{response.status_code!r} Forbidden: 'Session-ID ungültig oder Benutzer nicht autorisiert'" self.logger.info(msg) - raise IOError(msg) + raise FritzHttpInterfaceError(msg) elif response.status_code == 400: msg = f"{response.status_code!r} HTTP Request fehlerhaft, Parameter sind ungültig, nicht vorhanden oder Wertebereich überschritten" self.logger.info(f"Error {msg}, params: {params}") - raise IOError(msg) + raise FritzHttpRequestError(msg) else: msg = f"Error {response.status_code!r} Internal Server Error: 'Interner Fehler'" self.logger.info(f"{msg}, params: {params}") - raise IOError(msg) + raise FritzAuthorizationError(msg) def aha_request(self, cmd: str, ain: str = None, param: dict = None, result_type: str = None): """Send an AHA request. 
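A brief, illustrative sketch (not part of the patch) of how calling code can use the dedicated exception classes this commit introduces in place of the former generic IOError; `fritz_home`, `url`, `params` and `logger` are assumed to exist in the caller:

    # Hedged example only: FritzHome._request() now raises specific exceptions,
    # so failure modes can be handled separately instead of one broad IOError.
    try:
        header, content = fritz_home._request(url, params)
    except FritzHttpTimeoutError:
        content = None                                     # transient timeout -> retry on next cycle
    except (FritzHttpRequestError, FritzHttpInterfaceError) as err:
        logger.warning(f"AHA request rejected: {err}")     # invalid parameters (400) / missing rights (403)
        content = None
    except FritzAuthorizationError as err:
        logger.error(f"Authorization or server error: {err}")
        content = None
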
@@ -2178,7 +2177,7 @@ def aha_request(self, cmd: str, ain: str = None, param: dict = None, result_type try: header, content = self._request(url=url, params=params) - except IOError as e: + except (FritzAuthorizationError, FritzHttpTimeoutError, FritzHttpInterfaceError, FritzHttpRequestError) as e: self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") return None @@ -2195,7 +2194,7 @@ def aha_request(self, cmd: str, ain: str = None, param: dict = None, result_type return None if result_type == 'bool': - return to_int_to_bool(content) + return bool(to_int(content)) elif result_type == 'int': return to_int(content) elif result_type == 'float': @@ -2255,7 +2254,7 @@ def get_md5_hash() -> str: if sid2 == "0000000000000000": self.logger.warning(f"Login failed for user '{self.user}'") - raise IOError(f"Error 'AHA Login failed for user '{self.user}''") + raise FritzAuthorizationError(f"Error 'AHA Login failed for user '{self.user}''") self._sid = sid2 @@ -2951,7 +2950,7 @@ def get_device_log_from_lua(self) -> Union[None, list]: # get data try: header, content = self._request(url=url, params={}) - except IOError as e: + except (FritzAuthorizationError, FritzHttpTimeoutError, FritzHttpInterfaceError, FritzHttpRequestError) as e: self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") return None @@ -2986,7 +2985,7 @@ def get_device_log_from_lua_separated(self) -> Union[None, list]: # get data try: header, content = self._request(url=url, params={}) - except IOError as e: + except (FritzAuthorizationError, FritzHttpTimeoutError, FritzHttpInterfaceError, FritzHttpRequestError) as e: self.logger.warning(f"Error '{e}' occurred during requesting AHA Interface") return None @@ -3285,7 +3284,6 @@ def _update_from_node(self, node): if not self.connected: return - @property def has_light(self): """Check if the device has LightBulb function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.LIGHT) @@ -3303,10 +3301,9 @@ def _update_from_node(self, node): if not self.connected: return - if self.has_powermeter: + if self.has_powermeter(): self._update_powermeter_from_node(node) - @property def has_powermeter(self): """Check if the device has powermeter function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.POWER_METER) @@ -3618,10 +3615,9 @@ def _update_from_node(self, node): self.levelpercentage = 0 return - if self.has_level: + if self.has_level(): self._update_level_from_node(node) - @property def has_level(self): """Check if the device has dimmer function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.LEVEL) @@ -3663,10 +3659,9 @@ def _update_from_node(self, node): if self.connected is False: return - if self.has_color: + if self.has_color(): self._update_color_from_node(node) - @property def has_color(self): """Check if the device has LightBulb function.""" return self._has_feature(FritzHome.FritzhomeDeviceFeatures.COLOR) @@ -3732,31 +3727,31 @@ def _update_color_from_node(self, node): def get_colors(self): """Get the supported colors.""" - if self.has_color: + if self.has_color(): return self._fritz.get_colors(self.ain) else: return {} def set_color(self, hsv, duration=0): """Set HSV color.""" - if self.has_color: + if self.has_color(): self._fritz.set_color(self.ain, hsv, duration, True) def set_unmapped_color(self, hsv, duration=0): """Set unmapped HSV color (Free color selection).""" - if self.has_color: + if self.has_color(): self._fritz.set_color(self.ain, hsv, duration, False) def get_color_temps(self): """Get 
the supported color temperatures energy.""" - if self.has_color: + if self.has_color(): return self._fritz.get_color_temps(self.ain) else: return [] def set_color_temp(self, temperature, duration=0): """Set white color temperature.""" - if self.has_color: + if self.has_color(): self._fritz.set_color_temp(self.ain, temperature, duration) class FritzhomeDeviceHumidity(FritzhomeDeviceBase): @@ -3860,7 +3855,7 @@ def __init__(self, host, port, callback, call_monitor_incoming_filter, plugin_in # connect self.connect() if not self.conn: - raise IOError("Connection Error") + raise FritzAuthorizationError("Callmonitor Connection Error") def connect(self): """ @@ -4110,7 +4105,8 @@ def _stop_counter(self, direction: str): self._duration_counter_thread_incoming.join(1) elif direction == 'outgoing': self._duration_counter_thread_outgoing.join(1) - except Exception: + except Exception as e: + self.logger.warning(f"Error {e!r} occurred during stopping counter of Callmonitor") pass def _count_duration_incoming(self): @@ -4342,10 +4338,35 @@ def _trigger(self, call_from: str, call_to: str, dt: str, callid: str, event: st self._call_incoming_cid = None +class FritzHttpInterfaceError(Exception): + """ + Exception raised on calling the aha-interface and getting a response with a status-code other than 200. + """ + + +class FritzHttpRequestError(Exception): + """ + Exception raised on calling the aha-interface with non-valid parameters + """ + + +class FritzHttpTimeoutError(Exception): + """ + Exception raised on calling the aha-interface and getting a timeout + """ + + +class FritzAuthorizationError(Exception): + """ + Authentication error. Not allowed to access the box at all. + """ + + # # static XML helpers # + def get_node_value(elem, node): return elem.findtext(node) diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 25ff19001..03fcf6d65 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.7 # Plugin version (must match the version specified in __init__.py) + version: 2.0.8 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin diff --git a/avm/webif/templates/index.html b/avm/webif/templates/index.html old mode 100755 new mode 100644 index 3ffc52a69..f56ea308d --- a/avm/webif/templates/index.html +++ b/avm/webif/templates/index.html @@ -287,7 +287,15 @@ - + @@ -322,7 +330,15 @@ - + @@ -388,7 +404,15 @@ - + From 94aedd307447fdb676d970f9cbf26bb18e39c001 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 4 Jul 2023 10:40:30 +0200 Subject: [PATCH 148/775] DB_ADDON: - improve method for calculation of verbrauch - add method to determine zaehlerstand - add further functions to _query_log_timestamp --- db_addon/__init__.py | 167 +++++++++++++++++++++++++++++++++---------- db_addon/plugin.yaml | 6 ++ 2 files changed, 134 insertions(+), 39 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index b35bea3df..af2be3143 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -92,10 +92,10 @@ def __init__(self, sh): # define debug logs self.parse_debug = True # Enable / Disable debug logging for method 'parse item' self.execute_debug = True # Enable / Disable debug logging for 
method 'execute items' - self.sql_debug = True # Enable / Disable debug logging for sql stuff + self.sql_debug = False # Enable / Disable debug logging for sql stuff self.ondemand_debug = True # Enable / Disable debug logging for method 'handle_ondemand' self.onchange_debug = True # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = True # Enable / Disable debug logging for query preparation + self.prepare_debug = False # Enable / Disable debug logging for query preparation # define default mysql settings self.default_connect_timeout = 60 @@ -374,6 +374,10 @@ def get_query_parameters_from_db_addon_params() -> Union[dict, None]: """get query parameters from item attribute db_addon_params""" db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + + if not db_addon_params: + db_addon_params = self.get_iattr_value(item.conf, 'db_addon_params_dict') + new_db_addon_params = {} possible_params = required_params = [] @@ -836,6 +840,10 @@ def handle_ondemand(self, item: Item) -> None: if result and result < 0: self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. Something seems to be wrong.") + # handle item starting with 'zaehlerstand_' + elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: + result = self._handle_zaehlerstand(params) + # handle 'serie_tagesmittelwert_stunde_30_0d' and 'serie_tagesmittelwert_tag_stunde_30d' elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: result = self._prepare_temperature_list(**params) @@ -1295,8 +1303,21 @@ def suspend(self, state: bool = False) -> bool: # Calculation methods / Using Item Object ############################################## - def _handle_verbrauch(self, query_params: dict): - """Handle execution of verbrauch calculation""" + def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: + """ + Ermittlung des Verbrauches innerhalb eines Zeitraumes + + Die Vorgehensweise ist: + - Endwert: Abfrage des letzten Eintrages im Zeitraum + - Ergibt diese Abfrage einen Wert, gab eines einen Eintrag im Zeitraum in der DB, es wurde also etwas verbraucht, dann entspricht dieser dem Endzählerstand + - Ergibt diese Abfrage keinen Wert, gab eines keinen Eintrag im Zeitraum in der DB, es wurde also nichts verbraucht -> Rückgabe von 0 + - Startwert: Abfrage des letzten Eintrages im Zeitraum vor dem Abfragezeitraum + - Ergibt diese Abfrage einen Wert, entspricht dieser dem Zählerstand am Ende des Zeitraumes vor dem Abfragezeitraum + - Ergibt diese Abfrage keinen Wert, wurde in Zeitraum, vor dem Abfragezeitraum nichts verbraucht, der Anfangszählerstand kann so nicht ermittelt werden. 
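A purely illustrative calculation of the procedure described here (invented figures, not part of the plugin code):

    # Invented example values:
    #   last entry inside the requested timeframe                  -> 1003.5  (end reading)
    #   last entry before the timeframe (or next value before it)  -> 1000.0  (start reading)
    value_end = 1003.5
    value_start = 1000.0
    consumption = value_end - value_start   # -> 3.5
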
+ - Abfrage des nächsten Wertes vor dem Zeitraum + - Ergibt diese Abfrage einen Wert, entspricht dieser dem Anfangszählerstand + - Ergibt diese Abfrage keinen Wert, Anfangszählerstand = 0 + """ # define start, end for verbrauch_jahreszeitraum_timedelta if 'timedelta' in query_params: @@ -1315,40 +1336,87 @@ def _handle_verbrauch(self, query_params: dict): if self.prepare_debug: self.logger.debug(f"called with {query_params=}") - _result = None - # get value for end and check it; - query_params.update({'func': 'max', 'start': end, 'end': end}) + query_params.update({'func': 'last', 'start': end, 'end': end}) value_end = self._query_item(**query_params)[0][1] if self.prepare_debug: self.logger.debug(f"{value_end=}") - if value_end is None: # if None (Error) return - return - elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null - return 0 + if value_end is None or value_end == 0: + return value_end # get value for start and check it; - query_params.update({'func': 'min'}) + query_params.update({'func': 'last', 'start': end+1, 'end': end+1}) value_start = self._query_item(**query_params)[0][1] if self.prepare_debug: self.logger.debug(f"{value_start=}") - if value_start is None: # if None (Error) return + if value_start is None: + if self.prepare_debug: + self.logger.debug(f"Error occurred during query. Return.") return - if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. + if value_start == 0: self.logger.info(f"No DB Entry found for requested start date. Looking for next DB entry.") - query_params.update({'func': 'next', 'start': start}) + query_params.update({'func': 'next', 'start': start+1}) value_start = self._query_item(**query_params)[0][1] if self.prepare_debug: self.logger.debug(f"next available value is {value_start=}") - # calculate result - if value_start is not None: - _new_value = value_end - value_start - return _new_value if isinstance(_new_value, int) else round(_new_value, 1) + if not value_start: + value_start = 0 + if self.prepare_debug: + self.logger.debug(f"No start value available. 
Will be set to 0 as default") + + # calculate consumption + consumption = value_end - value_start + if self.prepare_debug: + self.logger.debug(f"{consumption=}") + + if isinstance(consumption, float): + if consumption.is_integer(): + consumption = int(consumption) + else: + consumption = round(consumption, 1) + + return consumption + + def _handle_zaehlerstand(self, query_params: dict) -> Union[float, None]: + """ + Ermittlung des Zählerstandes zum Ende eines Zeitraumes + + Die Vorgehensweise ist: + - Abfrage des letzten Eintrages im Zeitraum + - Ergibt diese Abfrage einen Wert, entspricht dieser dem Zählerstand + - Ergibt diese Abfrage keinen Wert, dann + - Abfrage des nächsten Wertes vor dem Zeitraum + - Ergibt diese Abfrage einen Wert, entspricht dieser dem Zählerstand + - Ergibt diese Abfrage keinen Wert, dann Rückgabe von None + """ + + if self.prepare_debug: + self.logger.debug(f"called with {query_params=}") + + start = query_params['start'] + end = query_params['end'] + + # get last value of timeframe + query_params.update({'func': 'last', 'start': start, 'end': end}) + last_value = self._query_item(**query_params)[0][1] + + if last_value == 0: + # get last value (next) before timeframe + query_params.update({'func': 'next'}) + last_value = self._query_item(**query_params)[0][1] + + if isinstance(last_value, float): + if last_value.is_integer(): + last_value = int(last_value) + else: + last_value = round(last_value, 1) + + return last_value def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, month: Union[int, str] = None) -> Union[int, None]: """ @@ -2243,29 +2311,50 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i 'sum_min_neg': 'time, SUM(value) as value FROM (SELECT time, IF(min(val_num) < 0, ROUND(MIN(val_num), 1), 0) as value ', 'diff_max': 'time, value1 - LAG(value1) OVER (ORDER BY time) AS value FROM (SELECT time, ROUND(MAX(val_num), 1) as value1 ', 'next': 'time, val_num as value ', - 'raw': 'time, val_num as value ' + 'raw': 'time, val_num as value ', + 'first': 'time, val_num as value ', + 'last': 'time, val_num as value ', } _table_alias = { - 'avg': '', - 'avg1': ') AS table1 ', - 'min': '', - 'max': '', - 'max1': ') AS table1 ', - 'sum': '', - 'on': '', - 'integrate': '', - 'sum_max': ') AS table1 ', - 'sum_avg': ') AS table1 ', + 'avg': '', + 'avg1': ') AS table1 ', + 'min': '', + 'max': '', + 'max1': ') AS table1 ', + 'sum': '', + 'on': '', + 'integrate': '', + 'sum_max': ') AS table1 ', + 'sum_avg': ') AS table1 ', 'sum_min_neg': ') AS table1 ', - 'diff_max': ') AS table1 ', - 'next': '', - 'raw': '', + 'diff_max': ') AS table1 ', + 'next': '', + 'raw': '', + 'first': '', + 'last': '', } - _order = "time DESC LIMIT 1 " if func == "next" else "time ASC " + _order = { + 'avg': 'time ASC ', + 'avg1': 'time ASC ', + 'min': 'time ASC ', + 'max': 'time ASC ', + 'max1': 'time ASC ', + 'sum': 'time ASC ', + 'on': 'time ASC ', + 'integrate': 'time ASC ', + 'sum_max': 'time ASC ', + 'sum_avg': 'time ASC ', + 'sum_min_neg': 'time ASC ', + 'diff_max': 'time ASC ', + 'next': 'time DESC LIMIT 1 ', + 'raw': 'time ASC ', + 'first': 'time ASC LIMIT 1 ', + 'last': 'time DESC LIMIT 1 ', + } - _where = "item_id = :item_id AND time < :ts_start" if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " + _where = "item_id = :item_id AND time < :ts_start " if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " _db_table = 'log ' @@ -2275,7 +2364,7 @@ def 
_query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", "hour": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m%d%H') ", - None: '' + None: "", } _group_by_sqlite = { @@ -2284,7 +2373,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i "week": "GROUP BY strftime('%Y%W', date((time/1000),'unixepoch')) ", "day": "GROUP BY date((time/1000),'unixepoch') ", "hour": "GROUP BY strftime('%Y%m%d%H', datetime((time/1000),'unixepoch')) ", - None: '' + None: "", } # select query parts depending in db driver @@ -2322,7 +2411,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i params.update({'ts_end': ts_end}) # assemble query - query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order}{_table_alias[func]}{_group_by[group2]}".strip() + query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order[func]}{_table_alias[func]}{_group_by[group2]}".strip() if self.db_driver.lower() == 'sqlite3': query = query.replace('IF', 'IIF') @@ -2584,7 +2673,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: 'week': _h_in_d * _d_in_w, 'month': _h_in_d * _d_in_m, 'year': _h_in_d * _d_in_y, - }, + }, 'day': {'hour': 1 / _h_in_d, 'day': 1, 'week': _d_in_w, diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 345ed21db..a8b103016 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -588,6 +588,12 @@ item_attributes: de: Parameter für eine Auswertefunktion des DB-Addon Plugins im Format 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument' en: Parameters of a DB-Addon Plugin evaluation function. Need to have format of 'kwargs' enclosed in quotes like 'keyword=argument, keyword=argument' + db_addon_params_dict: + type: dict + description: + de: Parameter für eine Auswertefunktion des DB-Addon Plugins im Format eines Dictionary + en: Parameters of a DB-Addon Plugin evaluation function. Need to have format of a dictionary + db_addon_startup: type: bool description: From aa0b039349462bd31a07e78c89a3e09f86ce52b3 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 6 Jul 2023 22:36:33 +0200 Subject: [PATCH 149/775] stateengine plugin: (re)introduce separate loggers for each stateengine item. 
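A small, hypothetical example of how the per-item logger introduced by this commit can be addressed through the standard logging configuration; the item path is invented:

    import logging
    # Each stateengine item now logs to 'stateengine.<item path>' (see the diff below),
    # so a single rule set can be made more verbose without raising the global level.
    logging.getLogger('stateengine.licht.wohnzimmer').setLevel(logging.DEBUG)   # invented item path
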
Custom logger stateengine..rules --- stateengine/StateEngineLogger.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stateengine/StateEngineLogger.py b/stateengine/StateEngineLogger.py index 701f39ae9..8ce03712d 100755 --- a/stateengine/StateEngineLogger.py +++ b/stateengine/StateEngineLogger.py @@ -102,7 +102,8 @@ def create(item): # Constructor # item: item for which the detailed log is (used as part of file name) def __init__(self, item): - self.logger = StateEngineDefaults.se_logger + #self.logger = StateEngineDefaults.se_logger + self.logger = logging.getLogger('stateengine.{}'.format(item.property.path)) self.__section = item.property.path.replace(".", "_").replace("/", "") self.__indentlevel = 0 self.__loglevel = StateEngineDefaults.log_level From cad3d07221ce693ae5239cb469c85c22db91d794 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 7 Jul 2023 17:04:12 +0200 Subject: [PATCH 150/775] AVM: - fix if avm_home_automation is not activated - fix format of log_list of get_device_log_from_tr064_separated - update headtable in WebIF --- avm/__init__.py | 21 +++++--- avm/webif/__init__.py | 4 +- avm/webif/static/img/lamp_amber.png | Bin 0 -> 3340 bytes avm/webif/templates/index.html | 75 +++++++++++++++++----------- 4 files changed, 60 insertions(+), 40 deletions(-) mode change 100755 => 100644 avm/webif/__init__.py create mode 100644 avm/webif/static/img/lamp_amber.png diff --git a/avm/__init__.py b/avm/__init__.py index 3e0701941..abbbc6bd2 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -158,13 +158,16 @@ def __init__(self, sh): self.logger.info("Connection to FritzDevice via TR064-Interface established.") # init FritzHome - try: - self.fritz_home = FritzHome(_host, ssl, _verify, _username, _passwort, _log_entry_count, self) - except FritzAuthorizationError as e: - self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via AHA-HTTP-Interface. Not connected.") - self.fritz_home = None + if self._aha_http_interface: + try: + self.fritz_home = FritzHome(_host, ssl, _verify, _username, _passwort, _log_entry_count, self) + except FritzAuthorizationError as e: + self.logger.warning(f"{e} occurred during establishing connection to FritzDevice via AHA-HTTP-Interface. 
Not connected.") + self.fritz_home = None + else: + self.logger.info("Connection to FritzDevice via AHA-HTTP-Interface established.") else: - self.logger.info("Connection to FritzDevice via AHA-HTTP-Interface established.") + self.fritz_home = None # init Call Monitor if self._call_monitor and self.fritz_device and self.fritz_device.connected: @@ -1500,8 +1503,10 @@ def get_device_log_from_tr064_separated(self): l_text = text[18:] l_cat = '-' l_type = '-' - l_ts = int(datetime.datetime.timestamp(datetime.datetime.strptime(text[:17], '%d.%m.%y %H:%M:%S'))) - log_list.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) + # l_ts = int(datetime.datetime.timestamp(datetime.datetime.strptime(text[:17], '%d.%m.%y %H:%M:%S'))) + # log_list.append([l_text, l_type, l_cat, l_ts, l_date, l_time]) + dt = datetime.datetime.strptime(f"{l_date} {l_time}", '%d.%m.%y %H:%M:%S').strftime('%d.%m.%Y %H:%M:%S') + log_list.append([dt, l_text, l_type, l_cat]) return log_list diff --git a/avm/webif/__init__.py b/avm/webif/__init__.py old mode 100755 new mode 100644 index cdab224f9..7f88bb43f --- a/avm/webif/__init__.py +++ b/avm/webif/__init__.py @@ -70,12 +70,12 @@ def index(self, reload=None, action=None): if self.plugin.fritz_home: aha_items = self.plugin.get_aha_items() aha_item_count = len(aha_items) - logentries = self.plugin.get_device_log_from_lua_separated() + logentries = self.plugin.fritz_home.get_device_log_from_lua_separated() else: aha_items = None aha_item_count = None if self.plugin.fritz_device: - logentries = self.plugin.get_device_log_from_tr064_separated() + logentries = self.plugin.fritz_device.get_device_log_from_tr064_separated() else: logentries = None diff --git a/avm/webif/static/img/lamp_amber.png b/avm/webif/static/img/lamp_amber.png new file mode 100644 index 0000000000000000000000000000000000000000..381185b9cd29ae13a7e18b48cb1f9ee131450732 GIT binary patch literal 3340 zcmeHKi#ro+8=o2`5qXQ2$dtU1@jB#O4zH!yDvD7KA@xpMjE0SAmBU9?4&^lJ&62$4 zw3Kr?Nsbw6$xvf97h~oycKFtR@m=5dJ=b+#zw7=z*L^?Nb>H`M-_MhA8V*z22Hpk$ z0MtBBxSs_86kL7@NJ-9c)%RJ*0f=?=^aaTm0TiAt*A=j5VXgpLzb^OJW~7U^+@g-G zDh^eY>nhhzAh7^|`nzAEkZ+>CS5EGU{nJ0z2Xi?V7a9`*z~OLaSJ5bJSm^Z#GfYfm z@q!~50N5Vo>F(kiA2dIX$_k33yk3%hkK9HrkJ4hgDk%J!NzYfj;NPiwVD!-s)ubor zOGlZYjE8DSt$Iv)Rp;W)2Gghq8eU*CkE})4{~wpUTSyU9~kei0xx1fMfglZ*)5xH*Sd@Y_-*Ln}Gv4#$8Zi$m409)wMMQ zBKIl<)M(6aTw03u3Eq8CZ`X{@5Lh$H<(K=5u!@h`2qh(@5|gN#pwYa6&1ryZMYlq) zLF_4SCy4)BSBk6{tOv*3%_{G1!}siFeFjJK>_qJ0q&F&+CnxBL${K3v<`{H&Dp^K-7uJjYDBGAI=AWj~9o=$F008P3vvCeaDz-NVTng-hTeA|&n9nGZe4$Kja$Al=i|56 zEOxuPdJ))fKgpZ^Q+IMv3sLTeZaFfaGt7Ht;KAm`U0|X zl%pGJ4YAD>13%8rUO`r2VmDWXLwAopT_esd)hj+9BfO6fXb*J@Sn9((W$iNgG@2L# zN=sGu>aerdbWCjxi~$5PoqRS|soI!V25pHP3O8rM>`35a>*AV|++jZ9&=Hy7A$H`` zDDOMW)PkGbzMg8YeKNbRoAjh0 zLDlObexU^_nG*CU-b;vPsga*@plw!notM3sXDRqVvNw?YcmCFy=|HrntR>W~`HByQ zxn=(40?Ut~>K9bVBDAs+W*nyi)A1K3GQ85r_w+ON?%ivG8ewCJGTBIty{wDW%~2VU zG6#kxc_R_0tVNIv*qr(y66~Y4n;`bp^P_snw(}ce&iHZ{X>5b3#b6fOP}>=Blfa^$ znhC)++S`mt10HKZX5ETSAXYQEZfa!wxaY&HR zNMCI+M}MeiVlxPdG9xDhG|l21Eq6M;TU@-3{0lv>Bl(_mH7VExRU2G|K3v|x+3lkDx_BG3DXMS8jzz{n{C%B_ z1$e-kM9hg$v+rUl>zkhH%b0Y5d_B!c`MUNsjF|MhQEbjsUonH-o)mHI1|}I-H2>hNY4I`p`%3)|HUIR% z0XuefrV@_JYZU%UI3yfCwD}1fo2_+Bx-{%$etPt>A16~5ABp(woZ;6IMlL*{qx~Nj zj{j;UBA+_rzW9jt68tIV%H|rsOzLYP5Hh%VrHDLhe?p+MQ(ixQl{q{z5!~2|zJC2Q zNEQEh_G;GRb8tX|(fqo$_K8{D3c|EW{?>v>r!DdK?oS$cczB#08MkOo!Y4}Wdxj2@ z%lsQ(|MuR+}ADz zNkJljkoa_a&0e@<0moqMg+o!bi>0!f0cymT3qGSoJ&X`d#}e+M=7$d-hS=H8>a;_u71aWIFnW@ju0+I*)ex`id@T)#qWrHqh zKeKE1C%!yTbRMncFGcfU4b9gtwK@&iT-Wif2Pck87GQdwfi&8UCx?6AXJh_>bE3ZG 
zB|d;c1D{7*uxH&Djd|Hh4UgmIp1Q5KK-;RKZmy1r_zx>AiNYtt7k^%}D8%0EG9tn5 zSEyAJQ0lE5CvHiX-l>6c!Z~?AUSg-g7&MsFni4(WREg&f)xWTR@;VF~8ooOYnxo6R zTP8jrS=yLmG;ao(1U|BMpmpr8Zk>K$wa8j~aAf6`&L0kjok;er3ZCVASU_`f(yyLK z-_(}PTrfDolnz8vAKsAQ64%^zywH8^sJQzzbB~IRg|)S{6QraEU6i=oN9VBc43n1= zrK8{LTB7c}#Q)&0QK|9?0dHFUMi)M>5;PSGiZ=uauf-cN;|0|pkk8@54a7006-wNB za6pTVTJq%9O8k{);*swvA@b*jo5Rx&=4FE4;zL4?15npP%axRMD7{NeuwmzJ_C*_K zXO>{zau6+2lAT8jU_efJ|JbuoXZD%k_WT~k{%WmGyFU(x*Xc-Jq=6c{-@SV$d^Wfc z-ad^b+9i7s=M6~Lj8c2Gmx}cvwg`dHR{674Vbe#BSP+J>BiNDXWuX3sR(#~80>0(# z!!g3k&gg`~V92W!0j)G+s#iINEilH+gf+L+x?jIPJzjRqF5Oc0o76KKVQJVbUjDcv t6DIH3C07+)wOsaFX#)Stpf*psg{D=o**vu8yZmzk@ca|*PIC<*{Ttd7#RUKW literal 0 HcmV?d00001 diff --git a/avm/webif/templates/index.html b/avm/webif/templates/index.html index f56ea308d..0c1850dea 100644 --- a/avm/webif/templates/index.html +++ b/avm/webif/templates/index.html @@ -177,7 +177,7 @@ {% set tab4title = "hidden" %} {% endif %} -{% if p.fritz_home %} +{% if p.fritz_home or p.fritz_device %} {% set tab5title = _(""'AVM Log-Einträge'"") %} {% else %} {% set tab5title = "hidden" %} @@ -202,53 +202,62 @@
    {{ _('get_item_list') }} {{ p.get_item_list('database_addon', True) }}
    {{ _('_plg_item_dict') }}{{ p._plg_item_dict }}
    {{ _('work_item_queue_thread') }} {% if p.work_item_queue_thread != None %}{{ p.work_item_queue_thread.is_alive() }}{% endif %}
    {{ item.id() }} {{ item.property.type }}{{ item_config[0] }}{{ item_config[2] }}{{ item_config['avm_data_type'] }}{{ item_config['avm_data_cycle'] }} {{ item.property.value }} {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {{ item.id() }} {{ item.property.type }}{{ item_config[0] }}{{ item_config[2] }}{{ item_config['avm_data_type'] }}{{ item_config['avm_data_cycle'] }} {{ item.property.value }} {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {{ "01 TR064 Items @ Fritz_Device" }}{{ p.fritz_device.items }}{{ p.fritz_device.item_list() }}
    {{ "02 TR064 Blacklisted Items @ Fritz_Device " }}{{ p.fritz_device.get_tr064_items_blacklisted() }}{{ p.get_tr064_items_blacklisted() }}
    {{ "03 AHA Items @ Fritz_Home" }}{{ p.fritz_home.items }}{{ p.fritz_home.item_list() }}
    {{ p.get_item_config(item._path)['db_addon_fct'] }} {{ _(p.get_item_config(item)['cycle']|string) }} {% if p.get_item_config(item)['startup'] %}{{ _('Ja') }}{% else %}{{ _('Nein') }}{% endif %}{{ item._value | float }}{{ item._value | float | round(2) }} {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {{ item.property.type }} {{ item_config['avm_data_type'] }} {{ item_config['avm_data_cycle'] }}{{ item.property.value }} + {% if item.property.value == True or item.property.value == False %} + {{ item.property.value|lower }} + {% elif item.property.value|int != 0 %} + {{ item.property.value|int }} + {% else %} + {{ item.property.value }} + {% endif %} + {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {{ item.property.type }} {{ item_config['avm_data_type'] }} {{ item_config['avm_data_cycle'] }}{{ item.property.value }} + {% if item.property.value == True or item.property.value == False %} + {{ item.property.value|lower }} + {% elif item.property.value|int != 0 %} + {{ item.property.value|int }} + {% else %} + {{ item.property.value }} + {% endif %} + {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    {{ item_id }} {{ item.property.type }} {{ item.conf[instance_key]}}{{ item.property.value }} + {% if item.property.value == True or item.property.value == False %} + {{ item.property.value|lower }} + {% elif item.property.value|int != 0 %} + {{ item.property.value|int }} + {% else %} + {{ item.property.value }} + {% endif %} + {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
    - + - - - + - - + {{ _('Passwort: ') }} + {{ p.get_parameter_value_for_display('password') }} + - + + - - -
    + {% if p.fritz_device %} - {{ _('Gerät verfügbar') }} + {{ _('Gerät verfügbar') }} {% else %} - {{ _('Gerät nicht verfügbar') }} + {{ _('Gerät nicht verfügbar') }} {% endif %} - {{ _('Verbunden') }} + {{ _('FritzDevice') }} - {% if p.fritz_device %} - {{ _('Ja') }}{% if p.fritz_device.ssl %}, SSL{% endif %} + {{ _('Host: ') }} + {% if p.fritz_device.is_fritzbox() %} + {{ _('Fritz!Box') }} + {% elif p.fritz_device.is_repeater() %} + {{ _('Fritz!Repeater') }} {% else %} - {{ _('Nein') }} + {{ _('-') }} {% endif %} + + {{ p.fritz_device.host }} + + {{ _('Benutzer: ') }} + {{ p.get_parameter_value_for_display('username') }} {{ _('Benutzer') }}{{ p.get_parameter_value_for_display('username') }}
    - {% if p.monitoring_service and p.monitoring_service._listen_active %} - {{ _('Call Monitor verbunden') }} + + {% if p.fritz_home %} + {{ _('AHA verfügbar') }} {% else %} - {{ _('Call Monitor nicht verbunden') }} + {{ _('AHA nicht verfügbar') }} {% endif %} - {{ _('Call Monitor') }} + {{ _('FritzHome') }} - {% if p._monitoring_service %}{{ _('Ja') }}{% if not p._monitoring_service._listen_active %}, {{ _('nicht verbunden') }}{% endif %}{% else %}{{ _('Nein') }}{% endif %}{{ _('Passwort') }}{{ p.get_parameter_value_for_display('password') }}
    - {{ _('Host') }} - {% if p.fritz_device.is_fritzbox() %} - {{ _(' is Fritz!Box') }} - {% elif p.fritz_device.is_repeater() %} - {{ _(' is Fritz!Repeater') }} + + {% if p.monitoring_service and p.monitoring_service._listen_active %} + {{ _('Call Monitor verbunden') }} + {% elif p.monitoring_service and not p.monitoring_service._listen_active %} + {{ _('Call Monitor nicht verbunden') }} {% else %} - {{ _(' -') }} + {{ _('Call Monitor nicht aktiv') }} {% endif %} - + {{ _('Call Monitor') }} + + {{ _('Port: ') }} + {{ p.fritz_device.port }} {% if p.fritz_device.ssl %}(HTTPS){% endif %} {{ p.fritz_device.host }}{{ _('Port') }}{{ p.fritz_device.port }} {% if p.fritz_device.ssl %}(HTTPS){% endif %}
    @@ -446,7 +455,13 @@ {{ logentry[2] }}
    - {{ _('cat_'+logentry[3]|string) }} + + {% if logentry[3] != "-" %} + {{ _('cat_'+logentry[3]|string) }} + {% else %} + {{ '-' }} + {% endif %} + {% endfor %} {% endif %} From 316cec20d787091736a456e2734eacb6f0362ec5 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sat, 8 Jul 2023 14:15:47 +0200 Subject: [PATCH 151/775] DB_ADDON: - fix for calculating end_of_week --- db_addon/__init__.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index af2be3143..23320e1b3 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -90,12 +90,12 @@ def __init__(self, sh): self.active_queue_item: str = '-' # String holding item path of currently executed item # define debug logs - self.parse_debug = True # Enable / Disable debug logging for method 'parse item' - self.execute_debug = True # Enable / Disable debug logging for method 'execute items' + self.parse_debug = False # Enable / Disable debug logging for method 'parse item' + self.execute_debug = False # Enable / Disable debug logging for method 'execute items' self.sql_debug = False # Enable / Disable debug logging for sql stuff - self.ondemand_debug = True # Enable / Disable debug logging for method 'handle_ondemand' - self.onchange_debug = True # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = False # Enable / Disable debug logging for query preparation + self.ondemand_debug = False # Enable / Disable debug logging for method 'handle_ondemand' + self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' + self.prepare_debug = True # Enable / Disable debug logging for query preparation # define default mysql settings self.default_connect_timeout = 60 @@ -511,7 +511,8 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte db_addon_ignore_value_list_formatted.append(f"{op} {value}") max_values[op].append(value) - self.logger.info(f"Summarized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_formatted}") + if self.parse_debug: + self.logger.debug(f"Summarized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_formatted}") if not db_addon_ignore_value_list_formatted: return @@ -519,7 +520,9 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if not optimize: return db_addon_ignore_value_list_formatted - self.logger.info(f"Optimizing 'ignore_value_list' for item {item.path()} active.") + if self.parse_debug: + self.logger.debug(f"Optimizing 'ignore_value_list' for item {item.path()} active.") + # find low lower_value_list = max_values['<'] + max_values['<='] if lower_value_list: @@ -548,7 +551,9 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if (not lower_end[0] or (lower_end[0] and v >= lower_end[1])) or (not upper_end[0] or (upper_end[0] and v <= upper_end[1])): db_addon_ignore_value_list_optimized.append(f'!= {v}') - self.logger.info(f"Optimized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_optimized}") + if self.parse_debug: + self.logger.debug(f"Optimized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_optimized}") + return db_addon_ignore_value_list_optimized # handle all items with db_addon_fct @@ -2805,7 +2810,7 @@ def week_beginning(delta: int = 0) -> datetime: def week_end(delta: int = 0) -> datetime: """provides datetime of end of week minus x weeks""" - return week_beginning(delta) + 
relativedelta(days=6) + return week_beginning(delta) + relativedelta(days=7) def day_beginning(delta: int = 0) -> datetime: From e8385cd835c3ae6029ae0acc09a7220c397bb3de Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 00:23:19 +0200 Subject: [PATCH 152/775] executor plugin: fix major issue with autocomplete. Now full item paths are available instead of (non existing) item name references --- executor/webif/__init__.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/executor/webif/__init__.py b/executor/webif/__init__.py index a74caa780..1395763a1 100755 --- a/executor/webif/__init__.py +++ b/executor/webif/__init__.py @@ -48,7 +48,7 @@ import csv from jinja2 import Environment, FileSystemLoader -import sys +import sys class PrintCapture: """this class overwrites stdout and stderr temporarily to capture output""" @@ -197,7 +197,7 @@ def eval_statement(self, eline, path, reload=None): def exec_code(self, eline, reload=None): """ evaluate a whole python block in eline - + :return: result of the evaluation """ result = "" @@ -284,7 +284,7 @@ def delete_file(self, filename=''): @cherrypy.expose def get_filelist(self): """returns all filenames from the defined script path with suffix ``.py``""" - + if self.plugin.executor_scripts is not None: subdir = self.plugin.executor_scripts self.logger.debug(f"list files in {subdir}") @@ -296,7 +296,7 @@ def get_filelist(self): return files return '' - + @cherrypy.expose def get_autocomplete(self): _sh = self.plugin.get_sh() @@ -310,11 +310,11 @@ def get_autocomplete(self): if api is not None: for function in api: plugin_list.append("sh."+plugin_config_name + "." + function) - + myItems = _sh.return_items() itemList = [] for item in myItems: - itemList.append("sh."+str(item)+"()") + itemList.append("sh."+str(item.id())+"()") retValue = {'items':itemList,'plugins':plugin_list} - return (json.dumps(retValue)) \ No newline at end of file + return (json.dumps(retValue)) From 5e1fe2c4a90f7352108a4f830aa6757c04cc2eb1 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 00:28:46 +0200 Subject: [PATCH 153/775] executor plugin: allow sorting of saved files, minor code improvements --- executor/webif/templates/index.html | 83 ++++++++++++++++++++++------- 1 file changed, 65 insertions(+), 18 deletions(-) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index d34350a03..49cadb331 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -3,12 +3,6 @@ {% set logo_frame = false %} {% block pluginscripts %} - -{% endblock pluginscripts %} - - -{% block content -%} - +{% endblock pluginscripts %} +{% block pluginstyles %} +{% endblock pluginstyles %} +{% block content -%}
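For orientation, a hypothetical sketch of the JSON structure that the reworked get_autocomplete() endpoint (previous commit) hands to the web interface; the item and plugin names are invented:

    # Invented example of the autocomplete payload: item entries carry the full
    # item path plus '()', plugin entries are 'sh.<plugin name>.<function>'.
    example_payload = {
        'items': ['sh.living.temperature()', 'sh.living.light()'],
        'plugins': ['sh.database.dump'],
    }
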
    @@ -358,12 +404,13 @@
    {{ _('Instanz') }}: {{ p.get_instance_name( {% endif %}
    {{ _('Plugin') }}     : {% if p.alive %}{{ _('Aktiv') }}{% else %}{{ _('Gestoppt') }}{% endif %}
    -
    +
    + + + +
    @@ -381,13 +428,13 @@
    {{ _('Plugin') }}     : {% if p.aliv
    -
    +
    -
    +
    From 8195163529697103df6a7b181878188eec96368d Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 00:37:30 +0200 Subject: [PATCH 154/775] executor plugin: re-write autocomplete method using worker and promise functions. Now the plugin can be used immediately without having to wait for fetching the autocomplete dict. --- executor/webif/templates/index.html | 110 +++++++++++++++++++--------- 1 file changed, 77 insertions(+), 33 deletions(-) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index 49cadb331..8c9f66292 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -104,40 +104,83 @@ }; function get_Dict(cmPython) { - $.ajax({ - url: "get_autocomplete", - method: "GET", - dataType: "json", - success: function(myDict) { - console.log("got Autocomplete") - myAutocomplete = []; - for (i = 0; i < myDict.items.length; i++) { - if (myDict.items[i] != "." && myDict.items != "..") { - myAutocomplete.push({ - text: myDict.items[i], - displayText: myDict.items[i] + " | Item" - }); + return new window.Promise(function (resolve, reject) { + function successCallback(response) { + resolve(response); + } + + function errorCallback(response) { + reject(response); + } + + function fetch_autocomplete() { + CodeMirror.commands.autocomplete_items = function() { + }; + $.ajax({ + url: "get_autocomplete", + method: "GET", + async: true, + dataType: "json", + success: function(myDict) { + console.log("Initializing Autocomplete"); + let worker = new Worker( + `data:text/javascript, + function createAutocomplete(myDict){ + myAutocomplete = []; + for (i = 0; i < myDict.items.length; i++) { + if (myDict.items[i] != "." && myDict.items != "..") { + myAutocomplete.push({ + text: myDict.items[i], + displayText: myDict.items[i] + " | Item" + }); + } + } + for (i = 0; i < myDict.plugins.length; i++) { + if (myDict.plugins[i] != "." && myDict.plugins != "..") { + myAutocomplete.push({ + text: myDict.plugins[i], + displayText: myDict.plugins[i] + " | Plugin" + }); + } + } + return myAutocomplete; } - } - for (i = 0; i < myDict.plugins.length; i++) { - if (myDict.plugins[i] != "." && myDict.plugins != "..") { - myAutocomplete.push({ - text: myDict.plugins[i], - displayText: myDict.plugins[i] + " | Plugin" - }); - } - } - registerAutocompleteHelper('autocompleteHint', myAutocomplete); - console.log('Stored entries to Autocomplete dict') - CodeMirror.commands.autocomplete_items = function(cmPython) { - CodeMirror.showHint(cmPython, CodeMirror.hint.autocompleteHint); - } - }, - error: function(result) { - console.log("Error while receiving Autocomplete") + onmessage = function(event){ + let myDict = event.data; + let result = createAutocomplete(myDict); + postMessage(result); + }; + ` + ); + + worker.onmessage = function(event){ + myAutocomplete = event.data; + registerAutocompleteHelper('autocompleteHint', myAutocomplete); + console.log('Stored ' + myAutocomplete.length + ' entries to Autocomplete dict.'); + CodeMirror.commands.autocomplete_items = function(cmPython) { + CodeMirror.showHint(cmPython, CodeMirror.hint.autocompleteHint); + } + }; - } - }); + worker.postMessage(myDict); + + }, + error: function(result) { + console.log("Error while receiving Autocomplete") + + } + }).done(successCallback).fail(errorCallback); + } + fetch_autocomplete(); + }); +} + +function autocompleteSuccess(response) { + console.log("Filling autocomplete dict... 
This might take some time!"); +} + +function autocompleteError(error) { + console.warn(error); } @@ -260,7 +303,8 @@ }; }); - get_Dict(cmPython); + + get_Dict(cmPython).then(autocompleteSuccess).catch(autocompleteError); cmPython.refresh(); From 50a17eb249ef797a8a1a2acab4812c4d598f8d92 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 00:38:26 +0200 Subject: [PATCH 155/775] executor plugin: fix autocomplete dict. Previously (sub)items called "id" were fetched wrongly --- executor/webif/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/executor/webif/__init__.py b/executor/webif/__init__.py index 1395763a1..30b6b70c5 100755 --- a/executor/webif/__init__.py +++ b/executor/webif/__init__.py @@ -315,6 +315,6 @@ def get_autocomplete(self): myItems = _sh.return_items() itemList = [] for item in myItems: - itemList.append("sh."+str(item.id())+"()") + itemList.append("sh."+str(item.property.path)+"()") retValue = {'items':itemList,'plugins':plugin_list} return (json.dumps(retValue)) From 5078592efa7b230812ab1641048efc784c4ad652 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 09:20:22 +0200 Subject: [PATCH 156/775] executor plugin: fix and extend user_doc --- executor/user_doc.rst | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/executor/user_doc.rst b/executor/user_doc.rst index 7c749a259..964d35c99 100755 --- a/executor/user_doc.rst +++ b/executor/user_doc.rst @@ -16,7 +16,7 @@ executor Einführung ~~~~~~~~~~ -Das executor plugin kann genutzt werden, um **Python Code** (z.B. für **Logiken**) und **eval Ausdrücke** zu testen. +Das executor Plugin kann genutzt werden, um **Python Code** (z.B. für **Logiken**) und **eval Ausdrücke** zu testen. .. important:: @@ -35,8 +35,8 @@ Damit wird dem Plugin eine relative Pfadangabe unterhalb *var* angegeben wo Skri Webinterface ============ -Im Webinterface findet sich eine Listbox mit den auf dem Rechner gespeicherten Skripten. -Um das Skript in den Editor zu laden entweder ein Skript in der Liste einfach anklicken und auf *aus Datei laden* klicken oder +Im Webinterface findet sich eine Listbox mit den auf dem Rechner gespeicherten Skripten. +Um das Skript in den Editor zu laden, entweder ein Skript in der Liste einfach anklicken und auf *aus Datei laden* klicken oder direkt in der Liste einen Doppelklick auf die gewünschte Datei ausführen. Der Dateiname wird entsprechend der gewählten Datei gesetzt. Mit Klick auf *aktuellen Code speichern* wird der Code im konfigurierten @@ -46,7 +46,7 @@ Mit einem Klick auf *Code ausführen!* oder der Kombination Ctrl+Return wird der Das kann gerade bei Datenbank Abfragen recht lange dauern. Es kann keine Rückmeldung von SmartHomeNG abgefragt werden wie weit der Code derzeit ist. Das Ergebnis wird unten angezeigt. Solange kein Ergebnis vorliegt, steht im Ergebniskasten **... processing ...** -Mit einem Klick auf Datei löschen wird versucht die unter Dateiname angezeigte Datei ohne Rückfrage zu löschen. +Mit einem Klick auf *Datei löschen* wird versucht, die unter Dateiname angezeigte Datei ohne Rückfrage zu löschen. Anschliessend wird die Liste der Skripte aktualisiert. Beispiel Python Code @@ -55,7 +55,9 @@ Beispiel Python Code Sowohl ``logger`` als auch ``print`` funktionieren für die Ausgabe von Ergebnissen. Die Idee ist, dass Logiken mehr oder weniger 1:1 kopiert und getestet werden können. + Loggertest +---------- .. 
code-block:: python @@ -66,6 +68,7 @@ Loggertest Datenserien für ein Item ausgeben +--------------------------------- Abfragen von Daten aus dem database plugin für ein spezifisches Item: @@ -111,4 +114,20 @@ würde in folgendem Ergebnis münden: ] } -Damit die Nutzung + +Zählen der Datensätze in der Datenbank +-------------------------------------- + +Das folgende Snippet zeigt alle Datenbank-Items an und zählt die Einträge in der Datenbank. Vorsicht: Dies kann sehr lange dauern, wenn Sie eine große Anzahl von Einträgen mit Datenbankattributen haben. + +.. code-block:: python + + from lib.item import Items + items = Items.get_instance() + myfiller = " " + allItems = items.return_items() + for myItem in allItems: + if not hasattr(myItem,'db'): + continue + mycount = myItem.db('countall', 0) + print (myItem.property.name + myfiller[0:len(myfiller)-len(myItem.property.name)]+ ' - Anzahl Datensätze :'+str(mycount)) From 1ba38c930a233a5d991ee140aa4057ed24a5beb3 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 9 Jul 2023 09:20:39 +0200 Subject: [PATCH 157/775] Executor Plugin: bump version --- executor/__init__.py | 3 +-- executor/plugin.yaml | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/executor/__init__.py b/executor/__init__.py index c4f70e5b8..4e02a5cf0 100755 --- a/executor/__init__.py +++ b/executor/__init__.py @@ -40,7 +40,7 @@ class Executor(SmartPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.1.1' + PLUGIN_VERSION = '1.2.0' def __init__(self, sh): """ @@ -142,4 +142,3 @@ def init_webinterface(self): description='') return True - diff --git a/executor/plugin.yaml b/executor/plugin.yaml index 3933b87be..3f26ce297 100755 --- a/executor/plugin.yaml +++ b/executor/plugin.yaml @@ -6,13 +6,13 @@ plugin: de: 'Ausführen von Python Statements im Kontext von SmartHomeNG v1.5 und höher' en: 'Execute Python statements in the context of SmartHomeNG v1.5 and up' maintainer: bmxp - tester: nobody # Who tests this plugin? 
+ tester: onkelandy state: ready # change to ready when done with development keywords: Python eval exec code test documentation: https://www.smarthomeng.de/user/plugins/executor/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1425152-support-thread-plugin-executor - version: 1.1.1 # Plugin version + version: 1.2.0 # Plugin version sh_minversion: 1.9 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin, use f-strings for debug From f97d7929cae26e8e04c9a49cc95502ce069fbb6f Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 01:13:32 +0200 Subject: [PATCH 158/775] executor plugin: introduce popper.js tooltips for sorting --- executor/webif/templates/index.html | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index 8c9f66292..5a6b4a4d8 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -235,12 +235,17 @@ $(document).ready(function(){ - te_python = document.getElementById('pycodetext'); - te_resulttext = document.getElementById('resulttext'); - alpha_asc = document.getElementById('alpha-asc'); - alpha_desc = document.getElementById('alpha-desc'); - time_asc = document.getElementById('time-asc'); - time_desc = document.getElementById('time-desc'); + const te_python = document.getElementById('pycodetext'); + const te_resulttext = document.getElementById('resulttext'); + const alpha_asc = document.getElementById('alpha-asc'); + const alpha_desc = document.getElementById('alpha-desc'); + const time_asc = document.getElementById('time-asc'); + const time_desc = document.getElementById('time-desc'); + try { + const tooltipList = ['Sort by alphabet ascending', 'Sort by alphabet descending', 'Sort by creationtime ascending', 'Sort by creationtime descending']; + createTooltips(tooltipList); + } + catch (e) {} alpha_asc.addEventListener('click', function() { alpha_desc.classList.remove('active'); time_asc.classList.remove('active'); @@ -450,10 +455,10 @@
    {{ _('Plugin') }}     : {% if p.aliv
    - - - -
    + + + +
    From 25b5417a64d7ff9864ed2e8bac2c9cbd7aa69c5a Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 23:28:26 +0200 Subject: [PATCH 159/775] executor plugin: add two database example scripts --- executor/examples/database_count.py | 9 +++++++++ executor/examples/database_series.py | 9 +++++++++ 2 files changed, 18 insertions(+) create mode 100644 executor/examples/database_count.py create mode 100644 executor/examples/database_series.py diff --git a/executor/examples/database_count.py b/executor/examples/database_count.py new file mode 100644 index 000000000..3d80771a5 --- /dev/null +++ b/executor/examples/database_count.py @@ -0,0 +1,9 @@ +from lib.item import Items +items = Items.get_instance() +myfiller = " " +allItems = items.return_items() +for myItem in allItems: + if not hasattr(myItem,'db'): + continue + mycount = myItem.db('countall', 0) + print (myItem.property.name + myfiller[0:len(myfiller)-len(myItem.property.name)]+ ' - Anzahl Datensätze :'+str(mycount)) diff --git a/executor/examples/database_series.py b/executor/examples/database_series.py new file mode 100644 index 000000000..88a4bcbe3 --- /dev/null +++ b/executor/examples/database_series.py @@ -0,0 +1,9 @@ +import json + +def myconverter(o): +import datetime +if isinstance(o, datetime.datetime): + return o.__str__() +data = sh..series('max','1d','now') +pretty = json.dumps(data, default = myconverter, indent = 2, separators=(',', ': ')) +print(pretty) From f1df7d607e133e4a0be47cb73fe4af153a29ae2f Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 23:29:51 +0200 Subject: [PATCH 160/775] executor plugin: introduce example scripts that can be loaded in the web interface --- executor/webif/__init__.py | 43 ++++++++++++++++--------- executor/webif/templates/index.html | 49 +++++++++++++++++------------ 2 files changed, 57 insertions(+), 35 deletions(-) diff --git a/executor/webif/__init__.py b/executor/webif/__init__.py index 30b6b70c5..c964179dd 100755 --- a/executor/webif/__init__.py +++ b/executor/webif/__init__.py @@ -233,15 +233,19 @@ def get_code(self, filename=''): """loads and returns the given filename from the defined script path""" self.logger.debug(f"get_code called with {filename=}") try: - if self.plugin.executor_scripts is not None and filename != '': - filepath = os.path.join(self.plugin.executor_scripts,filename) - self.logger.debug(f"{filepath=}") + if (self.plugin.executor_scripts is not None and filename != '') or filename.startswith('examples/'): + if filename.startswith('examples/'): + filepath = os.path.join(self.plugin.get_plugin_dir(),filename) + self.logger.debug(f"Getting file from example path {filepath=}") + else: + filepath = os.path.join(self.plugin.executor_scripts,filename) + self.logger.debug(f"Getting file from script path {filepath=}") code_file = open(filepath) data = code_file.read() code_file.close() return data - except: - self.logger.error(f"{filepath} could not be read") + except Exception as e: + self.logger.error(f"{filepath} could not be read: {e}") return f"### {filename} could not be read ###" @cherrypy.expose @@ -283,19 +287,28 @@ def delete_file(self, filename=''): @cherrypy.expose def get_filelist(self): - """returns all filenames from the defined script path with suffix ``.py``""" - + """returns all filenames from the defined script path with suffix ``.py``, newest first""" + files = [] + files2 = [] + subdir = "{}/examples".format(self.plugin.get_plugin_dir()) + self.logger.debug(f"list files in plugin examples {subdir}") + mtime = lambda f: 
os.stat(os.path.join(subdir, f)).st_mtime + files = list(reversed(sorted(os.listdir(subdir), key=mtime))) + files = [f for f in files if os.path.isfile(os.path.join(subdir,f))] + files = ["examples/{}".format(f) for f in files if f.endswith(".py")] + #files = '\n'.join(f for f in files) + self.logger.debug(f"Examples Scripts {files}") if self.plugin.executor_scripts is not None: subdir = self.plugin.executor_scripts self.logger.debug(f"list files in {subdir}") - files = os.listdir(subdir) - files = [f for f in files if os.path.isfile(os.path.join(subdir,f))] - files = [f for f in files if f.endswith(".py")] - files = '\n'.join(f for f in files) - self.logger.debug(f"{files=}\n\n") - return files - - return '' + files2 = list(reversed(sorted(os.listdir(subdir), key=mtime))) + files2 = [f for f in files2 if os.path.isfile(os.path.join(subdir,f))] + files2 = [f for f in files2 if f.endswith(".py")] + #files = '\n'.join(f for f in files) + self.logger.debug(f"User scripts {files2}") + + return json.dumps(files + files2) + @cherrypy.expose def get_autocomplete(self): diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index 5a6b4a4d8..fbf7bc3ea 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -34,26 +34,31 @@ function get_filelist(order) { console.log("getting list of files with order " + order); $.get('get_filelist', {}, function(data){ - $('#filelist').empty(); - var lines; - if (order == 'alpha-asc') - lines = data.split(/\r\n|\n\r|\n|\r/).sort(); - else if (order == 'alpha-desc') - lines = data.split(/\r\n|\n\r|\n|\r/).sort().reverse(); - else if (order == 'time-desc') - lines = data.split(/\r\n|\n\r|\n|\r/).reverse(); - else - lines = data.split(/\r\n|\n\r|\n|\r/); - for (line in lines) { - $('#filelist').append(new Option(lines[line], lines[line])); - }; - var size; - if (max_script_entries == 0) { size = 0; } - else if (lines.length > max_script_entries) { size = max_script_entries; } - else { size = lines.length; }; - console.log("Size: ",size); - $('#filelist').attr('size',size); - console.log('Data:'+data); + $('#filelist').empty(); + var lines; + data = JSON.parse(data); + if (order == 'alpha-asc') + lines = data.sort(); + else if (order == 'alpha-desc') + lines = data.sort().reverse(); + else if (order == 'time-desc') + lines = data.reverse(); + else + lines = data; + for (line in lines) { + $('#filelist').append(new Option(lines[line], lines[line])); + }; + $('#filelist > option').each(function(){ + if ($(this).attr('value').indexOf('examples/') == 0) + $(this).addClass('example_file'); + }); + var size; + if (max_script_entries == 0) { size = 0; } + else if (lines.length > max_script_entries) { size = max_script_entries; } + else { size = lines.length; }; + console.log("Size: " + size); + console.log('Data: ' + data); + $('#filelist').attr('size',size); }); }; @@ -416,6 +421,10 @@ border: none; filter: invert(8%) sepia(100%) saturate(6481%) hue-rotate(246deg) brightness(102%) contrast(143%); } +.example_file { + font-style: italic; + color: grey; +} {% endblock pluginstyles %} {% block content -%} From 81326c1f5ae2f392c24625e4adad4acd4f64c1e5 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 23:36:53 +0200 Subject: [PATCH 161/775] executor plugin: change order of file list --- executor/webif/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/executor/webif/__init__.py b/executor/webif/__init__.py index c964179dd..26a6f9e96 100755 --- 
a/executor/webif/__init__.py +++ b/executor/webif/__init__.py @@ -307,7 +307,7 @@ def get_filelist(self): #files = '\n'.join(f for f in files) self.logger.debug(f"User scripts {files2}") - return json.dumps(files + files2) + return json.dumps(files2 + files) @cherrypy.expose From dbeb6440822fc77253319e634d3af31ec642eabb Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 23:38:01 +0200 Subject: [PATCH 162/775] executor plugin: update user_doc --- executor/user_doc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/executor/user_doc.rst b/executor/user_doc.rst index 964d35c99..53238c335 100755 --- a/executor/user_doc.rst +++ b/executor/user_doc.rst @@ -16,7 +16,7 @@ executor Einführung ~~~~~~~~~~ -Das executor Plugin kann genutzt werden, um **Python Code** (z.B. für **Logiken**) und **eval Ausdrücke** zu testen. +Das executor Plugin kann genutzt werden, um **Python Code** (z.B. für **Logiken**) zu testen. .. important:: From 24b3c985a285db7939425113abc9bd3b801fa602 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 10 Jul 2023 23:41:18 +0200 Subject: [PATCH 163/775] executor plugin: add new log levels such as notice, dbglow, etc. --- executor/webif/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/executor/webif/__init__.py b/executor/webif/__init__.py index 26a6f9e96..512d899c9 100755 --- a/executor/webif/__init__.py +++ b/executor/webif/__init__.py @@ -201,7 +201,7 @@ def exec_code(self, eline, reload=None): :return: result of the evaluation """ result = "" - stub_logger = Stub(warning=print, info=print, debug=print, error=print) + stub_logger = Stub(warning=print, info=print, debug=print, error=print, criticl=print, notice=print, dbghigh=print, dbgmed=print, dbglow=print) g = {} l = { 'sh': self.plugin.get_sh(), From 231f5ac3489da366058218d6ee12454de7f956a6 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Tue, 11 Jul 2023 10:18:57 +0200 Subject: [PATCH 164/775] executor plugin: improve button enable/disable --- executor/webif/templates/index.html | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index fbf7bc3ea..371ca5c99 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -65,6 +65,14 @@ function selectFile(selectObject) { var value = selectObject.value; $('#savefilename').val(value); + const del_button = document.querySelector('#deletefile'); + const load_button = document.querySelector('#loadfilename'); + load_button.removeAttribute("disabled"); + if (value.startsWith("examples/")) + del_button.setAttribute("disabled", "disabled"); + else + del_button.removeAttribute("disabled"); + console.log("selected ",value); }; @@ -240,6 +248,10 @@ $(document).ready(function(){ + const load_button = document.querySelector('#loadfilename'); + const del_button = document.querySelector('#deletefile'); + load_button.setAttribute("disabled", "disabled"); + del_button.setAttribute("disabled", "disabled"); const te_python = document.getElementById('pycodetext'); const te_resulttext = document.getElementById('resulttext'); const alpha_asc = document.getElementById('alpha-asc'); From 168b0f7f4b19afc491e7a893117867f87a6d2856 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 11 Jul 2023 21:26:03 +0200 Subject: [PATCH 165/775] DB_ADDON: - diverse fixes - improve determination of serie_zaehlerstand - improve determination of serie_verbrauch - fix in user_doc.rst - bump to 1.2.1 --- db_addon/__init__.py | 407 
++++++++++++++--------------- db_addon/item_attributes_master.py | 2 +- db_addon/plugin.yaml | 4 +- db_addon/user_doc.rst | 8 +- 4 files changed, 200 insertions(+), 221 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 23320e1b3..8eb7ee15b 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -54,7 +54,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.2.0' + PLUGIN_VERSION = '1.2.1' def __init__(self, sh): """ @@ -194,8 +194,9 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle functions 'minmax on-change' in format 'minmax_timeframe_func' items like 'minmax_heute_max', 'minmax_heute_min', 'minmax_woche_max', 'minmax_woche_min' timeframe = convert_timeframe(db_addon_fct_vars[1]) func = db_addon_fct_vars[2] if db_addon_fct_vars[2] in ALLOWED_MINMAX_FUNCS else None + start = end = 0 log_text = 'minmax_timeframe_func' - required_params = [func, timeframe] + required_params = [func, timeframe, start, end] elif db_addon_fct in HISTORIE_ATTRIBUTES_LAST: # handle functions 'minmax_last' in format 'minmax_last_timedelta|timeframe_function' like 'minmax_last_24h_max' @@ -213,23 +214,23 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: start = to_int(db_addon_fct_vars[2][-1]) # 1, 2, 3, ... end = start log_text = 'minmax_timeframe_timedelta_func' - required_params = [func, timeframe, start] + required_params = [func, timeframe, start, end] elif db_addon_fct in ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME: # handle functions 'zaehlerstand' in format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_heute_minus1' - func = 'max' + # func = 'max' timeframe = convert_timeframe(db_addon_fct_vars[1]) start = to_int(db_addon_fct_vars[2][-1]) end = start log_text = 'zaehlerstand_timeframe_timedelta' - required_params = [timeframe, start] + required_params = [timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ONCHANGE: # handle functions 'verbrauch on-change' items in format 'verbrauch_timeframe' like 'verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr' timeframe = convert_timeframe(db_addon_fct_vars[1]) - func = 'max' + start = end = 0 log_text = 'verbrauch_timeframe' - required_params = [timeframe] + required_params = [timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_TIMEFRAME: # handle functions 'verbrauch on-demand' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' @@ -262,8 +263,9 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle functions 'tagesmitteltemperatur on-change' items in format 'tagesmitteltemperatur_timeframe' like 'tagesmitteltemperatur_heute', 'tagesmitteltemperatur_woche', 'tagesmitteltemperatur_monat', 'tagesmitteltemperatur_jahr' timeframe = convert_timeframe(db_addon_fct_vars[1]) func = 'max' + start = end = 0 log_text = 'tagesmitteltemperatur_timeframe' - required_params = [timeframe] + required_params = [timeframe, start, end] elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: # handle 'tagesmitteltemperatur_timeframe_timedelta' like 'tagesmitteltemperatur_heute_minus1' @@ -282,7 +284,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: end = 0 group = convert_timeframe(db_addon_fct_vars[4][len(db_addon_fct_vars[4]) - 1]) log_text = 'serie_minmax_timeframe_func_start|group' - required_params = [func, timeframe, start, group] + required_params = [func, timeframe, start, 
end, group] elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: # handle functions 'serie_zaehlerstand' in format 'serie_zaehlerstand_timeframe_start|group' like 'serie_zaehlerstand_tag_30d' @@ -311,7 +313,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: group = 'day', group2 = 'month' log_text = 'serie_xxsumme_timeframe_count|group' - required_params = [start] + required_params = [func, timeframe, start, end, group, group2] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_D: # handle 'serie_tagesmittelwert_count|group' like 'serie_tagesmittelwert_0d' => Tagesmittelwert der letzten 0 Tage (also heute) @@ -737,15 +739,7 @@ def execute_startup_items(self) -> None: self.logger.info(f"{len(relevant_item_list)} items will be calculated at startup.") for item in relevant_item_list: - item_config = self.get_item_config(item) - db_addon_fct = item_config['db_addon_fct'] - - # handle on-change items - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - self.item_queue.put((item_config['database_item'], None)) - # handle on-demand items - else: - self.item_queue.put(item) + self.item_queue.put(item) self.startup_finished = True @@ -832,12 +826,6 @@ def handle_ondemand(self, item: Item) -> None: if self.ondemand_debug: self.logger.debug(f"{db_addon_fct=} will _query_item with {params=}.") - # handle all on_change functions - if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: - if self.ondemand_debug: - self.logger.debug(f"on-change function detected; will be calculated by next change of database item") - return - # handle item starting with 'verbrauch_' if db_addon_fct in ALL_VERBRAUCH_ATTRIBUTES: result = self._handle_verbrauch(params) @@ -849,6 +837,14 @@ def handle_ondemand(self, item: Item) -> None: elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: result = self._handle_zaehlerstand(params) + # handle 'serie_zaehlerstand' + elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: + result = self._handle_zaehlerstand_serie(params) + + # handle 'serie_verbrauch' + elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: + result = self._handle_verbrauch_serie(params) + # handle 'serie_tagesmittelwert_stunde_30_0d' and 'serie_tagesmittelwert_tag_stunde_30d' elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: result = self._prepare_temperature_list(**params) @@ -898,7 +894,7 @@ def handle_ondemand(self, item: Item) -> None: item_config.update({'value': result}) item(result, self.get_shortname()) - def handle_onchange(self, updated_item: Item, value: float = None) -> None: + def handle_onchange(self, updated_item: Item, value: float) -> None: """ Get item and item value for which an update has been detected, fill cache dicts and set item value. @@ -924,20 +920,17 @@ def handle_minmax(): self.logger.debug(f"Item={updated_item.path()} with {func=} and {timeframe=} not in cache dict. Query database.") query_params = {'func': func, 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} - db_value = self._query_item(**query_params)[0][1] - - if db_value is not None: # Wenn Werte aus DB vorliegt, nutze den - cached_value = db_value - init = True - elif value is not None: # Wenn kein Wert aus DB vorliegt, aber ein aktueller Wert, nutze den (ggf. 
bei ganz neuen Items, bei denen noch kein Eintrag in der DB ist) - cached_value = value - else: # Wenn gar kein Wert verfügbar ist, Abbruch + cached_value = self._query_item(**query_params)[0][1] + + if cached_value is None: if self.onchange_debug: - self.logger.debug(f"no values available:{db_value=}, {value}. Abort...") + self.logger.debug(f"no values available:{cached_value=}, {value}. Abort...") return - # if value not given -> read at startup - if value is None or init: + init = True + + # if value not given -> read + if init: if self.onchange_debug: self.logger.debug(f"initial {func} value for {timeframe=} of Item={item.path()} with will be set to {cached_value}") cache_dict[database_item][func] = cached_value @@ -964,7 +957,6 @@ def handle_minmax(): def handle_verbrauch(): cache_dict = self.previous_values[timeframe] - _value = value if self.onchange_debug: self.logger.debug(f"'verbrauch' item {updated_item.path()} with {func=} and {value=} detected. Check for update of cache_dicts {cache_dict=} and item value.") @@ -973,40 +965,22 @@ def handle_verbrauch(): cached_value = cache_dict.get(database_item) if cached_value is None: if self.onchange_debug: - self.logger.debug(f"Item={updated_item.path()} with _func={func} and timeframe={timeframe} not in cache dict. recent value={cached_value}.") + self.logger.debug(f"Item={updated_item.path()} with _func={func} and timeframe={timeframe} not in cache dict.") - # try to get max value of last timeframe, assuming that this is the value at end of timeframe - query_params = {'func': 'max', 'database_item': database_item, 'timeframe': timeframe, 'start': 1, 'end': 1, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} - db_value = self._query_item(**query_params)[0][1] + # try to get most recent value of last timeframe, assuming that this is the value at end of last timeframe + query_params = {'database_item': database_item, 'timeframe': timeframe, 'start': 1, 'end': 1, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} + cached_value = self._handle_zaehlerstand(query_params) - if db_value is None: - self.logger.info(f"Value max value for last {timeframe} available from database. Try to get min value of current {timeframe}.") - - # try to get min value of current timeframe, assuming that this is the value at end of timeframe - query_params = {'func': 'min', 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} - db_value = self._query_item(**query_params)[0][1] - - if db_value is None: - self.logger.info(f"min value for current {timeframe} not available from database. Abort calculation.") + if cached_value is None: + self.logger.info(f"Most recent value for last {timeframe} not available in database. Abort calculation.") return - cache_dict[database_item] = db_value - cached_value = db_value + cache_dict[database_item] = cached_value if self.onchange_debug: - self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. 
Value={cached_value} has been added.") - - # get last value from db, if now updated value is given (init) - if _value is None: - # try to get max value of current timeframe - query_params = {'func': 'max', 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} - _value = self._query_item(**query_params)[0][1] - - if _value is None: - self.logger.info(f"max value for current {timeframe} not available from database. Abort calculation.") - return + self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. Value={db_value} has been added.") # calculate value, set item value, put data into plugin_item_dict - _new_value = _value - cached_value + _new_value = value - cached_value return _new_value if isinstance(_new_value, int) else round(_new_value, 1) def handle_tagesmitteltemp(): @@ -1015,7 +989,7 @@ def handle_tagesmitteltemp(): return if self.onchange_debug: - self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") + self.logger.debug(f"called with updated_item={updated_item.path()} and value={value}.") relevant_item_list = set(self.get_item_list('database_item', updated_item)) & set(self.get_item_list('cycle', 'on-change')) @@ -1028,7 +1002,7 @@ def handle_tagesmitteltemp(): db_addon_fct = item_config['db_addon_fct'] database_item = item_config['database_item'] timeframe = item_config['query_params']['timeframe'] - func = item_config['query_params']['func'] + func = item_config['query_params'].get('func') ignore_value_list = item_config['query_params'].get('ignore_value_list') new_value = None @@ -1352,7 +1326,7 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: return value_end # get value for start and check it; - query_params.update({'func': 'last', 'start': end+1, 'end': end+1}) + query_params.update({'func': 'last', 'start': start, 'end': start}) value_start = self._query_item(**query_params)[0][1] if self.prepare_debug: self.logger.debug(f"{value_start=}") @@ -1362,12 +1336,12 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: self.logger.debug(f"Error occurred during query. Return.") return - if value_start == 0: - self.logger.info(f"No DB Entry found for requested start date. Looking for next DB entry.") - query_params.update({'func': 'next', 'start': start+1}) + if not value_start: + self.logger.info(f"No DB Entry found for requested start date. 
Looking for next recent DB entry.") + query_params.update({'func': 'next'}) value_start = self._query_item(**query_params)[0][1] if self.prepare_debug: - self.logger.debug(f"next available value is {value_start=}") + self.logger.debug(f"next recent value is {value_start=}") if not value_start: value_start = 0 @@ -1387,7 +1361,22 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: return consumption - def _handle_zaehlerstand(self, query_params: dict) -> Union[float, None]: + def _handle_verbrauch_serie(self, query_params: dict) -> list: + """Ermittlung einer Serie von Verbräuchen in einem Zeitraumes für x Zeiträume""" + + series = [] + database_item = query_params['database_item'] + timeframe = query_params['timeframe'] + start = query_params['start'] + + for i in range(1, start): + value = self._handle_verbrauch({'database_item': database_item, 'timeframe': timeframe, 'start': i + 1, 'end': i}) + ts_start, ts_end = get_start_end_as_timestamp(timeframe, i, i + 1) + series.append([ts_end, value]) + + return series + + def _handle_zaehlerstand(self, query_params: dict) -> Union[float, int, None]: """ Ermittlung des Zählerstandes zum Ende eines Zeitraumes @@ -1403,17 +1392,24 @@ def _handle_zaehlerstand(self, query_params: dict) -> Union[float, None]: if self.prepare_debug: self.logger.debug(f"called with {query_params=}") - start = query_params['start'] - end = query_params['end'] - # get last value of timeframe - query_params.update({'func': 'last', 'start': start, 'end': end}) + query_params.update({'func': 'last'}) last_value = self._query_item(**query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"{last_value=}") + + if last_value is None: + if self.prepare_debug: + self.logger.debug(f"Error occurred during query. Return.") + return - if last_value == 0: + if not last_value: # get last value (next) before timeframe + self.logger.info(f"No DB Entry found for requested start date. 
Looking for next recent DB entry.") query_params.update({'func': 'next'}) last_value = self._query_item(**query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"next recent value is {last_value=}") if isinstance(last_value, float): if last_value.is_integer(): @@ -1423,6 +1419,21 @@ def _handle_zaehlerstand(self, query_params: dict) -> Union[float, None]: return last_value + def _handle_zaehlerstand_serie(self, query_params: dict) -> list: + """Ermittlung einer Serie von Zählerständen zum Ende eines Zeitraumes für x Zeiträume""" + + series = [] + database_item = query_params['database_item'] + timeframe = query_params['timeframe'] + start = query_params['start'] + + for i in range(1, start): + value = self._handle_zaehlerstand({'database_item': database_item, 'timeframe': timeframe, 'start': i, 'end': i}) + ts_start = get_start_end_as_timestamp(timeframe, i, i)[0] + series.append([ts_start, value]) + + return series + def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, month: Union[int, str] = None) -> Union[int, None]: """ Query database for kaeltesumme for given year or year/month @@ -1478,7 +1489,7 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, # get raw data as list if self.prepare_debug: - self.logger.debug("Try to get raw data") + self.logger.debug("try to get raw data") raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') if self.execute_debug: self.logger.debug(f"raw_value_list={raw_data=}") @@ -2129,70 +2140,38 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int :param ignore_value_list: list of comparison operators for val_num, which will be applied during query :param use_oldest_entry: if start is prior to oldest entry, oldest entry will be used - :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for + :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for no-data in DB """ - def _handle_query_result(query_result) -> list: - """ - Handle query result containing list - """ - - # if query delivers None, abort - if query_result is None: - # if query delivers None, abort - self.logger.error(f"Error occurred during _query_item. Aborting...") - _result = [[None, None]] - elif len(query_result) == 0: - _result = [[0, 0]] - self.logger.info(f" No values for item in requested timeframe in database found.") - else: - _result = [] - for element in query_result: - timestamp = element[0] - value = element[1] - if timestamp and value is not None: - _result.append([timestamp, round(value, 1)]) - if not _result: - _result = [[None, None]] - - return _result - if self.prepare_debug: self.logger.debug(f"called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") # set default result - result = [[None, None]] + default_result = [[None, None]] # check correctness of timeframe if timeframe not in ALLOWED_QUERY_TIMEFRAMES: self.logger.error(f"Requested {timeframe=} for item={database_item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. 
Query cancelled.") - return result + return default_result - # check start / end for being int - if isinstance(start, str) and start.isdigit(): - start = int(start) - if isinstance(end, str) and end.isdigit(): - end = int(end) - if not isinstance(start, int) and not isinstance(end, int): - return result + # define start and end of query as timestamp in microseconds + ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) + oldest_log = int(self._get_oldest_log(database_item)) - # check correctness of start / end - if start < end: - self.logger.warning(f"Requested {start=} for item={database_item.path()} is not valid since {start=} < {end=}. Query cancelled.") - return result + # check correctness of ts_start / ts_end + if ts_start is None: + ts_start = oldest_log + if ts_end is None or ts_start > ts_end: + if self.prepare_debug: + self.logger.debug(f"{ts_start=}, {ts_end=}") + self.logger.warning(f"Requested {start=} for item={database_item.path()} is not valid since {start=} < {end=} or end not given. Query cancelled.") + return default_result # define item_id item_id = self._get_itemid(database_item) if not item_id: self.logger.error(f"ItemId for item={database_item.path()} not found. Query cancelled.") - return result - - # define start and end of query as timestamp in microseconds - ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) - oldest_log = int(self._get_oldest_log(database_item)) - - if start is None: - ts_start = oldest_log + return default_result if self.prepare_debug: self.logger.debug(f"Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") @@ -2200,7 +2179,7 @@ def _handle_query_result(query_result) -> list: # check if values for end time and start time are in database if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) self.logger.info(f"Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return result + return default_result if ts_start < oldest_log: if self.use_oldest_entry or use_oldest_entry: @@ -2208,14 +2187,36 @@ def _handle_query_result(query_result) -> list: ts_start = oldest_log else: self.logger.info(f"Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return result + return default_result + # prepare and do query query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} - result = _handle_query_result(self._query_log_timestamp(**query_params)) + query_result = self._query_log_timestamp(**query_params) + + # post process query_result + if query_result is None: + self.logger.error(f"Error occurred during _query_item. 
Aborting...") + return default_result + + if len(query_result) == 0: + self.logger.info(f"No values for item in requested timeframe in database found.") + return [[0, 0]] + + result = [] + for element in query_result: + timestamp, value = element + if timestamp and value is not None: + if isinstance(value, float): + value = round(value, 1) + result.append([timestamp, value]) if self.prepare_debug: self.logger.debug(f"value for item={database_item.path()} with {query_params=}: {result}") + if not result: + self.logger.info(f"No values for item in requested timeframe in database found.") + return default_result + return result def _init_cache_dicts(self) -> None: @@ -2643,7 +2644,7 @@ def timestamp_to_timestring(timestamp: int) -> str: def convert_timeframe(timeframe: str) -> str: """Convert timeframe""" - convertion = { + lookup = { 'tag': 'day', 'heute': 'day', 'woche': 'week', @@ -2658,7 +2659,7 @@ def convert_timeframe(timeframe: str) -> str: 'y': 'year' } - return convertion.get(timeframe) + return lookup.get(timeframe) def convert_duration(timeframe: str, window_dur: str) -> int: @@ -2672,7 +2673,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: _w_in_m = _w_in_y / _m_in_y _d_in_m = _d_in_y / _m_in_y - conversion = { + lookup = { 'hour': {'hour': 1, 'day': _h_in_d, 'week': _h_in_d * _d_in_w, @@ -2705,7 +2706,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: } } - return round(int(conversion[timeframe][window_dur]), 0) + return to_int(lookup[timeframe][window_dur]) def count_to_start(count: int = 0, end: int = 0): @@ -2714,7 +2715,7 @@ def count_to_start(count: int = 0, end: int = 0): return end + count, end -def get_start_end_as_timestamp(timeframe: str, start: int, end: int) -> tuple: +def get_start_end_as_timestamp(timeframe: str, start: Union[int, str, None], end: Union[int, str, None]) -> tuple: """ Provides start and end as timestamp in microseconds from timeframe with start and end @@ -2726,104 +2727,84 @@ def get_start_end_as_timestamp(timeframe: str, start: int, end: int) -> tuple: """ - return datetime_to_timestamp(get_start(timeframe, start)) * 1000, datetime_to_timestamp(get_end(timeframe, end)) * 1000 - - -def get_start(timeframe: str, start: int) -> datetime: - """ - Provides start as datetime - - :param timeframe: timeframe as week, month, year - :param start: beginning timeframe in x timeframes from now - - """ - - if start is None: - start = 0 - - if timeframe == 'week': - _dt_start = week_beginning(start) - elif timeframe == 'month': - _dt_start = month_beginning(start) - elif timeframe == 'year': - _dt_start = year_beginning(start) - else: - _dt_start = day_beginning(start) - - return _dt_start - - -def get_end(timeframe: str, end: int) -> datetime: - """ - Provides end as datetime - - :param timeframe: timeframe as week, month, year - :param end: end of timeframe in x timeframes from now - - """ - - if timeframe == 'week': - _dt_end = week_end(end) - elif timeframe == 'month': - _dt_end = month_end(end) - elif timeframe == 'year': - _dt_end = year_end(end) - else: - _dt_end = day_end(end) - - return _dt_end - - -def year_beginning(delta: int = 0) -> datetime: - """provides datetime of beginning of year of today minus x years""" - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(month=1, day=1) - relativedelta(years=delta) - - -def year_end(delta: int = 0) -> datetime: - """provides datetime of end of year of today minus x years""" - - return year_beginning(delta) + 
relativedelta(years=1) + def get_start() -> datetime: + if timeframe == 'week': + return _week_beginning() + elif timeframe == 'month': + return _month_beginning() + elif timeframe == 'year': + return _year_beginning() + else: + return _day_beginning() + + def get_end() -> datetime: + if timeframe == 'week': + return _week_end() + elif timeframe == 'month': + return _month_end() + elif timeframe == 'year': + return _year_end() + else: + return _day_end() + def _year_beginning(delta: int = start) -> datetime: + """provides datetime of beginning of year of today minus x years""" -def month_beginning(delta: int = 0) -> datetime: - """provides datetime of beginning of month minus x month""" + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt.replace(month=1, day=1) - relativedelta(years=delta) - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(day=1) - relativedelta(months=delta) + def _year_end(delta: int = end) -> datetime: + """provides datetime of end of year of today minus x years""" + return year_beginning(delta) + relativedelta(years=1) -def month_end(delta: int = 0) -> datetime: - """provides datetime of end of month minus x month""" + def _month_beginning(delta: int = start) -> datetime: + """provides datetime of beginning of month minus x month""" - return month_beginning(delta) + relativedelta(months=1) + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt.replace(day=1) - relativedelta(months=delta) + def _month_end(delta: int = end) -> datetime: + """provides datetime of end of month minus x month""" -def week_beginning(delta: int = 0) -> datetime: - """provides datetime of beginning of week minus x weeks""" + return month_beginning(delta) + relativedelta(months=1) - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) + def _week_beginning(delta: int = start) -> datetime: + """provides datetime of beginning of week minus x weeks""" + _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) + return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) -def week_end(delta: int = 0) -> datetime: - """provides datetime of end of week minus x weeks""" + def _week_end(delta: int = end) -> datetime: + """provides datetime of end of week minus x weeks""" - return week_beginning(delta) + relativedelta(days=7) + return week_beginning(delta) + relativedelta(days=7) + def _day_beginning(delta: int = start) -> datetime: + """provides datetime of beginning of today minus x days""" -def day_beginning(delta: int = 0) -> datetime: - """provides datetime of beginning of today minus x days""" + return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) - return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) + def _day_end(delta: int = end) -> datetime: + """provides datetime of end of today minus x days""" + return day_beginning(delta) + relativedelta(days=1) -def day_end(delta: int = 0) -> datetime: - """provides datetime of end of today minus x days""" + if isinstance(start, str) and start.isdigit(): + start = int(start) + if isinstance(start, int): + ts_start = datetime_to_timestamp(get_start()) * 1000 + else: + ts_start = None - return day_beginning(delta) + 
relativedelta(days=1) + if isinstance(end, str) and end.isdigit(): + end = int(end) + if isinstance(end, int): + ts_end = datetime_to_timestamp(get_end()) * 1000 + else: + ts_end = None + return ts_start, ts_end def datetime_to_timestamp(dt: datetime) -> int: """Provides timestamp from given datetime""" diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py index 4c08b54ec..2be743883 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -31,7 +31,7 @@ 'verbrauch_woche': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, 'verbrauch_monat': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, 'verbrauch_jahr': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, - 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)'}, + 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index a8b103016..8483f8909 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.2.0 # Plugin version (must match the version specified in __init__.py) + version: 1.2.1 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -192,7 +192,7 @@ item_attributes: - Verbrauch in der aktuellen Woche - Verbrauch im aktuellen Monat - Verbrauch im aktuellen Jahr - - Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach) + - Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor) - Verbrauch vorgestern (heute -2 Tage) - Verbrauch heute -3 Tage - Verbrauch heute -4 Tage diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst index d6ed6a49b..6f7978e1d 100644 --- a/db_addon/user_doc.rst +++ b/db_addon/user_doc.rst @@ -55,7 +55,6 @@ Bsp: db_addon_fct: 
heute_minus1_avg db_addon_database_item: 'temperatur' -| Anforderungen ============= @@ -68,7 +67,7 @@ Die Konfiguration des DatabaseAddon-Plugin erfolgt automatisch bei Start. Hinweis: Das Plugin selbst ist aktuell nicht multi-instance fähig. Das bedeutet, dass das Plugin aktuell nur eine Instanz des Database-Plugin abgebunden werden kann. -| + Konfiguration ============= @@ -93,7 +92,7 @@ Dazu folgenden Block am Ende der Datei */etc/mysql/my.cnf* einfügen bzw den exi wait_timeout = 28800 interactive_timeout = 28800 -| + Hinweise ======== @@ -139,8 +138,8 @@ Verbrauch Soll bspw. der Verbrauch von Wasser ausgewertet werden, so ist dies wie folgt möglich: -.. code-block:: yaml +.. code-block:: yaml wasserzaehler: zaehlerstand: type: num @@ -162,7 +161,6 @@ minmax Soll bspw. die minimalen und maximalen Temperaturen ausgewertet werden, kann dies so umgesetzt werden: .. code-block:: yaml - temperature: aussen: nord: From e5bd5575b59bf21d7283856b66c75bba6dc19bc4 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Tue, 11 Jul 2023 21:53:58 +0200 Subject: [PATCH 166/775] DB_ADDON: - diverse fixes - improve determination of serie_zaehlerstand - improve determination of serie_verbrauch - fix in user_doc.rst - bump to 1.2.1 --- db_addon/__init__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 8eb7ee15b..4e661c5e1 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -977,7 +977,7 @@ def handle_verbrauch(): cache_dict[database_item] = cached_value if self.onchange_debug: - self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. Value={db_value} has been added.") + self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. 
Value={cached_value} has been added.") # calculate value, set item value, put data into plugin_item_dict _new_value = value - cached_value @@ -2146,6 +2146,8 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int if self.prepare_debug: self.logger.debug(f"called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") + # called with func='min', item=env.host_rpi.temperature, timeframe='day', start=0, end=0, group=None, group2=None, ignore_value_list=['!= 0'] + # set default result default_result = [[None, None]] @@ -2756,7 +2758,7 @@ def _year_beginning(delta: int = start) -> datetime: def _year_end(delta: int = end) -> datetime: """provides datetime of end of year of today minus x years""" - return year_beginning(delta) + relativedelta(years=1) + return _year_beginning(delta) + relativedelta(years=1) def _month_beginning(delta: int = start) -> datetime: """provides datetime of beginning of month minus x month""" @@ -2767,7 +2769,7 @@ def _month_beginning(delta: int = start) -> datetime: def _month_end(delta: int = end) -> datetime: """provides datetime of end of month minus x month""" - return month_beginning(delta) + relativedelta(months=1) + return _month_beginning(delta) + relativedelta(months=1) def _week_beginning(delta: int = start) -> datetime: """provides datetime of beginning of week minus x weeks""" @@ -2778,7 +2780,7 @@ def _week_beginning(delta: int = start) -> datetime: def _week_end(delta: int = end) -> datetime: """provides datetime of end of week minus x weeks""" - return week_beginning(delta) + relativedelta(days=7) + return _week_beginning(delta) + relativedelta(days=7) def _day_beginning(delta: int = start) -> datetime: """provides datetime of beginning of today minus x days""" @@ -2788,7 +2790,7 @@ def _day_beginning(delta: int = start) -> datetime: def _day_end(delta: int = end) -> datetime: """provides datetime of end of today minus x days""" - return day_beginning(delta) + relativedelta(days=1) + return _day_beginning(delta) + relativedelta(days=1) if isinstance(start, str) and start.isdigit(): start = int(start) @@ -2806,6 +2808,7 @@ def _day_end(delta: int = end) -> datetime: return ts_start, ts_end + def datetime_to_timestamp(dt: datetime) -> int: """Provides timestamp from given datetime""" From c62414eb929861a97a78a19758210abbbc391172 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Tue, 11 Jul 2023 22:24:21 +0200 Subject: [PATCH 167/775] executor plugin: add additional examples --- executor/examples/check_device_presence.py | 14 ++++ executor/examples/check_items.py | 95 ++++++++++++++++++++++ 2 files changed, 109 insertions(+) create mode 100644 executor/examples/check_device_presence.py create mode 100644 executor/examples/check_items.py diff --git a/executor/examples/check_device_presence.py b/executor/examples/check_device_presence.py new file mode 100644 index 000000000..c7dfcc743 --- /dev/null +++ b/executor/examples/check_device_presence.py @@ -0,0 +1,14 @@ +import os + +with os.popen('ip neigh show') as result: + # permanent, noarp, reachable, stale, none, incomplete, delay, probe, failed + ip = '192.168.10.56' + mac = "b4:b5:2f:ce:6d:29" + value = False + lines = str(result.read()).splitlines() + for line in lines: + if (ip in line or mac in line) and ("REACHABLE" in line or "STALE" in line): + value = True + break + #sh.devices.laptop.status(value)​ + print(f"set item to {value}") \ No newline at end of file diff --git a/executor/examples/check_items.py 
b/executor/examples/check_items.py new file mode 100644 index 000000000..8a9a7fb07 --- /dev/null +++ b/executor/examples/check_items.py @@ -0,0 +1,95 @@ +""" +given following items within a yaml: + + +MyItem: + MyChildItem: + type: num + initial_value: 12 + MyGrandchildItem: + type: str + initial_value: "foo" + +Within a logic it is possible to set the value of MyChildItem to 42 with +``sh.MyItem.MyChildItem(42)`` and retrieve the Items value with +``value = sh.MyItem.MyChildItem()`` + +Often beginners forget the parentheses and instead write +``sh.MyItem.MyChildItem = 42`` when they really intend to assign the value ``42`` +to the item or write ``value = sh.MyItem.MyChildItem`` when they really want to +retrieve the item's value. + +But using ``sh.MyItem.MyChildItem = 42`` destroys the structure here and makes +it impossible to retrieve the value of the child +``MyItem.MyChildItem.MyGrandchildItem`` +Alike, an instruction as ``value = sh.MyItem.MyChildItem`` will not assign the +value of ``sh.MyItem.MyChildItem`` but assign a reference to the item object +``sh.MyItem.MyChildItem`` + +It is not possible with Python to intercept an assignment to a variable or an +objects' attribute. The only thing one can do is search all items for a +mismatching item type. + +This logic checks all items returned by SmartHomeNG, and if it encounters one +which seems to be damaged like described before, it attempts to repair the +broken assignment. + +""" +from lib.item import Items +from lib.item.item import Item + +def repair_item(sh, item): + path = item.id() + path_elems = path.split('.') + ref = sh + + # traverse through object structure sh.path1.path2... + try: + for path_part in path_elems[:-1]: + ref = getattr(ref, path_part) + + setattr(ref, path_elems[-1], item) + print(f'Item reference repaired for {path}') + return True + except NameError: + print(f'Error: item traversal for {path} failed at part {path_part}. Item list not sorted?') + + return False + + +def get_item_type(sh, path): + expr = f'type(sh.{path})' + return str(eval(expr)) + + +def check_item(sh, path): + + return isinstance(path, Item) + + +# to get access to the object instance: +items = Items.get_instance() + +# to access a method (eg. to get the list of Items): +# allitems = items.return_items() +problems_found = 0 +problems_fixed = 0 +itemClass = Item + +for one in items.return_items(ordered=True): + # get the items full path + path = one.property.path + try: + if not isinstance(one, itemclass): + logger.error(f"Error: item {path} has type but should be an Item Object") + problems_found += 1 + if repair_item(sh, one): + if check_item(sh, path): + problems_fixed += 1 + except ValueError as e: + logger.error(f'Error {e} while processing item {path}, parent defective? Items not sorted?') + +if problems_found: + logger.error(f"{problems_found} problematic item assignment{'' if problems_found == 1 else 's'} found, {problems_fixed} item assignment{'' if problems_fixed == 1 else 's'} fixed") +else: + logger.warning("no problems found") From 1b86ce37eae6349bf81f9179ec768e36f1cb821b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Tue, 11 Jul 2023 22:30:17 +0200 Subject: [PATCH 168/775] oppo plugin: re-establish standalone mode for struct creation --- oppo/__init__.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/oppo/__init__.py b/oppo/__init__.py index 0a045b87e..f09637415 100755 --- a/oppo/__init__.py +++ b/oppo/__init__.py @@ -21,10 +21,26 @@ # along with SmartHomeNG If not, see . 
######################################################################### +import builtins +import os import sys +if __name__ == '__main__': + builtins.SDP_standalone = True + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) + +else: + builtins.SDP_standalone = False from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NET_TCP_CLI, CONN_SER_ASYNC) -from lib.model.smartdeviceplugin import SmartDevicePlugin +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone CUSTOM_INPUT_NAME_COMMAND = 'custom_inputnames' @@ -105,3 +121,6 @@ def _process_additional_data(self, command, data, value, custom, by): } if id in time_type: self._dispatch_callback(time_type[id], it) + +if __name__ == '__main__': + s = Standalone(oppo, sys.argv[0]) From 267e3f38f5a878f939c13cdfa352d106aa524d20 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Tue, 11 Jul 2023 22:30:39 +0200 Subject: [PATCH 169/775] oppo plugin; fix invalid struct "return" and rename it to "back" --- oppo/commands.py | 2 +- oppo/plugin.yaml | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/oppo/commands.py b/oppo/commands.py index e03ab3216..d402f54c9 100755 --- a/oppo/commands.py +++ b/oppo/commands.py @@ -82,7 +82,7 @@ 'left': {'read': True, 'write': True, 'write_cmd': '#NLT', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@NLT (.*)'], 'item_attrs': {'enforce': True}}, 'right': {'read': True, 'write': True, 'write_cmd': '#NRT', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@NRT (.*)'], 'item_attrs': {'enforce': True}}, 'select': {'read': True, 'write': True, 'write_cmd': '#SEL', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@SEL (.*)'], 'item_attrs': {'enforce': True}}, - 'return': {'read': True, 'write': True, 'write_cmd': '#RET', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@RET (.*)'], 'item_attrs': {'enforce': True}}, + 'back': {'read': True, 'write': True, 'write_cmd': '#RET', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@RET (.*)'], 'item_attrs': {'enforce': True}}, 'red': {'read': True, 'write': True, 'write_cmd': '#RED', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@RED (.*)'], 'item_attrs': {'enforce': True}}, 'green': {'read': True, 'write': True, 'write_cmd': '#GRN', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@GRN (.*)'], 'item_attrs': {'enforce': True}}, 'blue': {'read': True, 'write': True, 'write_cmd': '#BLU', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@BLU (.*)'], 'item_attrs': {'enforce': True}}, diff --git a/oppo/plugin.yaml b/oppo/plugin.yaml index 0f0ba7ac0..b6fb35fa4 100755 --- a/oppo/plugin.yaml +++ b/oppo/plugin.yaml @@ -657,9 +657,9 @@ item_structs: oppo_write: true enforce_updates: true - return: + back: type: bool - oppo_command: menu.return + oppo_command: menu.back oppo_read: true oppo_write: true enforce_updates: true @@ -1176,9 +1176,9 @@ item_structs: oppo_write: true enforce_updates: true - return: + back: type: bool - oppo_command: menu.return + oppo_command: menu.back oppo_read: true oppo_write: true enforce_updates: true @@ -1695,9 +1695,9 @@ item_structs: oppo_write: true enforce_updates: true - return: + back: type: bool - oppo_command: menu.return + oppo_command: menu.back oppo_read: true oppo_write: true enforce_updates: true 
From c4a46c5285c90c03c9837492144c75d3b73deccd Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 13 Jul 2023 08:45:45 +0200 Subject: [PATCH 170/775] executor plugin: minor adjustment for button disable/input field clear handling --- executor/webif/templates/index.html | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index 371ca5c99..bda07578b 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -111,7 +111,11 @@ console.log('file to delete'+filenametodelete); $.get('delete_file', { filename: filenametodelete}, function(result) { console.log('Result:'+result); - get_filelist(getCookie('sort_order')); + const save_input = document.querySelector('#savefilename'); + const del_button = document.querySelector('#deletefile'); + del_button.setAttribute("disabled", "disabled"); + save_input.value=""; + get_filelist(getCookie('sort_order')); }); }; From dd464b8f354933adbe0754360c97c782eee0d565 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 13 Jul 2023 10:38:28 +0200 Subject: [PATCH 171/775] DB_ADDON: - diverse fixes --- db_addon/__init__.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 4e661c5e1..5cf72494c 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -157,7 +157,10 @@ def run(self): self.alive = True # start the queue consumer thread - self._work_item_queue_thread_startup() + # self._work_item_queue_thread_startup() + + # work queue + self.work_item_queue() def stop(self): """ @@ -167,7 +170,7 @@ def stop(self): self.logger.debug("Stop method called") self.alive = False self.scheduler_remove('cyclic') - self._work_item_queue_thread_shutdown() + # self._work_item_queue_thread_shutdown() def parse_item(self, item: Item): """ @@ -2146,8 +2149,6 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int if self.prepare_debug: self.logger.debug(f"called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") - # called with func='min', item=env.host_rpi.temperature, timeframe='day', start=0, end=0, group=None, group2=None, ignore_value_list=['!= 0'] - # set default result default_result = [[None, None]] @@ -2195,6 +2196,9 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} query_result = self._query_log_timestamp(**query_params) + if self.prepare_debug: + self.logger.debug(f"result of '_query_log_timestamp' {query_result=}") + # post process query_result if query_result is None: self.logger.error(f"Error occurred during _query_item. 
Aborting...") @@ -2207,7 +2211,7 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int result = [] for element in query_result: timestamp, value = element - if timestamp and value is not None: + if timestamp is not None and value is not None: if isinstance(value, float): value = round(value, 1) result.append([timestamp, value]) @@ -2573,14 +2577,14 @@ def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None tuples = fetch(query, params, cur=cur) except Exception as e: self.logger.error(f"Error for query '{query_readable}': {e}") - else: - if self.sql_debug: - self.logger.debug(f"Result of '{query_readable}': {tuples}") - return tuples + raise e finally: if cur is None: self._db.release() + if self.sql_debug: + self.logger.debug(f"Result of '{query_readable}': {tuples}") + return tuples ####################### # Helper functions From e23bf29effe9a5dbca78ac5d28bc8a948eccf9a8 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 13 Jul 2023 14:58:31 +0200 Subject: [PATCH 172/775] executor plugin: fix check_items example --- executor/examples/check_items.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/executor/examples/check_items.py b/executor/examples/check_items.py index 8a9a7fb07..e41eb3316 100644 --- a/executor/examples/check_items.py +++ b/executor/examples/check_items.py @@ -63,8 +63,9 @@ def get_item_type(sh, path): def check_item(sh, path): + global get_item_type - return isinstance(path, Item) + return get_item_type(sh, path) == "" # to get access to the object instance: @@ -74,14 +75,13 @@ def check_item(sh, path): # allitems = items.return_items() problems_found = 0 problems_fixed = 0 -itemClass = Item for one in items.return_items(ordered=True): # get the items full path path = one.property.path try: - if not isinstance(one, itemclass): - logger.error(f"Error: item {path} has type but should be an Item Object") + if not check_item(sh, path): + logger.error(f"Error: item {path} has type {get_item_type(sh, path)} but should be an Item Object") problems_found += 1 if repair_item(sh, one): if check_item(sh, path): From 21f92803ab5a53acc3b9a9e361d96a2a33ebd052 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 13 Jul 2023 15:44:49 +0200 Subject: [PATCH 173/775] DB_ADDON: - diverse fixes --- db_addon/__init__.py | 45 ++++++++++++------------ db_addon/plugin.yaml | 81 ++++++++++++++++++++++---------------------- 2 files changed, 64 insertions(+), 62 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 5cf72494c..29c73698d 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -214,8 +214,8 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' func = db_addon_fct_vars[3] # min, max, avg timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year - start = to_int(db_addon_fct_vars[2][-1]) # 1, 2, 3, ... 
- end = start + end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) + start = end log_text = 'minmax_timeframe_timedelta_func' required_params = [func, timeframe, start, end] @@ -223,23 +223,25 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle functions 'zaehlerstand' in format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_heute_minus1' # func = 'max' timeframe = convert_timeframe(db_addon_fct_vars[1]) - start = to_int(db_addon_fct_vars[2][-1]) - end = start + end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) + start = end log_text = 'zaehlerstand_timeframe_timedelta' required_params = [timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ONCHANGE: # handle functions 'verbrauch on-change' items in format 'verbrauch_timeframe' like 'verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr' timeframe = convert_timeframe(db_addon_fct_vars[1]) - start = end = 0 + end = 0 + start = 1 log_text = 'verbrauch_timeframe' required_params = [timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_TIMEFRAME: # handle functions 'verbrauch on-demand' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' timeframe = convert_timeframe(db_addon_fct_vars[1]) - start = to_int(db_addon_fct_vars[2][-1]) + 1 - end = to_int(db_addon_fct_vars[2][-1]) + # end = to_int(db_addon_fct_vars[2][-1]) + end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) + start = end + 1 log_text = 'verbrauch_timeframe_timedelta' required_params = [timeframe, start, end] @@ -258,7 +260,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM: # handle functions of format 'verbrauch_jahreszeitraum_timedelta' like 'verbrauch_jahreszeitraum_minus1' timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year - timedelta = to_int(db_addon_fct_vars[2][-1]) # 1 oder 2 oder 3 + timedelta = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) log_text = 'verbrauch_jahreszeitraum_timedelta' required_params = [timeframe, timedelta] @@ -274,8 +276,8 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle 'tagesmitteltemperatur_timeframe_timedelta' like 'tagesmitteltemperatur_heute_minus1' func = 'max' timeframe = convert_timeframe(db_addon_fct_vars[1]) - start = to_int(db_addon_fct_vars[2][-1]) - end = start + end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) + start = end log_text = 'tagesmitteltemperatur_timeframe_timedelta' required_params = [func, timeframe, start, end] @@ -836,6 +838,10 @@ def handle_ondemand(self, item: Item) -> None: if result and result < 0: self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. 
Something seems to be wrong.") + # handle 'serie_verbrauch' + elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: + result = self._handle_verbrauch_serie(params) + # handle item starting with 'zaehlerstand_' elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: result = self._handle_zaehlerstand(params) @@ -844,10 +850,6 @@ def handle_ondemand(self, item: Item) -> None: elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: result = self._handle_zaehlerstand_serie(params) - # handle 'serie_verbrauch' - elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: - result = self._handle_verbrauch_serie(params) - # handle 'serie_tagesmittelwert_stunde_30_0d' and 'serie_tagesmittelwert_tag_stunde_30d' elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: result = self._prepare_temperature_list(**params) @@ -1365,7 +1367,7 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: return consumption def _handle_verbrauch_serie(self, query_params: dict) -> list: - """Ermittlung einer Serie von Verbräuchen in einem Zeitraumes für x Zeiträume""" + """Ermittlung einer Serie von Verbräuchen in einem Zeitraum für x Zeiträume""" series = [] database_item = query_params['database_item'] @@ -2257,9 +2259,7 @@ def _clear_queue(self) -> None: self.item_queue.queue.clear() def _work_item_queue_thread_startup(self): - """ - Start a thread to work item queue - """ + """Start a thread to work item queue""" try: _name = 'plugins.' + self.get_fullname() + '.work_item_queue' @@ -2272,9 +2272,7 @@ def _work_item_queue_thread_startup(self): self.work_item_queue_thread = None def _work_item_queue_thread_shutdown(self): - """ - Shut down the thread to work item queue - """ + """Shut down the thread to work item queue""" if self.work_item_queue_thread: self.work_item_queue_thread.join() @@ -2840,7 +2838,7 @@ def to_int_float(arg): return to_float(arg) -def timeframe_to_updatecyle(timeframe): +def timeframe_to_updatecyle(timeframe) -> str: lookup = {'day': 'daily', 'week': 'weekly', @@ -2850,6 +2848,9 @@ def timeframe_to_updatecyle(timeframe): return lookup.get(timeframe) +def split_sting_letters_numbers(string) -> tuple: + return re.findall('(\d+|[A-Za-z]+)', string) + ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 8483f8909..63592f355 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -627,7 +627,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_woche: name: Verbrauch seit Wochenbeginn @@ -635,7 +635,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_monat: name: Verbrauch seit Monatsbeginn @@ -643,7 +643,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_jahr: name: Verbrauch seit Jahresbeginn @@ -651,7 +651,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_rolling_12m: name: Verbrauch innerhalb der letzten 12 Monate ausgehend von gestern @@ -659,7 +659,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_gestern: name: Verbrauch gestern @@ -667,7 +667,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_gestern_minus1: name: Verbrauch vorgestern @@ -675,7 +675,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes 
verbrauch_gestern_minus2: name: Verbrauch vor 3 Tagen @@ -683,7 +683,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_vorwoche: name: Verbrauch in der Vorwoche @@ -691,7 +691,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_vorwoche_minus1: name: Verbrauch vor 2 Wochen @@ -699,7 +699,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_vormonat: name: Verbrauch im Vormonat @@ -707,7 +707,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_vormonat_minus12: name: Verbrauch vor 12 Monaten @@ -715,7 +715,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_vorjahreszeitraum: name: Verbrauch im Jahreszeitraum 1.1. bis jetzt vor einem Jahr @@ -723,7 +723,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes verbrauch_2: name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 2) @@ -802,9 +802,10 @@ item_structs: zaehlerstand_gestern: name: Zählerstand zum Ende des gestrigen Tages db_addon_fct: zaehlerstand_heute_minus1 + db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes zaehlerstand_vorwoche: name: Zählerstand zum Ende der vorigen Woche @@ -812,7 +813,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes zaehlerstand_vormonat: name: Zählerstand zum Ende des Vormonates @@ -820,7 +821,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes zaehlerstand_vormonat_minus1: name: Zählerstand zum Monatsende vor 2 Monaten @@ -828,7 +829,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes zaehlerstand_vormonat_minus2: name: Zählerstand zum Monatsende vor 3 Monaten @@ -836,7 +837,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes zaehlerstand_vorjahr: name: Zählerstand am Ende des vorigen Jahres @@ -844,7 +845,7 @@ item_structs: db_addon_startup: yes type: num visu_acl: ro - cache: yes + # cache: yes minmax_1: name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 1) @@ -855,147 +856,147 @@ item_structs: db_addon_ignore_value: 0 db_addon_startup: yes type: num - cache: yes + # cache: yes heute_max: name: Maximaler Wert seit Tagesbeginn db_addon_fct: minmax_heute_max db_addon_startup: yes type: num - cache: yes + # cache: yes last24h_min: name: Minimaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_min db_addon_startup: yes type: num - cache: yes + # cache: yes last24h_max: name: Maximaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_max db_addon_startup: yes type: num - cache: yes + # cache: yes woche_min: name: Minimaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_min db_addon_startup: yes type: num - cache: yes + # cache: yes woche_max: name: Maximaler Wert seit Wochenbeginn db_addon_fct: minmax_woche_max db_addon_startup: yes type: num - cache: yes + # cache: yes monat_min: name: Minimaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_min db_addon_startup: yes type: num - cache: yes + # cache: yes monat_max: name: Maximaler Wert seit Monatsbeginn db_addon_fct: minmax_monat_max db_addon_startup: yes type: num - cache: yes + # cache: yes jahr_min: name: Minimaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_min db_addon_startup: yes type: num - cache: yes + # cache: yes jahr_max: name: Maximaler Wert seit Jahresbeginn db_addon_fct: minmax_jahr_max db_addon_startup: yes type: num - cache: yes + # cache: yes gestern_min: name: Minimaler Wert gestern db_addon_fct: minmax_heute_minus1_min db_addon_startup: yes type: num - cache: yes + # cache: yes gestern_max: name: Maximaler Wert gestern db_addon_fct: minmax_heute_minus1_max db_addon_startup: yes type: num - cache: yes + # cache: yes gestern_avg: name: Durchschnittlicher Wert gestern db_addon_fct: minmax_heute_minus1_avg db_addon_startup: yes type: num - cache: yes + # cache: yes vorwoche_min: name: Minimaler Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_min db_addon_startup: yes type: num - cache: yes + # cache: yes vorwoche_max: name: Maximaler Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_max db_addon_startup: yes type: num - cache: yes + # cache: yes vorwoche_avg: name: Durchschnittlicher Wert in der Vorwoche db_addon_fct: minmax_woche_minus1_avg db_addon_startup: yes type: num - cache: yes + # cache: yes vormonat_min: name: Minimaler Wert im Vormonat db_addon_fct: minmax_monat_minus1_min db_addon_startup: yes type: num - cache: yes + # cache: yes vormonat_max: name: Maximaler Wert im Vormonat db_addon_fct: minmax_monat_minus1_max db_addon_startup: yes type: num - cache: yes + # cache: yes vormonat_avg: name: Durchschnittlicher Wert im Vormonat db_addon_fct: minmax_monat_minus1_avg db_addon_startup: yes type: num - cache: yes + # cache: yes vorjahr_min: name: Minimaler Wert im Vorjahr db_addon_fct: minmax_jahr_minus1_min db_addon_startup: yes type: num - cache: yes + # cache: yes vorjahr_max: name: Maximaler Wert im Vorjahr db_addon_fct: minmax_jahr_minus1_max db_addon_startup: yes type: num - cache: yes + # cache: yes minmax_2: name: Struct für Auswertung der Wertehistorie bei schwankenden Werten wie bspw. 
Temperatur oder Leistung (Teil 2) From 4fffa1c8a791b6b7edd91acd76414c77a3eeae96 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 13 Jul 2023 22:04:50 +0200 Subject: [PATCH 174/775] drexel und weiss: improved error handling --- drexelundweiss/__init__.py | 47 +++++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git a/drexelundweiss/__init__.py b/drexelundweiss/__init__.py index bd6f13338..d32f521c5 100755 --- a/drexelundweiss/__init__.py +++ b/drexelundweiss/__init__.py @@ -260,12 +260,15 @@ def run(self): divisor = int(reginfo[4]) komma = int(reginfo[5]) for item in self.LUregl[register]['items']: - (data, done) = self._read_register( - reginfo[7], register, int(reginfo[4]), int(reginfo[5])) - if done: - item(data, 'DuW', 'init process') - else: - self.logger.debug("Init LU register failed: {}".format(register)) + try: + (data, done) = self._read_register( + reginfo[7], register, int(reginfo[4]), int(reginfo[5])) + if done: + item(data, 'DuW', 'init process') + else: + self.logger.debug("Init LU register failed: {}".format(register)) + except Exception as e: + self.logger.error("Init LU register not possible: {}".format(register)) # WP register init for register in self.WPregl: @@ -273,12 +276,15 @@ def run(self): divisor = int(reginfo[4]) komma = int(reginfo[5]) for item in self.WPregl[register]['items']: - (data, done) = self._read_register( - reginfo[7], register, int(reginfo[4]), int(reginfo[5])) - if done: - item(data, 'DuW', 'init process') - else: - self.logger.debug("Init WP register failed: {}".format(register)) + try: + (data, done) = self._read_register( + reginfo[7], register, int(reginfo[4]), int(reginfo[5])) + if done: + item(data, 'DuW', 'init process') + else: + self.logger.debug("Init WP register failed: {}".format(register)) + except Exception as e: + self.logger.error("Init WP register not possible: {}".format(register)) # PANEL register init for register in self.PANELregl: @@ -286,13 +292,16 @@ def run(self): divisor = int(reginfo[4]) komma = int(reginfo[5]) for item in self.PANELregl[register]['items']: - (data, done) = self._read_register( - reginfo[7], register, int(reginfo[4]), int(reginfo[5])) - if done: - item(data, 'DuW', 'init process') - else: - self.logger.debug("Init PANEL register failed: {}".format(register)) - + try: + (data, done) = self._read_register( + reginfo[7], register, int(reginfo[4]), int(reginfo[5])) + if done: + item(data, 'DuW', 'init process') + else: + self.logger.debug("Init PANEL register failed: {}".format(register)) + except Exception as e: + self.logger.error("Init PANEL register not possible: {}".format(register)) + # poll DuW interface dw_id = 0 dw_register = 0 From 4b16ca47f704dffac9e0269f3ee5599738f80edc Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 13 Jul 2023 22:17:26 +0200 Subject: [PATCH 175/775] drexelundweiss: implement plugin logo --- drexelundweiss/user_doc.rst | 7 +++++++ drexelundweiss/webif/static/img/plugin_logo.jpg | Bin 0 -> 85947 bytes 2 files changed, 7 insertions(+) create mode 100644 drexelundweiss/webif/static/img/plugin_logo.jpg diff --git a/drexelundweiss/user_doc.rst b/drexelundweiss/user_doc.rst index ba89c0b11..bb34a43e1 100755 --- a/drexelundweiss/user_doc.rst +++ b/drexelundweiss/user_doc.rst @@ -8,6 +8,13 @@ drexelundweiss Einführung ========== +.. 
image:: webif/static/img/plugin_logo.jpg + :alt: plugin logo + :width: 900px + :height: 900px + :scale: 16 % + :align: left + Dieses Plugin ermöglicht es, Drexel und Weiß Geräte (Wärmepumpen) direkt über USB ohne Modbusadapter zu steuern. .. important:: diff --git a/drexelundweiss/webif/static/img/plugin_logo.jpg b/drexelundweiss/webif/static/img/plugin_logo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..849e5ca03ea7b5207249bba7e7113505d021762c GIT binary patch literal 85947 [base85-encoded binary image data omitted]
zXyPEA{IvLgPB-9|L*K+>Ab+2(-q??~lZ($-jQ_W^fk=Vsx8I33{Nh^1ME7lLad#PR z{UCyj;m7&XuTlNIi+r+QzL@&U82%blj=1B|pJ*PLyP4Fm!Dsuo`ZZ;oQG8VF&1=Tr z@sOOO=`{BLNWQ)J`KR!xGS2j_-|7E&Ka`({xk05 zK5Uv`_}c$}QGQXJin&Yt+lFIyq0`!s1`m52&V&9go`_e82D*zk@FBzh1%~;&*y4)q z?2a2BukL>&GRxPTX?Y&|TATXAjs0i^-ak~ts8njVZ6*KD(-VC8V#X6J{(mVJPLR=k zZ{0sve;f|wC;D#^9a*FFWt!-85pl&%^gRE-o9tp&Y-jcOKa5}ICr!l@dEdfBEj>Zx zjR$ZQ7tbK{Z(#-fL)Jule~HvEy|B10c!J%(ON?ZGdCuo>L7(W)?YBj)A6gSB#&Y34 zrgnVzKQMnti{D?pG(i(N`FCC>Ui5zJbabdx{weNKnt${wkFqwA`D=CG27LeiLpbr( z;*S?kNr_M8Xvmppd~cN-p2#i*@h!zQsV8(6ZOwCr-~J<6b6TQ`b{#$apV-Im+b|H9 z^e;cfh+!%CxNRlz*YK51*^eKC@lD_U&?(9(p8jLx-9HjrRhR#8H9+6t4f&Uy7&lQ5 zJCR=|@U!zr!~}Z@Q#oXRO-bJl{#*V0^zI);A}5L2`#&tci+(~BOK2rp5eX#{CjXEx z|9_lMPqqAq7yLGpUY*Gky6cor2b%wq_jUfk0Mn<7$g#$z0u)!Wk57mV+-}Rx{HhSD z_*N@YibqyusTqC8*XBl0Xj54tWIF0oEgYe{L1LAT3l%Ne{bnHH>$4&=W6fIb5W2ya z_?<78#mtP*VPGu}&w_kJ4l>rZKk;5Ay;-&{7W6rQMf_UyrCm|5e$Vb9GRTf8dmuV& zMZ;+kDcQSHSgNTvxymz;V<=;^_^iopsn%RlX9DMSSGVH&md3vf&xj&l9nQ<`{8_Oo zU}Dj=cNeyylz+^B`FA>+iBr7`moM}7iHKK&?Rc$>xm;DuQ$XwR_9jRka^~8+`TT|W z0}UH?p3%5BNHzlF~2FXQm2tY z&hW%r54x%w$~s|z1|gM2dCWs3^B=|o=TL8111Bl2czMNy4qC#c(cy;8jrOqlxOpl5a4%Zo9fb?aNv!MG*QqeY}-{b#fNk+8m*xmZgiE|s`W?b;v z@6>ToikOg%i5Aa^x$7Y7pK<3$|5BVq+tG)#m3-2{Q+oH9-{+7U`~;aa63G#t<*nkB z2&ss$(O=Q2@0LLbqtR>0^VbG8%+eMQ1JZvS-`K6aXL=W0j=kN|tAfB@6!w_y8q?0zw{*nO#%HC(czi1zABSg z`66-fh?Tf4ok0!a8;_(&D8&n0L#ONcTVV^G{jlw|9?B$$^G+JHTw@Tzb7U( z*B6kjg0>>E(9xPfQrlE~oknye%@n6Yz8_p<-z+g#4rf;%O8Md_KI9n}PMLDMx!z-v zhcx+jGCQU;NVgm9hjh1?#M$1+OdTPOoCja*DoCn=+oEO6V{akTZ67zeG+!{|ezm39 zHqePQ^;j1vcTbT{t@9aRBpcnLsP36E7#U1mmRfKeAR)2qM5+ZhTj1k7e!M$3+A;c`lFUk|SeZH% z`=U240wuFlrf&orPg*_rsLLhk2PBW*@_ARcCJ?%Odi72G44|45MW=WRnYU zRt}}Jep}*-<<3^DrxVLKrO)u?^uxaSv*7*ZP&4RQzf8egEn zCOw^}z+Kf-hgH+h!I`IDIY>Sp@27tJ6=^NrTv+d0@*fF(6$d_0^I^pO+0MJ8DWyx< zZ6CzHDCFMUk$Sk;+&8a!@a1<8}| zjqDlCEo!*&5_>SKw!0ASQ{xBYSufO{JtGUW9x(_AK}rts0n)63*?TxW`uhorz ztzKDGO}my=#Qd^y_b)Afz^%eYU!V8~60VTC&C0`swaxQqHV{3o5d zcAHzn>Ag7*wed*jsiZuET8>O8l01$^%@?SWxRi2`VHWKvBBdqeC3k=95wQ_yWkbKy zK_1ERBC~*PpqU#=MSXxYE@@fvPzTCpMYG?_m47r^t`Ea0@WELxcP~t>0_eYp5fbZj zZO!Cou*!R`?i9;)p6Zs@0C3$oyV-b#Xf&L9@jGq;oB3^JA}4}*Cr<6NfRYN8k`Rm( zay-$k({OyIj8S@&emq=#PM!Vm)YephR-XH}LUl@CnMLL2|41?x5waz%zxm^4XGKVY zN*P>?m)X?JG}X~~W*&ekKOU~MX>|XdL7TKsvozV*mq4`4?M5C`7==iXyFJRo4OcahOKxw(5e;(VH#+yn|v6LItj5G-;2}K>rloo zOf)x-(FQ+;G(uv`b_vaqyfK?KM+)R58}#`a<=qq0%E6R|E8!%oZIi2&WZjE zQq}!PytTMr^CA4)@!(5<+{#6Qt{7z;LwghHjNh0pHWq*Gbf}p6w$UeFT0CA3|IUSS zCR8|y+hz0+ryUZpr}&oBHjHNXD%^kWp&!yDX%O2JrcvV?r0YT3+NbRF@%7iG=%-HSE}#o{rjwWm zJaZDrlxfOv93vLyEahBo#|J$#+)L829sk=v`wh$$%n-rxpd%--G`ARQ;Sm9UTo@1R zcdW}y{Ewt$Q2e6T-a*PR9nzdFD#cP9XmB=yWFoGt?ZI(z3nk?1mZjmLDXg5(k+>xuuX)J9-jPE0SM9q})q`WK=$U!Iy znw!FAtSdP6k9KSEAX10J@G^`8e7Y@Cg-Yr@N6VCAsh=(tf+1*$n*~tLV+FoEv-2jm zT72k5;uAn;>o1|&&EPKIiNf>d7DIBQ!gG`YBbm6TDERBB+EwWB8Fh$Qx&0K9R@HTU zDSK_~vl`Usgnd0E>=v4~bqXm{a;sBQH}#$Ls=7>2cymENgo3^vJZWFL=l&&n$x1Iw z?%m%8`|qz`@mxZ+Cqwr_Z$H~}g2q5>J#KEwW%^5w{*pJ=5Zw5!e3fr{&<_#fc=h{L!Lp^5^o>KF3o!F_T*f)PDP9cnkXd~*^84+ZIj9GH2O9n)Td~=#t-|Tl zEZ;WdQRg1tFZ*WTZIRzS1>)%+rED|V*|rlNnfS+W@^Lh0jJU76n2e2KaO7GpHElk8 zpXseeW|l3nJ?^yI+v}mhc`i(i4%z{kZ%DgQ7d!#mY8;0z>)n@m@9B)oc+kHO zfz~Yegkj?XBb1Eme;Z&QO2yrIKev2hLvG++q`T3{q(S=hOY3WckJ>V8_$}}5iVww? 
zkcE-|NDOpuUjWM{`;n*CAER{~ERPwi<@r}(Yjw=h>R<1s&Y0K053onQ%CJBq^O|Ct zw{Y1!lzB;UwbY_2KGcq3&xl-Bu6|c9bm*?9wy!+HOqr-rE{4ACGI(W+J83JyKHDD` z>~t?D&}gCUId}6@1H4o8-=&fW!{w zuEycr6bPjD;MSx-jQ3PX3*r53s=(h;KalsS)&8J~%MHZZw4v||g}pz`0n&#YDVIKE zf4I|VBNCAQAqaG>i~S?`ncB*ckO999`Z@pe1q^O5?zw08#p@mQyMcT$kj>*>|3m+A zkb3fB2XXGG<1OlyC;ZT+klS@1S(vN;f`-=VYM3+qUbHC=+Wi&KvHi~X_M_LOuj&l` z&I;)KvgkwKq2>n_tnb|Js?{+GBlbJrn_tYMzpXag&tD~%6nt-SN{xPg$ZP8!Ma?SI zu=Fr~|8KJkgu9Cj-lg7YRQigsj2gM2O=;`U%0FFt^@y=JlbS#A%>FuoUqoVvvSI_k zj_`+3zW5IQpc09swp6Ec`_-bsUp5fUQ^d=!`Y^^+)3tJAvyVcO3)y-8kZbauWdI%5 zTeAUiA*0gtx3kGFt>be-0e3l@i_l9!%lPYfo6S^SZZ>9&JM~M&fP2pHBF#?>DJ%fT z^gLzYhPuI(Y{}wm*lt8-_{!pTqvLBz<0mJ*SBFDsi$?|BWL_RZ^co9Tt>i zKVPW^p9@23xj1;qZGx1ASo$HYLVRC`lz}3>ryH0m3IibwwzYKQ)6;~Mftr*|aka&^ zSW!A5$}tUR;jo#xkGi^5af|AV%Xg?F^;c>tA#M)G9VRglzAWAVK+C9xfZ(b ziPqSF2QCg9;eFrf|LzC%8f$$7B;Ft=J)W3EwyyMpL z=uMQRe#J~lRtx~DODUA2CgbMqG4{N=fy43L!f&u-YzxVibu(+;2ul$`s+fzG#y}h` zR}T&V=hG%CQr=MpSNTRd@|}|OCs}i7mBYgC!Z8Q{z>d^1Z-M}S*m1+G+fRahkG?ay za6AW8bUSb=KBeinWg5Q_8DLBwHLohQ=K7M{<=*A3+{Q4z)BCys@*zeceP=f$U>}Ap z@{ik;oOeRi9_(pIE&dWp-x5h3U1+2P!(Tz$#_Z!tRCL3TOs;~VKr_1f4`$?m%I+zi zx}QjxHRG8bp@_x&cL({3wKc-vgHe)ql10o=~m5$Jn3Xrb=4KV;%cH%q_UFsJ^~fEjjeDv&Sv$V}2s& zi~o^$9jV;+;Ef#9o3NBe#TmPCN2IiWvdhUY{AH5ptRUi;6Ez%SE8m35c!~N2KjAT+ zV084Fp>2!nwT%cdKxUdrVGVeK6Qq9ze;5JIe|)>ynS?)y(Xll(llS|jXjFGG+P&_& zH10-NDwMsd6*lnQESN?!9z z=JDq`DdpGmo08gt-~4KN)zBsexzLmIu)}nS>9&}sFp?iN7m~LdO&t_7=DbQ+{4Aew zba%DN{Ec`mDrZj&vM2Z^%Brt)#^;!CEA{kg@(sEn;)&S7s$QV;6S6N4%U^Jk|5<<> zIMbDv56;a0TMEFtEZXg zq2}5niW};Hyy${XZHm$!vaUP9>WPvBwi4_Tr^wvkrYrR^_*3QzIVdf#a|_KRE;T#Je%%$HK&G&w}*?k<+dK*`EORuoIN3Hc)-Fx2h_61>T} z1e;_w;Tu6J9P&0o@O8Q_NjCKw`hJTT>mJ7N%v9AEqmDrXH#^eP0hXnLP$_?H?m|ecHuR% zjR4Ar)g2LG-u!@cb|{O9!p{)fOrhr|>WhMHjTHX?bRo9YChiAR^B*wDOm6(caR+q2 z63SuWzN#NfZO|n3uwFIMx@#(zxweVGTT_JY5|4oIHF1!El3xlr2UIVL*a=*UNL12h z!z&o)1AvYY8Lml8HC9NIdgz6|Gpc1d_<9qRu_-GH&WH_Ohp%vs%2K*ElHqa`FDqvh zlwdg9>Y-&S(E3dTuo#BhRay1$I1B}nfD&C#ZPOzob>5#Jg$qTpvD}3+M}0V3Y28qSDaOS%9!rUyM1 zIoHucA{#7xHW)wz(iBNJ8fv(nDa4r*OXP598$m)5Ti|w=n#z4&$EJPnG4&xE&Qre`@Jk;B#QL&xz8x*vGJZ zMaK5mq4Wefy;E55aJ3Jz9@x=J=0@>tiCZbzPS-dB#(=EqSekWyR&u;Yf#W!T z{{Ui}ORpocKU*dnZlaux^+Pj-mQkTG!KdoFw|JaC#ify-?LWfoaj+nskT^IWm=L2S zbz<5z9Vy0ZbkjaecGHLQS4`83IR5}EhB1o4Rpct8oscI~2FLt0lR?P4eAJrpDJZTaiH(^5p86uaJ`W?1a6; zM9Q_j#>D$Z=8V@~QknYv`vt|7qeppu^YV$=0YeJ*uNQsnL_g~N2nTilj&M7 zlCoHNjtU6LtJv6&VXi*fddWOTSU%uf^}^p+<__jNJ4Nz$vpi+=va(3sZc+)d-2UqQ zsUWCpB9MlbfC($4;kdX)9o)Xhg?8Vw_>4v+z3<9T9@tE|h!Hq5iuw z>J&4)iXZ($KlcT{?<3V7KkPV<<;(v7^3LM=oq)m^WecrSca=bKG_5c1Fn6E#(JiiSzLehbxAKhf>POjX{SVQdp5yYh z)7sm#amPsbi>&7i1>d`4-z_aHZg7>ZEd<)@L%a6xUEErunWVL>I(1sSekw78ih8rq zyC2#7Mkg5>^NWo*JGR$8N0ie<;2oXG{{WQcJ>64S%`&V>7+i&qrRLFBmg^e^_f})2 zwr-5VHM)sYnM_50s+eFP>YDa$3Mg1f8B!aQvFl#Rbq*X$?ibOaHU;`Go_@)c;BhSV z3+Sn*2H!>2aIf7zGv`0VIbVp!{fHvwkOk2ewEdE1r5hm;dacprg|^5J0#u*E6BV)( zpm5y)O9-}5*`-B9jRR%GJB~uz-QXXpE|%ou2}|WiDT~m~kVd}l2{Ta){P}@jJ&>en@=O?sRXd5kbt!<5*R>*hd z?HdIv9TW}@bAcBHXzv%|kD6!9<8ty(<#W7G5g8>;KXcnV3Bc>ol(BWAWYwd6{K^iL z{gPhZiQZhGb@%ks%hFu%CN|9sYL5HL~Ank%*oWE`jv-#e?0gbdv!(QkASQf#hZI)a3~+AB<9&=k=C z3D|ks+&V}=8Px=kkZ-bCebhPb`ymiX+_34cG*twbOmt-Eq10q2Vo|aya-CK@lc81} zPi%EcZ8f*Tjf<1Mpcgw!qRxb?ZD_ZuTWRmQ8C}G-8>uWhY_0im)hx2+*TMv(*;s8JgT49}VtYpEMWIDekt2OWm?)gS&Sf6qXjUOQp67hTnZuGKt3} zGD^waqYWuJx|Bi(DMg8A@jk#k&FZi)EOj7GYd~I#d_F0x2~)fc6W&5YBDu-{5aF^i z7Ba(R44*nC0lrIRLrHd2d8?fk8tY&L0U_r3Mjw=yOVlPmL;&XiP`UxJOYrU!e78h~ z2xCU-1&Wy|k;MoI5FEeovE*9m&Un#QnV*CJxTFO#HI86yv8oAWwt+Ah4VN9pIMZTd zp7UVPPF2}zwzAj1YmV}~y%~-yNN{YZtx;2ho8)bHUItj~RXjX@iQCB@DeV#M8tLBZ 
zYIhI;AdptL$(*>Cxa)kZ^I(q{?ZWy993DzeQZ>s}kfsPOtKg#~MF`M2T# zEw%y^DCmwjZ;`rz9k5nD#^URkaLRsBW7T?xbiMG^aQRvb%a};L&dba%WY`{abM#h1 za)ZqlVB+w!u{7N(sW`r;)h_P)ZxwV5d~OBA4qz8-|il(iMwphV&7&cnWW(ii+e8+j& zzSPy>GVbNAC(Eai@~O1VXg5k`cUyVepJTTd7sc?LfES)S*{Yx(UPJg?)bMp+?BaW_ z8?`(>yfenCzmFrqt+L0td$<1pj8zUl2_Iy`?5R>VD!*l`^qK3lI1Y7Jso;#5-8`J6*Z=DKz3EZTYl@lTg z+gW*NE6R0Fij)zOgmD45P8GoHjR{T2PlavpptAenJIxM@A6PUENQP!C!ZigL<-Nhm z7uJD}RU$^*SScX8d!Ebemh15w?{L13K_DP~7tUW~xk2JM7Rvf52p{mez7_kY=6vb+ zgzv-``w?Tq8x$;wqf(+ee^h>3s@)!31B-gDE3_Sp}xCVi9b$+9jfzKwvpp*7m zA?-uolj>%5H8bqin>xqru(4Q`LKo-A>H9yx>T!HZr5z%*ljx4)-oLeZW1PxU%mv05 zg;sk&R?6Z@aJutjSIeDA*6EDmVC=anxVn`dtolm@*~|_Knv5x?`n}oS&wX`+N4xT+ zz57;a%fvvNQQuFgvG;x(ohbSp*uL*3YC{#1Pq_cx zMn9KgmHgwHClw`6Qi=r+C9!pprwq_zP~YmVhB5U`h0 zle$-v^-XkzV{5?FT|E*l_>$b1sX-YiOn#}txE+)lk3J$l@e+7 z1(Ou;=V1c?)kv6=f`L17Ias(f#9qn~#a%V9>?~Fq_Ud1FpIMz+nD-kXj#iD9GPcIn zUW9yLEz&#gmddAJMQ6-!WUn|crc)4pbo5dx2+ff?qQgXiPN9 zqcP}-rN|x3ERO#G2>d3tIR5}>{3!r$s+eq$UH<@V2_$Iw1-^)1J3nCjCe+o9EV4OU z(E)Y?nA>~<@VNdV!{K-=bS*c=i;-<^ZDdA!=E$)nCY{rMrjUD3_ae$-YGP-_k^s?b z2y;#k-F-3d(iTfaFFS5SO_$7>q5l9DVQdE*epeyHXvUpO$C>n}VfdX~ZVkq(yMwj* zu0@u}&D09!-quNlqS*+{Z8`<1Jr7P}TT;vDjnynWBst9@+__jI7O)5-qY8PLMVc~@ zXF4nm4cjXVd9smewY&&$8>#S{Yu_rogDV7kp%ZGzw7@Nsm7WmltFdH|lnApBVAT|| zw)oLej-01HEv%X%)}@xn#U`nPK&wL)wz_}2rI1D0J%cq&J0`_d9+t|l5o>i9Q4nqK zWXOCw9@XrnJ3mpjhT(kO(uyj`&3RImWhHRw>UQ8->VmN~%>du1C^cg5mCl0o9tc zrvCu_c@{)XzI07=j2bRH!gMOYhTAPVJt|AYb6(xk9Ww^Xn1xtnw+cjMVWzQ`(Gw~o z)P-Hqi{8klb5F{G8f-bWOxD;)F9$6cQ7j;hFr%CSV39R$KzPRL3r6K*qgBf@l#8N~ zYr<&AE75zG>;bQ;V@6sWIhUF>mcP+$hp-Jh43iguaeFMhEj3cCV$N%{SlM9seOyF0 zO5VB~XW4$n)6vAp*%L|S7d9HMT$bM67`M?}j(oWEC{2H&?vzqiNckeAS=5u1`@5itLg01%!jFZZ~md#KsF{xU4?dwSxWKL+wwd}P# zN{nSm&ze0Ou-rnWEPQa+ZWT1QM!A}nD_hY^YFT{{J~o!HfwInK+T6KZk@RzDbE+fg zmpwxc8Zv+|5o;i9VY_7^$0?7L=z!aFPWQ5_Ns^ItQD8*UFMOhC9aT{pHXux~;f|_c zV+KmCB{@LI2{tJOuD35sBX5lnHSATL9@beDHrnb%f5KUHTLyt)s$tY13~rWF7;AMG zOpVeW+KBYBA2Arg8A)Jm?_>s?UgRJaQhDu>q8B>)sWRUQGPIAPiFLKqV;Iwmscc{z z94gI?6Iy}GMp^L1^;jZp@{;%o(`2s<-631VzUQ*oK*<}rSA%Vkd41G~CDgd-*fvty z#`H^PF5yF=7q}QqhMfm8rGsT6rJCCbQ#q$bsG1N)m_WW4xi6F)v?Q5HHYn~IEE1d) zOP5ky0NoSfXyvigs=U2X7H)!XOCFiR`zsTe*B4}{T@o%UC>9RK)ndr+A<)uG30=2o zpInXaqF^`BWdo;8Y@Ib-`SHn|xYH;ikI^zI2}^Nx&;-do)>0+f$mMnI)nSb4fCRtM zGsPNPC}fP_rQ0{%09a%F5@xGwu#C8ngUU2z00>|u6`D7(ZX}Fud3&cHx8BV_>)%;QNej0~zT(-pkt#h_s zv)?y$6L9I=c?pTf7{Ck4-2C7!7I2g$X&+nQ@l>%?m1{h6%eda_hYNsMd(RohSE#?P<#xC#4vjlK+49$F_`GAkIl$QK?UZC=MGcb})f93YAP+z9ai94q zi60^U7tCt8&yO{S%zu^i=Zgi*+>$1*a22`<+_nnOG@sC;*Mbx;-*<3&ha%6UD@j{plA+kn z8BU^}R{S2vH`^ZBKDtvhr1Kq@0ncM~Y6=c)l@5Rne#>1ew{~|FqY7zLg0xbyT>yPm z_D*G9G*zYo%REDE6bZ^Hs|Dkm=(0-7>Jdim);3YPknHcU0;ud1nzv{gnH=K!BXs+K zf-P&RE6G~zETxRTsNGZ!ZIxa0?sh@43GA(5ftD%^d#19E?bj$9Om`^}x+Sjxdatv; zc^{|MG!KSH3;oxWid(B9n6l1k_ky;Z-8m^PMix5@PZdU3dYegG{tv92AB-|+nlLVipxuAdU5<9RzX=S+8D_>CH_G+~}Rnl`J9 zamGZ)IphEVIbS|~rMuMg)dmrkFyUD5PugaB>_ST#bzUt|R9s7)mmNygW3j5L$qm0# z(K{#EI-EX!)rnsnv|mYFRcHXw7n?t~DA_A|WjDs?GPH-O$#q30PQcnsuMEU$nA)8B zsdQi)6qyB%5@WT?Gu2;YxavsRF6B7MV1f!ZRzlxZy_B}J>uaKXjk0WW3uuA3>9JhZ zyIxfuamN-`JjYRlpBpB@1B&Nk1UOhUw#hKjxRY~*Vf060fV#e-srL$6#K_!AK5H*S zJ}fHE=N*#bnZspbSCgWM*muz|BcF6etz?niV5Y`W5Hd=j8)PNDmsrTGn8ukd62Q31 zGZ8u|EJSp&c$(fSOry%a4PGjJ0%vAkbopi zPN5K}ArWQ}2oKpY1Sh^ynBYyy$yk=?Iv{6U?bl%7uZ_{l&~7knwDY`AWg3`dca~v$ zNjU*;UnR$?HaX221NB%{@RaL(+x0q*5!yUw3(||cewVTG>x=g#T@$SKIDLwW9_T9` z;MTFk{R;YJ#CQx7W=1yu06>*T*;;7NGh-L}1+07~H__;Kf3#}ZEa|M@k)u74_F1i! 
z?eSPcPNvIjyRGgv4jcoyL!WMo)0NUp54z~Go+F3u_n#F%d+?B+gDas%EuB~*BcGBUVP97*%~e{8;O-^ zw6;go{g>c0xMmqmtJRWCzL(^JH3LptCHTXlnVf~T15B-H+$O9MAm}W5uJ7Hyc=Pc2 zTJa-`Zav%Vx?E!69CW$4DshvGPCJ;miZtufl{$*)?{n%WbiLrk#A(>}8^{3WA7$QU zr|GiNA7WnJ2xpQfyAHS)D|n-Q?sj%U?eLYIYHCk&f63+3HY$)0Oz z&VQB6ac`O@&>x2i_SPxsUu`E$`cF5%DBl$QI+)= z`H3ACG9q_U4e*#Laj&8SFzPtTJfiL=7+vp#cQsinIhh-*eG2!FWv=2Pk)oodmubkj zTW7<&H%}1djg95{1--*?-tsx5?Q_Vu?``9QuPPX5=91m=zeDC<8}6#E2Hr+E0q9pp zo;>GntLr}(>?JJiEtP6*402aMTw_iyj{RV>G_6mp{}+P z-g=}f-Gs`IMI5B&>jZ#oo8aGc4ZTbl&Cz*)P&$}20q6OOus;gqqGhA?+h`3A@*wuVP7&65R5(%3@F z4BILudIgy_VUPobkC2~4Jk}azHYVL94TBtZ-5;A8T~CzW>3od)p$gfC3afn-KBRR^ z1Pydi&cmX29!I$^Xw>8~uME$$@TXqnCtVqYzR ze4t?)Y6#f?ZS!m<7lMFtuwc-9w^ohg1>L zS`ua-b&je8SeqpB4OAJJb=5KCV;n(T{{UvYD=k6gjf9_~S7UoL>Z#ca0VI*4`XAjV zay0a`>@Q&PYzesZTMQ-@*M11%`$yU;d?K1pS<7U*vI$;!S^;sXhIwJCZs1?a=SkRRss05(KTh&83a87`xJclUCqihnfa#WqB zr40bLD;3`lQ(F7W{_USu2qQ#I=G`%jD5SXyzdkVl9*Hssrm_=kYKl_pY7+op1dORX z!9fRHDf0)>ATg}P_dwE7WkDv%fHFDPutfIt)e`{&)h^qV44!Zlx}XFc;F|!VT1=op zhMRp*vCd73GrDEZqChQ;x77@ccFQ~?d!~@?p)r-AF1}OE#t=7{tnrA#KubY7s5c5+ zPf(z^rUqE*v7nKaEoo69KV&fi8Umjb4U`#2MURHz07o_SSX}F@ikO9AjVJ*vY1E^2 z$im53N+z|#0U?!SbC*i*$A}nsd*6F|r}$m!YuM<#%i2tl!ry*}_*GYSrzhZTI8+qz zlw$UCN6k(C%^t2 z-C^r_qJtVnQMRy*S!3rXL4?6jEv8n4_D8m$M+3De4=~UszljnwMd!URf*P&GvQlg< zWz292CdoW;-&YI%R~BWyfmz|sFqX0?ZGQr)s!lCOM{PXMsQuE=wjLc6U<@~3 zs0+475dbeM_ImAuYdCyu1Q{9y$Ju(sQM*I3@94=mCmpPOZ^Y@;r;Do^zczP&`2ZJZ z3!!)E!y5h-J6r)grr(9=w>mEn_L%M4Y507w^A8cg*k92`yp(ClzKpwr(y3P&Ml|=& z7y0gd>ZpvyUdw#P`9ekV0t$M^I6Its#l)vFTWPA`u8*;u*O-Y~N)b65Xq@VsZIlqr zZ-^vVwL}o-o7&4HYprW0w}{(7p|$>qA*%8#x?^h)ti}Ls$;yV*Uj!)>oY_F7wYDG& zjFh4XDUWCcKg=`@GKMDFDVknma*j9KL}semrMh9L2q$N?^cv|1&5*|>_Q_Z(ow&j# z%WS4cmVi8wgr?o9!|FF7D9lN{u#q^JW^up@Oaxe_IZs0U<;tL!tF z^gw1O1T1Y!hRB;O_DqQ{X#(R#UdL5v)EzB^m((2qK`30oT0rHp##6g|BW&OdjHrJT zCu|@Z=!IR)&?yoVwUfO60HU7|9RfnT=wsXl`AZ=%Q4ffqia-ivR>HtKA#ErbD&*Az zV3X#Cgd|EWy;1KT$(%i|;tY8g0EP52wj<0rMl8N)`!{yXQ*jucw?C8+?BaJwJYpt zKes0cXsKj?ScR)H7f5ewSS^6|!S4x`b&YI!4K_Aj2>vBwW{g>I_}M2~Rz9`twHmaz zT?o|qvh(DG&lynagKH3&%Hz{0n^F#;V4Ys1unt?3ZiomL)lZh@`Y0`EHbNr2z;j5- zA1%yoRnIalrcw=6wMZj(uq=2QCdlG5WP>O<0W-|IafHb=uBJxojWVEu1(O>n8Cdzw zq+u6CZ?Az>t}WRy)kNS0f^tpmgy&WPkPuCwO@+-+$y^D-W4MVsEH%D~5!ad*Pm*>C zt|Ion78YMjA$2$yOixw}jp&2JBwJ->8JeO>k#S*!fj4^<#|~L5A!O z61;P&uE)~`ckM&{R{laU-+cl9091ZL`Hv?QoAm`nfo}yCHvNW zFn@kjSN{MsenNxaT^IEm`3vTd;&Xpd-;{r<&HYDyQunm}l0UPYH}hHavQGPGhx)Dj zg;@9PL=X9c^1f|;u{qX9Tk2FED=GQi`A^={=pVD3H}hHat-boupYOb^Z8HS)94?wtUvP^^qsx?(Vz7PKs{Dt#p^_j*5j{KD$t4sUcf2w}=pVABP=MVhWeJ6*%x*~1j56DeD z`q2aaV}Buh)n#m>N1Xboe-)Sdhw_hl&!E3&SZ-BW^rl|^=x_12@)XJ6ykm&RIOLr2O zeD3^|{p&u6KWBJv=CkQo@79L@0Oq&y6MyTy5A|F53+7bZMq!?JXVjo~6Px$D@>2J# z`V(J=IA{KAKAX+oZ4Is^Z{#mJ@5{Y(m-Vq=bXeoBXN8MH4a?yH{^ zoZ{nc%#1H!Xi%*hs-wJGYwTE__df2WRCG2u3M8HVPzfD-xoJ;Oj2a(t(E*I~N)2&0 zOfoobf#qwMY`b*{PiHn!d7s>}Sw?{u1qXAscU#68PgfL z$k+hBakL9s`6({pw)x$kQnnN^lc1W4+~hdVW-#1dq-s~3lZvvpGwDS~w9gH{`K|ng zjP{%1x_GVph4WHg8-JbOlB>Ii{{X$;lCu5lSE4!i{vB}l{$$Uf{pEM{K~YOAamove zNCN9aW&}$qwjASc%HwJ2`Ax&!`6|ihIl9;4aZ;|+lGy9`Hvyxql8~l#<9UHNt$G4zJYn)NBt{bK|8$HPp9+pJ;E-X;Cpy*aMX8qC= zi)X0!G2d+5Dok9k?6%vpKGhUbN6jpOt~*_P_=)3mF5%(X<|FR5RB_cPt1XT@i*`>1 z#A24@Y{8HVWwR`&u(Mee7Q(G#3L z%3N=vmB(u&CCnt~jh^nxWNU#-m^gjZN$;Rfn&U+RGHh*xmodrBnbxpOoRyY?z&Ftw z0sX3@T=9#hx>xK}c@#IrahoY?RbDO3zXZ1lti)cAvCnPLq(+MU=opey?9B>dkj@yrWdITO3 z>17RLhU*(r?Ft2F$&8Xq;UL9i(KBu8qBjjnXcSroM*1M|{S)08IT|4iatWtll(mkG zg4_|Ct)cEy+MUt0wDl&;ZHF;hrslguzEW^azKZ%4=SAj;s^WYf4aBK;c!hqCn(;@{ z={=;la01e|PN8`Zd>+&7@Z8E%&CUq8T4Ci0`Q7;`(1$liU+S}}aXPbBwT?3h*gOUv 
zqN#T)a+b#>#42Aa-6Qf|NzT_)V{Nuj`JbW_G1GOaJrWe`thWAx&UVUKiQN#ju0-kH z6B>);!V+01!JDHq4b(lt{)j-Mt;lOV5E#iiD;nsRAaE&?R|So*(M#v%8f65`a7rz9 zswJskQNkw@F@*f|NNmo~EDmtp7b|J9(;9zjOX0R`q0+x%fJ*I32q-noveye}mi8Wr zaq}FbFL2#3%G2DIw2Yh~l0Ic3d;Cfapy{eXk$}kdSmx=sr!jl`dPjxfN z*9#3h!Pf||XSrLEZ6_4x-TlEvDyS|KZKUT;-*?hbxdO&WtMS4QpYSqL63gioR0XlpKQUUkk5dmijjZ z62$pj$r;LS4bl0X(j$n3Y=oejutukKo+)yd(=0I%g`g~^HI6$cc}I0o6i!5W_9!Zo z*Fgf6`yvs&J~-XgNTq*NSdMWI4jUnCPdQfE3->6wNK*{{rBg(2a%_ds0FJSaHYrY{ zaS8>BS}U_8hkN~&Hl*m2;JGo;9*8BUFx^s45So*sCc$+Lg4swGUSwFNwg(`NH)@~A z;ksmI?V=F+M|DzScuov=RBonu-4-j(SMZ&x9#@8r z*sU9oY6%!v#<;BTh)jw%Aa2WIMJu(sP^NapKi=w!COI0E=4e5U>R!hUlf^6gscbL4 zP7|Wlxkz>0|WIO9amDC=2T@J_cv^6isj&B+yoC>x`AxyP zF;-F|h=gJSP&Z)BQiQ=sp%!E=hC7^}hR7b%z85{(C)wkWi!KhP?slqAO$upE%_(vg z0ns`Z9SKVCi!*zuX>m7E(6UP7Bo^IELAI$6VEe3OC#qw%gUpAjE z@wiHRow{PZ-kS>_R7?)jWsIXb-48i98z_=?WtT(LrLgBz4sTQpj`|fM31~dHjn+2j zbcaH8x;DdY&?m5O1mfsCat|m#N(V-7go!SeA1toWEE70l$&g$GaxvDu&^M#EBD8gG z{i=QY-q=l#nCWGcIpgSoH+N(vOQP`4#1KhNPU>WNZ@a2Qt$Pn^3C+;E(4{qmebBwO zI8q>%<%eHlqnw@6j0A3?I&V}4mLGG~Da5xoxlr-ymi8W~1>`@+aTnbS9{p{Vl+AEf z@^mQ;=u>rDziKzR=VdKrywCwsnB*@K2IEQ%oTH3$=WXd>?Dgf%ik?aB`(ek-GKuM%3v z52AD9+?83=IN?j%)e6K~)dw4{bw)`Ws?o=N3MW8%rZ%j{vgdRUxi-2MM{NoRAdU6d zGbskc@}=^)=n|a6)mem)5+`jhVQrbiYp9N`_qQst_PW;vExgA<5G&{|@4fAW4kqfv zu76cs^ySA?4m#??EINe9*lns)-dpbKp{+ZiSd5mJ7~yRSCj-@0k$@Z&W)syhwXovy zcT(U$)gV)PsgU}hk3ESlEzRyxwxHVks_u-YQ#6E&Ep@StFV$nK9s5qaLu97i9?`YjN*ow^W0bE{eo%tB48IqYd>;i}DHWI^9H71nQS$ER5Ymn%9uW$otyZQtF2dm6+aiOP8Cf z0X=mV%7+o~sQ0%C@wN8UCCc|`P)Anu7osM{;hm6*Gp*D~Gma7_^(?+U$x30l)dQQ= zEsW??h(%~wE+cg;1n!XN4^&KujS2+z7;bTNuN<`4Y1KoAs#~Ke4S5zHBeqZ~9nl;O za88ypYl09-=*o?(cATj0{)nRHCuDND>508;itoCfL$ZTTUAmIu+-jp@R^`ldAvV=6 zOkflsJ9MYCDG-X(v|4s545v$^x)ar7X}wSySbM=6r4n14+^lfwQrymkAdisN)VkiP z1D-Qw6wNBz1CB0&PeP-r-TP5KN4r3b^Jtmxfv7-9?O+=tx-3ia+@UfykCx-*`D$R-7ra7eOP!8*6Hd;Zk(F5YJ!ALg=(BmgW zCOuNn!%pL&Q{@hqOl4-sqAvpG`=FEYA3R|0uu+a*ip2n--sl|5q(<6VF`|l2n0is48l%fK8D4Hw@h|l+HV(zM$bt zCvZ2}3c&MNC(x1eBV#S`eo`M(b9ICleGoluscOTj2)q6Hi7%Up#4rE$&dn8#RULu+mj z!8rhF3DD(BAa<8ZZ0BVyo!@k5N#+|R7p;oML2XUaTX@qBCU63GFmbFkcv-Khck77j{7D_McZ-XhhOGI`Y~;Ck;qPkEP{a~+uV1B zVXDW%=We1CY?RSiY#RN3OBjQ-z9vi-? 
z4UDLJ9FZG4+T#T+w@}ud5|{}p%jy8NwNB*S?_q7EaMb;jI+4SDQBPO8RFw|GK%AK7 zQn-y!zL4y4qB@5Rpmj!6v?8x@{FWKds$gqvs;X}9qBhZbqz>G}ii}?;3k5E4>!q0O zgaR@&MuW5JKgs_9xk=%o_vTgE8M-qO>Vhuon8_(~d!YRi}5>IEKc%#p}K#>RI{mRD`dBZrIL=q04h?t#&hp-Rq(6mQgfqaj1-TyuLPh}e6| zeC8~HUP8qoJFH|b(&=rS=F1x?BM4S3cPxq`r2{yPuvDpNV>dp4uycANyL39PW4AQS zW51B9 zgce%HK5HCkq@Th#shR@MFLYRokhjbV$AU$|KO{cNAMW62knhO=Be>|GH=s=u9mW=N zlkn()3MD&`YEUu*WSf(O$z@EoB5BbRUeSb~FN_s}HvQ3Z8BkaCKxG!f8KD?i;f<3f zmoyZTi+ZeVw;rn_q!+mmg&_O5ah`@`*BSnpust2gH zx6=ri+neZ#(K!7O$q^bR7EEiocT!<)w^gE!=P8CH3nD}OU~!^T$QqTN3rbVs8=xYP z$4)-TWSYcVLa;&zM-3<%l*fB+3H%{^p*Yimgi(Wpgr(G6gQklFcNqzi4qOEWQ@FxG zxU$}XVd$Fck%TO#HrFT`OPFDRYNfEWs<$Tz{suKp;NXzv$kyR4*4F4mZ9^;|A4CyR zR}01#!pBMozUT~NWi6x^xhN&EEtcUxm<(!~_(sT07(pDu!PqEk4umF}Zd`+v0&;dp zmG7z+V%v3=*rxMe>V&{=lpwO^Ol(HhwBaIqgM?AR)jNZNdX53DMJ$72kl4nB7l15K zh%}563#eqnbhbp*CYt-8vc$(NipM)Qx^rL}AQZPKBb!hRpydmJ`loq8sus*f22u_7 z0K$c>{3eNdsB~^WL?QW**4*7S%^H-aL_oR-nSE2ZWKUAbfx6ujW(OK17_5uJY132@ zaYi}7V4J9Pp)Jvk(HKU^Cu?j(vt$9HH$6-=zzNfT)7D|jS9&D)F!fs9Z<*% zaFFkPfDznih0&Dn9Q0WGoe&{J?3&vQ9E2j6+b1a`5^PXQDrKP?N+!LnN-zs7WKG>N z8BjzGZlFvyK{Oy8aG|jclO&d!5@i;?>l-D)nCV6GQ(XwY5n(x4NO=0Jjn}|LX#y`T z%aC%S!#08#!+Tj1qS+H>JrM_AO;9m*>WV0@m4*X+loPas$CWi<Er@pFr z2IwN@Z>onvErhnj-?|v$hKWY#BbzE6O?}gaIJOfN>@S+e>K!Q6AC|f= zBsJMYGj$N=oplOKd!rK(jg|>IpanD@m_*%hh{ogeP+}ktRL5j;45wwYs>Obju@b$Z z);UtGw7M7$r4;^u%;29N?s7pkHrZe86XSUv663r~SgM|k&Ze^@_V6AwOOFRs!y`aK z`B+i}o8%m^H(TXakdZ}&yRJTY55H-LcZ&A`XbwJ4Tsu#Jx zbs}c%ssy$jEAO@fmgmqY@ZG+NVlqy+MZLPX;m?mjb( zP`o0E?OEcrFt)lOoUJ1zAb-L*`3fl|X~59}O3beuXR5Ilbtqk7q8@Ro2|Y*i`lLuV zy29qDY^+F076Zr@wePydbQ<+h7cmCfAahRIsUiz~N0>cQ9T@6~(AZp^QYDXYkm8#r zH)klM2x5?KnCY64HmvkAn2iZhG+-ZhDhrsHUeGbBWNl_wb4lGcCi|up&}`2S*OMKP#w7zLeG2PMKCw9O%cAjj10Ke$wnc$Q5;0-Qs!Vd zx(Owz4|TqYyDInCrg24>3{+2dY$! zKXopPme(i~*J3;&)h;JzC~70z6*0;Vs4UB%ax8+{T@&SM7deO{X3-Ny0rzs1BYG8Q z#Aq1R3weI%99_2|jkf878M;w;I8n|tKr|eg-Ayhpx>0je82)d80(%TE+>f$93i-W~ z!vL`ejsz~TiTBCB!@Rq@3()OS-w4jPlpvj|cuY9Wi0Pvm* z22gl_lmv9#QvvF%yn3a&Ko9~3P&cUeICaWXBhI%%=eJZsX6>p3mV@Mw-qy!eEoOB? 
z;XDlJl*rFi5st%Uht(!gZ8DoIU}{xn`?=VpPBCo_iO6IbMFX3x6HNxAWdA{g5hUiP$kpza) zw^OZ=x!aNUN^9@%COwAvO)@WiMhNezvRsj58|ZF=I7z+EibNd6jlN5OMTc#C4)Hi)g7=wucE*h;XGgq_w`gn(ku++Gx{xy4-B#Z^Ct@*4$3|Ebta4nm%w|SP6C-v?4{gvj9;`E0 zjFgDobE>xuI49vYM>P{$k_h!eOn(Z~sZp&5Qk~&9a-P`ZBO=LRv5is-ihM0O1woCa zIx2RZ4ia9_4iRk&6qeW2aiwQeUQwNLs~LuX8>s{hLAna_RZLW$oFzi%OWuv&4 zeQum0ZM9B_FyI18!%ESyx@0bd@;9mg#)Qc^HrI5L8+4IuSz-!CoRm$cZ&h4O2B=HM zxv~gbeM_yRZ&WU)Zpjasw%uc88(ktQHY}F1*4;xW7)9=t3pW6iGCV0*GW20O%=k)7 z)8BMf2V$NnL2oxHvPgMjyDCQOjZwoJN!3`624cY2u4{qzOiv(VebK2W0ZA*nBDvLs z6O^5iz?L)}P(6)Y=V|F-iT()7o48yIp}$m`v#~l;Q;L^R*#k;*saw@eB@3J`80&Si ztmy^qV}ufIIcUp5k-b()O5Uox4;QtGzKPY8rG#Ci6Q$Cz3ncYnM-z8YX;|Wt&^~2q ziV){==%rwiwzm;94Q869io9g5LyGu7@(+uu<60P^)tKOv)!oH4`PC2r>Qma-{^`az z?VVF3+P;EJO}40C6Haf0#)!vKmi7*Y$!Ivebt9FxP84nGsbR$tf=1z4mdJu}o3vi2 zC-C-lNObwZunHuYdaQd}f}mQX)CuRl>kp~Ab6Uk}k_Ni~5~L?+uOg!%Z&f}MS0v61 z(oL5iP#6gVZBl4anr_L94QLijij}*jNyEn{I;z>lu29vB9F&*B%L0|U-5V&|)l%mg zo7pr<+h*z{9B<5f>6@~W)@iz3D{gGiN|~v{4s}vUjh5F8P_T>LCgGn7i5?Kg%2H&M zdaz$ni(l@Aj=qI3257JF=p(otQ%F*@QOCsAo$Qa5qji6Z(MHz+vZo-7B*HRM*Qm1# zM(SgUq@}-=KzIn)yLGu~)U49#MI$<@u9O`CMHHZF3JRQVw=Bs{&bLs=nl)9!+p?2m zs%Z&FLH=2#1hlMfnNZn6Wdj;Xz3ht%Wp?xs>Z8>qkIt)=brT$Y-4#bi7&hHBnjBnt zZdS0|r#dF}Rt!udI;^FoVa16l24t1`6`vDMRB|x2>&Z@hm7cH?QD7YHm=%OoMnQ&4z+NB5@l_ZVzQqz3_D6o`lw=6yuXrcT$ zol;$4u>~$!0$`)jh2|}xun98Hom1NzNePovuxbiQ;Tn&`0CatPsAIi^%beGg(*ZZb7mBjSfTc+VajEe-4ZRkIRZgK248m2-4&Fn^r zoUwys*T)&x2xDy*;_X#yMVnCI(V)3PsE7u@sZ-4QvX17L(GNs(J>WNy?Q3ue(I$FE z_*b^e7ADuk%xXWorgi4GRbrMfT;HmdiYD57KO>*xluaDA8z%4?1|DKYg9=tDFPPEF zPr3g3l^~2bNqpM&RhdpXP86(AMXlQQ@F_MCm6JdPmMl(2Qg1Qo9i8D(m@mteuG{do z3TBYp-D1U>*TDRaUYzCaD_`R}w-cjig`^FaT8)6LSg>cg(_!Dj{Ma*_Xs&Sh!PQE{ z;%Ci#2Rp(j7*0ZO5utBID;1ApMJYVz6FnnF$#6nCC|IcGWnJYP#hdV~Zywj!td*u04#x#fq~!o73(IU{~dcSQU#BESDpZ?4=uA{{VPhc1|b57#eKV zixx#S@S0lkef_+S1CCKOv0MzR-HPGNaJd&MRwZwUy0d=H--$zyQM5~GBjqxpPBvsNkn$}RUR%6BLypF-xdN!F| zakAhzyFc?14S^~aD;LU)T5bD8{{Y&G@j?|!me}E9#RhEP{vbm#xvtJY4>7ij7AGCT zx5)m-BUMb(6NwoYTo(<15N;GKQkLk(qV$)T5=1PH7)CI$Vu_sgM=?)A+l(pW^eq5b zC|HNFs?7VyoJ%Riv{qq&9hNLgJEJ12VkdxKMa=NpM)I62Sgc!=)o&=e=tE&~7%Gi5 zGfZu987meh_xlp0{u}=QtNItXjUz(_He9cF^bK)!#q6PCBEB?Y;v4pPd7KYyWIjJG z*U@tPHUQROY7{I@e9X(JzrcZGvz#%Gn;~&sq@!q@fH1LQQ~7@8H90>0uk@T-sfF}i zkFpd^E;kBRC{x^)GEGut9@f$|vRDf)AHW?HtWv4FCx?^nq2wZspW+Xy?Yvrnynxw4 z#6x0Omeoku>XJeOfL)KW)ZjhX* Date: Thu, 13 Jul 2023 22:17:52 +0200 Subject: [PATCH 176/775] drexelundweiss: clean up init code, bump version --- drexelundweiss/__init__.py | 20 +++++--------------- drexelundweiss/plugin.yaml | 2 +- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/drexelundweiss/__init__.py b/drexelundweiss/__init__.py index d32f521c5..4a2b332bc 100755 --- a/drexelundweiss/__init__.py +++ b/drexelundweiss/__init__.py @@ -28,26 +28,16 @@ import re import codecs from lib.model.smartplugin import SmartPlugin -from bin.smarthome import VERSION +import serial -try: - import serial - REQUIRED_PACKAGE_IMPORTED = True -except Exception: - REQUIRED_PACKAGE_IMPORTED = False class DuW(SmartPlugin): ALLOW_MULTIINSTANCE = False - PLUGIN_VERSION = "1.5.3" + PLUGIN_VERSION = "1.5.4" def __init__(self, smarthome): - self._name = self.get_fullname() - if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5': - self.logger = logging.getLogger(__name__) - if not REQUIRED_PACKAGE_IMPORTED: - self.logger.error("Unable to import Python package 'serial'") - self._init_complete = False - return + super().__init__() + try: self._LU_ID = self.get_parameter_value('LU_ID') self._WP_ID = self.get_parameter_value('WP_ID') @@ -301,7 +291,7 @@ def run(self): 
                 self.logger.debug("Init PANEL register failed: {}".format(register))
             except Exception as e:
                 self.logger.error("Init PANEL register not possible: {}".format(register))
-            
+
         # poll DuW interface
         dw_id = 0
         dw_register = 0

diff --git a/drexelundweiss/plugin.yaml b/drexelundweiss/plugin.yaml
index eaf828760..33b46c17e 100755
--- a/drexelundweiss/plugin.yaml
+++ b/drexelundweiss/plugin.yaml
@@ -12,7 +12,7 @@ plugin:
     # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin    # url of documentation (wiki) page
     support: https://knx-user-forum.de/forum/supportforen/smarthome-py/34582-drexel-weiss-plugin
-    version: 1.5.3                 # Plugin version
+    version: 1.5.4                 # Plugin version
     sh_minversion: 1.5             # minimum shNG version to use this plugin
     # sh_maxversion:               # maximum shNG version to use this plugin (leave empty if latest)
     multi_instance: False          # plugin supports multi instance

From 1fa048ea120c360702fb6b644a8f527c74c57388 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:31:55 +0200
Subject: [PATCH 177/775] pioneer: introduce standby_item for mute functionality

---
 pioneer/plugin.yaml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/pioneer/plugin.yaml b/pioneer/plugin.yaml
index 8b3171471..5ce0ecee0 100755
--- a/pioneer/plugin.yaml
+++ b/pioneer/plugin.yaml
@@ -15,6 +15,13 @@ plugin:
 
 parameters:
 
+    standby_item_path:
+        type: str
+        default: ''
+        description:
+            de: 'Item-Pfad für das Standby-Item'
+            en: 'item path for standby switch item'
+
     host:
         type: str
         mandatory: false

From 99801d175b3a8841d155df38c4355155b1c85be8 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:32:07 +0200
Subject: [PATCH 178/775] pioneer: fix commands

---
 pioneer/commands.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pioneer/commands.py b/pioneer/commands.py
index 72be9ddfa..b02ca258b 100755
--- a/pioneer/commands.py
+++ b/pioneer/commands.py
@@ -17,7 +17,7 @@
     'general': {
         'error': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'E0{LOOKUP}', 'lookup': 'ERROR'},
         'display': {'read': True, 'write': False, 'read_cmd': '?FL', 'item_type': 'str', 'dev_datatype': 'PioDisplay', 'reply_pattern': 'FL(.{28}).*'},
-        'pqls': {'read': True, 'write': True, 'read_cmd': '?PQ', 'write_cmd': '{RAW_VALUE:01}PQ', 'item_type': 'str', 'dev_datatype': 'bool', 'reply_pattern': r'PQ(\d)'},
+        'pqls': {'read': True, 'write': True, 'read_cmd': '?PQ', 'write_cmd': '{RAW_VALUE:01}PQ', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'PQ(\d)'},
         'dimmer': {'read': True, 'write': True, 'write_cmd': '{RAW_VALUE}SAA', 'cmd_settings': {'force_min': 0, 'force_max': 3}, 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'SAA(\d)', 'item_attrs': {'attributes': {'remark': '0 = very bright, 1 = bright, 2 = dark, 3 = off'}}},
         'sleep': {'read': True, 'write': True, 'read_cmd': '?SAB', 'write_cmd': '{VALUE}SAB', 'item_type': 'num', 'dev_datatype': 'PioSleep', 'reply_pattern': r'SAB(\d{3})', 'item_attrs': {'attributes': {'remark': '0 = off, 30 = 30 minutes, 60 = 60 minutes, 90 = 90 minutes'}}},
         'amp': {'read': True, 'write': True, 'read_cmd': '?SAC', 'write_cmd': '{VALUE}SAC', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SAC{LOOKUP}', 'lookup': 'AMP', 'item_attrs': {'attributes': {'remark': '0 = AMP, 1 = THR'}, 'lookup_item': True}},
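The standby_item_path parameter added above for the pioneer plugin (and, in the following patch, for the denon plugin) names an existing switch item that the mute handling can use. A minimal sketch of how it might be set in etc/plugin.yaml; the host address and the item path my.avr.standby are illustrative placeholders only, not part of the patch:

    pioneer:
        plugin_name: pioneer
        host: 192.168.1.10
        standby_item_path: my.avr.standby
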
From 246250a7b0b5679df11a092d647644754248e392 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:35:11 +0200
Subject: [PATCH 179/775] denon: introduce standby item for mute functionality

---
 denon/plugin.yaml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/denon/plugin.yaml b/denon/plugin.yaml
index 394a568c4..f6d3a79b0 100755
--- a/denon/plugin.yaml
+++ b/denon/plugin.yaml
@@ -15,6 +15,13 @@ plugin:
 
 parameters:
 
+    standby_item_path:
+        type: str
+        default: ''
+        description:
+            de: 'Item-Pfad für das Standby-Item'
+            en: 'item path for standby switch item'
+
     model:
         type: str
         mandatory: false

From bd0af1fee7b6dd90de1c4326084a6b3cc9b42846 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:35:35 +0200
Subject: [PATCH 180/775] denon: re-introduce standalone functionality

---
 denon/__init__.py | 22 +++++++++++++++++++++-
 1 file changed, 21 insertions(+), 1 deletion(-)

diff --git a/denon/__init__.py b/denon/__init__.py
index e098feb55..c21477510 100755
--- a/denon/__init__.py
+++ b/denon/__init__.py
@@ -22,9 +22,26 @@
 #########################################################################
 
 import builtins
+import os
+import sys
+
+if __name__ == '__main__':
+    builtins.SDP_standalone = True
+
+    class SmartPlugin():
+        pass
+
+    class SmartPluginWebIf():
+        pass
+
+    BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3])
+    sys.path.insert(0, BASE)
+
+else:
+    builtins.SDP_standalone = False
 
 from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_NET_TCP_CLI, CONN_SER_ASYNC)
-from lib.model.smartdeviceplugin import SmartDevicePlugin
+from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone
 
 # from .webif import WebInterface
 
@@ -126,3 +143,6 @@ def _check_for_custominputs(self, command, data):
             src = tmp[0][5:]
             name = tmp[1]
             self._custom_inputnames[src] = name
+
+if __name__ == '__main__':
+    s = Standalone(lms, sys.argv[0])
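The standalone scaffolding re-introduced above lets denon/__init__.py be imported and run outside of a running SmartHomeNG core: when the module is executed directly, it stubs SmartPlugin/SmartPluginWebIf and extends sys.path before the SDP imports. Note that the new entry point passes lms to Standalone, a name that is not defined anywhere in this module and looks like a leftover from another SmartDevicePlugin-based plugin; running the file standalone would presumably need the plugin class defined in this file instead, roughly:

    if __name__ == '__main__':
        s = Standalone(denon, sys.argv[0])

This is only a sketch; that Standalone expects the plugin class and the script path as its arguments is an assumption and is not stated in the patch itself.
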
From 9412361c2e819e76c4e69808e4ffdea6beea1012 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:37:08 +0200
Subject: [PATCH 181/775] stateengine plugin: minor log improvement

---
 stateengine/StateEngineItem.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py
index 24989a35f..65adfccec 100755
--- a/stateengine/StateEngineItem.py
+++ b/stateengine/StateEngineItem.py
@@ -289,6 +289,7 @@ def __init__(self, smarthome, item, se_plugin):
             startup_delay = 1 if self.__startup_delay.is_empty() or _startup_delay_param == 0 else _startup_delay_param
             if startup_delay > 0:
                 first_run = self.__shtime.now() + datetime.timedelta(seconds=startup_delay)
+                self.__logger.info("Will start stateengine evaluation at {}", first_run)
                 scheduler_name = self.__id + "-Startup Delay"
                 value = {"item": self.__item, "caller": "Init"}
                 self.__se_plugin.scheduler_add(scheduler_name, self.__startup_delay_callback, value=value, next=first_run)

From 3a8ab0826d245152e4ad4d33596257de54d00d2d Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:58:00 +0200
Subject: [PATCH 182/775] appletv: clean up user_doc

---
 appletv/user_doc.rst | 156 +++----------------------------------------
 1 file changed, 8 insertions(+), 148 deletions(-)

diff --git a/appletv/user_doc.rst b/appletv/user_doc.rst
index 378ee43a8..0024c5120 100755
--- a/appletv/user_doc.rst
+++ b/appletv/user_doc.rst
@@ -37,135 +37,18 @@ plugin.yaml
 Items
 =====
 
-name (String)
--------------
-Enthält den Namen des Geräts, wird beim Starten des Plugins durch die automatische Erkennung gefüllt
+.. important::
 
-artwork_url (String)
---------------------
-Enthält eine URL zum Artwork der aktuell abgespielten Mediendatei (falls vorhanden).
+   Detaillierte Informationen zur Konfiguration des Plugins sind unter :doc:`/plugins_doc/config/appletv` zu finden.
 
-play_state (Ganzzahl)
----------------------
-Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände:
-
-* 0: Gerät befindet sich im Leerlaufzustand
-* 1: Kein Medium wird gerade ausgewählt/abgespielt
-* 2: Medium wird geladen/gepuffert
-* 3: Medium ist pausiert
-* 4: Medium wird abgespielt
-* 5: Medien werden vorgespult
-* 6: Medien werden zurückgespult
-
-play_state_text (String)
-------------------------
-Der aktuelle Status der Wiedergabe als Text.
-
-playing (bool)
---------------
-`True` wenn play_state 4 ist (Medium wird abgespielt), `False` für alle anderen play_states.
-
-media_type (Ganzzahl)
----------------------
-Der aktuelle Abspielstatus als Integer. Derzeit unterstützte Abspielzustände:
-
-* 1: Medientyp ist unbekannt
-* 2: Medientyp ist Video
-* 3: Medientyp ist Musik
-* 4: Medientyp ist TV
-
-media_type_text (String)
-------------------------
-Der aktuelle Medientyp als Text.
-
-album (String)
---------------
-Der Name des Albums. Nur relevant, wenn der Inhalt Musik ist.
-
-artist (String)
----------------
-Der Name des Interpreten. Nur relevant, wenn der Inhalt Musik ist.
-
-genre (String)
---------------
-Das Genre der Musik. Nur relevant, wenn der Inhalt Musik ist.
-
-title (String)
---------------
-Der Titel des aktuellen Mediums.
-
-position (Ganzzahl)
--------------------
-Die aktuelle Position innerhalb des abspielenden Mediums in Sekunden.
-
-total_time (Ganzzahl)
----------------------
-Die tatsächliche Abspielzeit des Mediums in Sekunden.
-
-position_percent (Ganzzahl)
----------------------------
-Die aktuelle Position innerhalb des abspielenden Mediums in %.
-
-repeat (Ganzzahl)
------------------
-Der aktuelle Status des ausgewählten Wiederholungsmodus. Derzeit unterstützte Wiederholungsmodi:
-
-* 0: Keine Wiederholung
-* 1: Wiederholung des aktuellen Titels
-* 2: Alle Spuren wiederholen
-
-repeat_text (String)
---------------------
-Der aktuell gewählte Typ des Wiederholungsmodus als String.
-
-shuffle (bool)
---------------
-`True` wenn shuffle aktiviert ist, `False` wenn nicht.
-
-rc_top_menu (bool)
-------------------
-Setzt diesen Punkt auf `True`, um zum Home-Menü zurückzukehren.
-Das Plugin setzt diesen Eintrag nach der Befehlsausführung auf `False` zurück.
-
-rc_menu (bool)
---------------
-Setzt diesen Punkt auf `True`, um zum Menü zurückzukehren.
-Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück.
-
-rc_select (bool)
-----------------
-Setzt diesen Punkt auf `True` um die 'select' Taste zu drücken.
-Das Plugin setzt diesen Punkt nach der Ausführung des Befehls auf `False` zurück.
-
-rc_left, rc_up, rc_right, rc_down (bools)
------------------------------------------
-Setzt eines dieser Elemente auf `True`, um den Cursor in die entsprechende Richtung zu bewegen.
-Das Plugin setzt diese Werte nach der Befehlsausführung auf `False` zurück.
-
-rc_previous (bool)
-------------------
-Setzen Sie dieses Element auf `True`, um die 'previous'-Taste zu drücken.
-Das Plugin setzt diesen Punkt nach der Befehlsausführung auf `False` zurück.
-
-rc_play (bool)
---------------
-Setzt dieses Element auf `True`, um die 'play'-Taste zu drücken.
-Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück.
-
-rc_pause (bool)
----------------
-Setzt dieses Element auf `True`, um die 'Pause'-Taste zu drücken.
-Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück.
-
-rc_stop (bool)
---------------
-Setzt dieses Element auf `True`, um die 'stop'-Taste zu drücken.
-Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück.
-
-rc_next (bool)
---------------
-Setze dieses Element auf `True`, um die 'next'-Taste zu drücken.
-Das Plugin setzt dieses Element nach der Ausführung des Befehls auf `False` zurück.
+
+.. code-block:: yaml
+
+    # etc/plugin.yaml
+    appletv:
+        plugin_name: appletv
+        #ip: 0.0.0.0
+        #scan_timeout: 5
 
 
 Struct Vorlagen
 ===============
 
 Ab smarthomeNG 1.6 können Vorlagen aus dem Plugin einfach eingebunden werden. Dazu gehören
 
 - control: verschiedene Fernbedienungsfunktionen wie Menü, Play/Pause, etc.
 
-Funktionen
-==========
-
-is_playing()
-------------
-Gibt `true` oder `false` zurück und zeigt an, ob das Apple TV gerade Medien abspielt.
-Beispiel: `playing = sh.appletv.is_playing()`
-
-play()
-------
-Sendet einen Abspielbefehl an das Gerät.
-Beispiel: `sh.appletv.play()`
-
-pause()
--------
-Sendet einen Pausenbefehl an das Gerät.
-Beispiel: `sh.appletv.pause()`
-
-play_url(url)
--------------
-Spielt ein Medium unter Verwendung der angegebenen URL ab. Das Medium muss natürlich mit dem Apple TV Gerät kompatibel sein. Damit dies funktioniert, muss SHNG zuerst beim Gerät authentifiziert werden. Dies geschieht über die Schaltfläche "Authentifizieren" in der Weboberfläche. Anschließend muss ein PIN-Code, der auf dem Fernsehbildschirm angezeigt wird, in die Weboberfläche eingegeben werden. Dieser sollte nur einmal benötigt werden und für immer gültig sein.
-Beispiel: `sh.appletv.play_url('http://distribution.bbb3d.renderfarming.net/video/mp4/bbb_sunflower_1080p_60fps_normal.mp4')`
-
 SmartVISU
 =========
 Wenn SmartVISU als Visualisierung verwendet wird, kann folgender HTML-Code in einer der Seiten verwendet werden:
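The struct templates mentioned in the slimmed-down user_doc can be pulled into an item tree. A minimal sketch of such an item definition in an items yaml file; only the control struct is named in the document, and the item path media.appletv is an illustrative placeholder:

    media:
        appletv:
            struct: appletv.control
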
From 6637559871e858bc10c1f4143d5919ef3ee0e0a4 Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Thu, 13 Jul 2023 22:58:17 +0200
Subject: [PATCH 183/775] appletv: improve error handling and logging

---
 appletv/__init__.py | 34 ++++++++++++++++++----------------
 1 file changed, 18 insertions(+), 16 deletions(-)

diff --git a/appletv/__init__.py b/appletv/__init__.py
index abad567d6..514ba1603 100755
--- a/appletv/__init__.py
+++ b/appletv/__init__.py
@@ -226,20 +226,23 @@ async def discover(self):
         """
        Discovers Apple TV's on local mdns domain
        """
-        self.logger.debug("Discovering Apple TV's in your network for {} seconds...".format(
-            int(self._atv_scan_timeout)))
-        self._atvs = await pyatv.scan(self._loop, timeout=self._atv_scan_timeout)
+        try:
+            self.logger.debug("Discovering Apple TV's in your network for {} seconds...".format(
+                int(self._atv_scan_timeout)))
+            self._atvs = await pyatv.scan(self._loop, timeout=self._atv_scan_timeout)
 
-        if not self._atvs:
-            self.logger.warning("No Apple TV found")
-        else:
-            self.logger.info("Found {} Apple TV's:".format(len(self._atvs)))
-            for _atv in self._atvs:
-                _markup = '-'
-                if str(_atv.address) == str(self._ip):
-                    _markup = '*'
-                self._atv = _atv
-                self.logger.info("  {} {}, IP: {}".format(_markup, _atv.name, _atv.address))
+            if not self._atvs:
+                self.logger.warning("No Apple TV found")
+            else:
+                self.logger.info("Found {} Apple TV's:".format(len(self._atvs)))
+                for _atv in self._atvs:
+                    _markup = '-'
+                    if str(_atv.address) == str(self._ip):
+                        _markup = '*'
+                    self._atv = _atv
+                    self.logger.info("  {} {}, IP: {}".format(_markup, _atv.name, _atv.address))
+        except Exception as e:
+            self.logger.warning("Issue while searching for Apple TV: {}".format(e))
 
     async def connect(self):
         """
@@ -325,9 +328,8 @@ def _update_position(self, new_position, from_device):
             self._update_items('playing_position_percent', 0)
 
     def handle_async_exception(self, loop, context):
-        self.logger.error('*** ASYNC EXCEPTION ***')
-        self.logger.error('Context: {}'.format(context))
-        raise
+        self.logger.error('ASYNC EXCEPTION. Context: {}'.format(context))
+        #raise Exception()
 
     def _push_listener_thread_worker(self):
         """

From ece8c32b341f041225454452d489ce3934aacb3d Mon Sep 17 00:00:00 2001
From: sisamiwe
Date: Fri, 14 Jul 2023 09:34:09 +0200
Subject: [PATCH 184/775] DB_ADDON: - Improve getting query params during
 parse_item - fix calculation of start and end of
 verbrauch_jahreszeitraum_timedelta - fix calculation of rolling_window

---
 db_addon/__init__.py | 63 +++++++++++++++++++++-------------------
 1 file changed, 36 insertions(+), 27 deletions(-)

diff --git a/db_addon/__init__.py b/db_addon/__init__.py
index 29c73698d..a137f9c11 100644
--- a/db_addon/__init__.py
+++ b/db_addon/__init__.py
@@ -204,8 +204,9 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]:
     elif db_addon_fct in HISTORIE_ATTRIBUTES_LAST:
         # handle functions 'minmax_last' in format 'minmax_last_timedelta|timeframe_function' like 'minmax_last_24h_max'
         func = db_addon_fct_vars[3]
-        timeframe = convert_timeframe(db_addon_fct_vars[2][-1:])
-        start = to_int(db_addon_fct_vars[2][:-1])
+        start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[2])
+        start = to_int(start)
+        timeframe = convert_timeframe(timeframe)
         end = 0
         log_text = 'minmax_last_timedelta|timeframe_function'
         required_params = [func, timeframe, start, end]
@@ -248,12 +249,13 @@
     elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ROLLING:
         # handle functions 'verbrauch_on-demand' in format 'verbrauch_rolling_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1'
         func = db_addon_fct_vars[1]
-        window_inc = to_int(db_addon_fct_vars[2][:-1])  # 12
-        window_dur = convert_timeframe(db_addon_fct_vars[2][-1])  # day, week, month, year
+        window_inc, window_dur = split_sting_letters_numbers(db_addon_fct_vars[2])
+        window_inc = to_int(window_inc)  # 12
+        window_dur = convert_timeframe(window_dur)  # day, week, month, year
         timeframe = convert_timeframe(db_addon_fct_vars[3])  # day, week, month, year
-        if window_dur in ALLOWED_QUERY_TIMEFRAMES and window_inc and timeframe:
-            start = convert_duration(timeframe, window_dur) * window_inc
-            end = to_int(db_addon_fct_vars[4][-1])  # 1
+        end = to_int(split_sting_letters_numbers(db_addon_fct_vars[4])[1])
+        if window_dur in ALLOWED_QUERY_TIMEFRAMES and window_inc and timeframe and end:
+            start = to_int(convert_duration(timeframe, window_dur) * window_inc) + end
         log_text = 'verbrauch_rolling_window_timeframe_timedelta'
         required_params = [func, timeframe, start, end]
@@ -285,9 +287,10 @@
         # handle functions 'serie_minmax' in format 'serie_minmax_timeframe_func_start|group' like 'serie_minmax_monat_min_15m'
         func = db_addon_fct_vars[3]
         timeframe = convert_timeframe(db_addon_fct_vars[2])
-        start = to_int(db_addon_fct_vars[4][:-1])
+        start, group = split_sting_letters_numbers(db_addon_fct_vars[4])
+        start = to_int(start)
+        group = convert_timeframe(group)
         end = 0
-        group = 
convert_timeframe(db_addon_fct_vars[4][len(db_addon_fct_vars[4]) - 1]) log_text = 'serie_minmax_timeframe_func_start|group' required_params = [func, timeframe, start, end, group] @@ -295,8 +298,9 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle functions 'serie_zaehlerstand' in format 'serie_zaehlerstand_timeframe_start|group' like 'serie_zaehlerstand_tag_30d' func = 'max' timeframe = convert_timeframe(db_addon_fct_vars[2]) - start = to_int(db_addon_fct_vars[3][:-1]) - group = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) + start = to_int(start) + group = convert_timeframe(group) log_text = 'serie_zaehlerstand_timeframe_start|group' required_params = [timeframe, start, group] @@ -304,16 +308,18 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle all functions of format 'serie_verbrauch_timeframe_start|group' like 'serie_verbrauch_tag_30d' func = 'diff_max' timeframe = convert_timeframe(db_addon_fct_vars[2]) - start = to_int(db_addon_fct_vars[3][:-1]) - group = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) + start = to_int(start) + group = convert_timeframe(group) log_text = 'serie_verbrauch_timeframe_start|group' required_params = [timeframe, start, group] elif db_addon_fct in SERIE_ATTRIBUTES_SUMME: # handle all summe in format 'serie_xxsumme_timeframe_count|group' like serie_waermesumme_monat_24m func = 'sum_max' - timeframe = 'month' - start = to_int(db_addon_fct_vars[3][:-1]) + start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[3]) + start = to_int(start) + timeframe = convert_timeframe(timeframe) end = 0 group = 'day', group2 = 'month' @@ -324,9 +330,10 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle 'serie_tagesmittelwert_count|group' like 'serie_tagesmittelwert_0d' => Tagesmittelwert der letzten 0 Tage (also heute) func = 'max' timeframe = 'year' - start = to_int(db_addon_fct_vars[2][:-1]) + start, group = split_sting_letters_numbers(db_addon_fct_vars[2]) + start = to_int(start) + group = convert_timeframe(group) end = 0 - group = convert_timeframe(db_addon_fct_vars[2][len(db_addon_fct_vars[2]) - 1]) log_text = 'serie_tagesmittelwert_count|group' required_params = [func, timeframe, start, end, group] @@ -334,28 +341,31 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: # handle 'serie_tagesmittelwert_group2_count|group' like 'serie_tagesmittelwert_stunde_0d' => Stundenmittelwerte der letzten 0 Tage (also heute) func = 'avg1' timeframe = 'day' - start = to_int(db_addon_fct_vars[3][:-1]) end = 0 group = 'hour' - group2 = convert_timeframe(db_addon_fct_vars[3][len(db_addon_fct_vars[3]) - 1]) + start, group2 = split_sting_letters_numbers(db_addon_fct_vars[3]) + start = to_int(start) + group2 = convert_timeframe(group2) log_text = 'serie_tagesmittelwert_group2_count|group' required_params = [func, timeframe, start, end, group, group2] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1: # handle 'serie_tagesmittelwert_stunde_start_end|group' like 'serie_tagesmittelwert_stunde_30_0d' => Stundenmittelwerte von vor 30 Tage bis vor 0 Tagen (also heute) - timeframe = 'day' method = 'raw' start = to_int(db_addon_fct_vars[3]) - end = to_int(db_addon_fct_vars[4][:-1]) + end, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) + end = to_int(end) + timeframe = convert_timeframe(timeframe) 
log_text = 'serie_tagesmittelwert_stunde_start_end|group' required_params = [timeframe, method, start, end] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_D_H: # handle 'serie_tagesmittelwert_tag_stunde_end|group' like 'serie_tagesmittelwert_tag_stunde_30d' => Tagesmittelwert auf Basis des Mittelwerts pro Stunden für die letzten 30 Tage - timeframe = 'day' method = 'raw' - start = to_int(db_addon_fct_vars[4][:-1]) end = 0 + start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) + start = to_int(start) + timeframe = convert_timeframe(timeframe) log_text = 'serie_tagesmittelwert_tag_stunde_end|group' required_params = [timeframe, method, start, end] @@ -1307,9 +1317,8 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: if 'timedelta' in query_params: timedelta = query_params.pop('timedelta') today = datetime.date.today() - year = today.year - timedelta - start_date = datetime.date(year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 00:00:00 ergibt - end_date = today - relativedelta(timedelta) + start_date = datetime.date(today.year, 1, 1) - relativedelta(years=timedelta) + end_date = today - relativedelta(years=timedelta) start = (today - start_date).days end = (today - end_date).days else: @@ -2710,7 +2719,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: } } - return to_int(lookup[timeframe][window_dur]) + return lookup[timeframe][window_dur] def count_to_start(count: int = 0, end: int = 0): From 8ac4f29b0f46abcda910c404a9f18f6d60801db4 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Fri, 14 Jul 2023 17:30:59 +0200 Subject: [PATCH 185/775] DB_ADDON: - Implement tagesmitteltemp for onchange (current day) --- db_addon/__init__.py | 35 ++++++++++++++++++++---------- db_addon/item_attributes.py | 6 ++--- db_addon/item_attributes_master.py | 1 + db_addon/plugin.yaml | 10 +++++++++ 4 files changed, 38 insertions(+), 14 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index a137f9c11..cb55c4b88 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -864,6 +864,17 @@ def handle_ondemand(self, item: Item) -> None: elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: result = self._prepare_temperature_list(**params) + # handle TAGESMITTEL_ATTRIBUTES_TIMEFRAME like tagesmitteltemperatur_heute_minus1 + elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: + + params.update({'method': 'raw'}) + _result = self._prepare_temperature_list(**params) + + if isinstance(_result, list): + result = _result[0][1] + else: + result = None + # handle info functions elif db_addon_fct == 'info_db_version': result = self._get_db_version() @@ -999,9 +1010,10 @@ def handle_verbrauch(): return _new_value if isinstance(_new_value, int) else round(_new_value, 1) def handle_tagesmitteltemp(): - self.logger.info(f"Onchange handling of 'tagesmitteltemperatur' not implemented, yet.") - # ToDo: Implement tagesmitteltemperatur onchange - return + result = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=0, end=0, ignore_value_list=ignore_value_list, method='raw') + + if isinstance(result, list): + return result[0][1] if self.onchange_debug: self.logger.debug(f"called with updated_item={updated_item.path()} and value={value}.") @@ -1027,18 +1039,18 @@ def handle_tagesmitteltemp(): self.logger.debug(f"non on-change function detected. 
Skip update.") continue + # handle minmax on-change items tagesmitteltemperatur_heute, minmax_heute_avg + if db_addon_fct in ['tagesmitteltemperatur_heute', 'minmax_heute_avg']: + new_value = handle_tagesmitteltemp() + # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... - if db_addon_fct.startswith('minmax'): + elif db_addon_fct.startswith('minmax'): new_value = handle_minmax() # handle verbrauch on-change items ending with heute, woche, monat, jahr elif db_addon_fct.startswith('verbrauch'): new_value = handle_verbrauch() - # handle tagesmitteltemperatur on-change items ending with heute, woche, monat, jahr - elif db_addon_fct.startswith('tagesmitteltemperatur'): - new_value = handle_tagesmitteltemp() - if new_value is None: continue @@ -1854,7 +1866,7 @@ def _create_list_timestamp_minmaxtemp() -> list: # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt if method == 'hour': - raw_data = self._query_item(func='avg', database_item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value_list=ignore_value_list) + raw_data = self._query_item(func='avg', database_item=database_item, timeframe=timeframe, start=start, end=end, group=method, ignore_value_list=ignore_value_list) if self.prepare_debug: self.logger.debug(f"{raw_data=}") @@ -2857,8 +2869,9 @@ def timeframe_to_updatecyle(timeframe) -> str: return lookup.get(timeframe) -def split_sting_letters_numbers(string) -> tuple: - return re.findall('(\d+|[A-Za-z]+)', string) +def split_sting_letters_numbers(string) -> list: + return re.findall(r'(\d+|[A-Za-z]+)', string) + ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] diff --git a/db_addon/item_attributes.py b/db_addon/item_attributes.py index 75c44316b..7c04066fc 100644 --- a/db_addon/item_attributes.py +++ b/db_addon/item_attributes.py @@ -28,7 +28,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] +ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 
'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_0d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_stunde_30_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] @@ -41,8 +41,8 @@ VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM = ['verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] -ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 
'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] -HISTORIE_ATTRIBUTES_ONCHANGE = ['minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max'] +ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +HISTORIE_ATTRIBUTES_ONCHANGE = ['minmax_heute_min', 'minmax_heute_max', 'minmax_heute_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max'] HISTORIE_ATTRIBUTES_LAST = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg'] HISTORIE_ATTRIBUTES_TIMEFRAME = ['minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py index 2be743883..00b54a8cf 100644 --- a/db_addon/item_attributes_master.py +++ b/db_addon/item_attributes_master.py @@ -76,6 +76,7 @@ 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, 'minmax_heute_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, 'minmax_heute_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_heute_avg': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Durschnittswert seit Tagesbeginn'}, 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 
'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 63592f355..184b7bab9 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -119,6 +119,7 @@ item_attributes: - minmax_last_7d_avg - minmax_heute_min - minmax_heute_max + - minmax_heute_avg - minmax_heute_minus1_min - minmax_heute_minus1_max - minmax_heute_minus1_avg @@ -237,6 +238,7 @@ item_attributes: - durchschnittlicher Wert der letzten 7 Tage - Minimalwert seit Tagesbeginn - Maximalwert seit Tagesbeginn + - Durchschnittswert seit Tagesbeginn - Minimalwert gestern (heute -1 Tag) - Maximalwert gestern (heute -1 Tag) - Durchschnittswert gestern (heute -1 Tag) @@ -389,6 +391,7 @@ item_attributes: - num - num - num + - num - list - list - list @@ -473,6 +476,7 @@ item_attributes: - daily - onchange - onchange + - onchange - daily - daily - daily @@ -865,6 +869,12 @@ item_structs: type: num # cache: yes + heute_avg: + name: Durchschnittlicher Wert seit Tagesbeginn + db_addon_fct: minmax_heute_avg + db_addon_startup: yes + type: num + last24h_min: name: Minimaler Wert in den letzten 24h (gleitend) db_addon_fct: minmax_last_24h_min From 1a52479d04f87984602914f715192fd6a76d81b1 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Jul 2023 17:00:19 +0200 Subject: [PATCH 186/775] DB_ADDON: - bump to 1.2.2 - Remove thread for working item queue; use shNG scheduler instead - improve inline docu - improve execute_items - catch exception within work_item_queue and de-init plugin in case of failure - improve prepare_value_list for calculation of avg, min_max... for value series - handle oldest_log for new-born database items - adapt webif --- db_addon/__init__.py | 578 +++++++++++----------- db_addon/plugin.yaml | 10 +- db_addon/webif/templates/index.html | 4 - 3 files changed, 235 insertions(+), 357 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index cb55c4b88..182cebe9e 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -32,7 +32,6 @@ import queue from dateutil.relativedelta import relativedelta from typing import Union -import threading from lib.model.smartplugin import SmartPlugin from lib.item import Items @@ -54,7 +53,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. 
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.2.1' + PLUGIN_VERSION = '1.2.2' def __init__(self, sh): """ @@ -76,7 +75,6 @@ def __init__(self, sh): # define variables for database, database connection, working queue and status self.item_queue = queue.Queue() # Queue containing all to be executed items - self.work_item_queue_thread = None # Working Thread for queue self._db_plugin = None # object if database plugin self._db = None # object of database self.connection_data = None # connection data list of database @@ -90,12 +88,12 @@ def __init__(self, sh): self.active_queue_item: str = '-' # String holding item path of currently executed item # define debug logs - self.parse_debug = False # Enable / Disable debug logging for method 'parse item' - self.execute_debug = False # Enable / Disable debug logging for method 'execute items' - self.sql_debug = False # Enable / Disable debug logging for sql stuff - self.ondemand_debug = False # Enable / Disable debug logging for method 'handle_ondemand' - self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = True # Enable / Disable debug logging for query preparation + self.parse_debug = True # Enable / Disable debug logging for method 'parse item' + self.execute_debug = True # Enable / Disable debug logging for method 'execute items' + self.sql_debug = True # Enable / Disable debug logging for sql stuff + self.ondemand_debug = True # Enable / Disable debug logging for method 'handle_ondemand' + self.onchange_debug = True # Enable / Disable debug logging for method 'handle_onchange' + self.prepare_debug = True # Enable / Disable debug logging for query preparation # define default mysql settings self.default_connect_timeout = 60 @@ -127,15 +125,16 @@ def run(self): self.logger.error(f"Check of existence of database plugin incl connection check failed. Plugin not loaded") return self.deinit() + # create db object self._db = lib.db.Database("DatabaseAddOn", self.db_driver, self.connection_data) if not self._db.api_initialized: self.logger.error("Initialization of database API failed") return self.deinit() - self.logger.debug("Initialization of database API successful") # init db if not self._initialize_db(): + self.logger.error("Connection to database failed") return self.deinit() # check db connection settings @@ -156,11 +155,14 @@ def run(self): # set plugin to alive self.alive = True - # start the queue consumer thread - # self._work_item_queue_thread_startup() - - # work queue - self.work_item_queue() + # work item queue + try: + self.work_item_queue() + except Exception as e: + self.logger.warning(f"During working item queue Exception '{e}' occurred.") + self.logger.debug(e, exc_info=True) + self.logger.error("Thread for working item queue died. 
De-init plugin.") + self.deinit() def stop(self): """ @@ -170,7 +172,6 @@ def stop(self): self.logger.debug("Stop method called") self.alive = False self.scheduler_remove('cyclic') - # self._work_item_queue_thread_shutdown() def parse_item(self, item: Item): """ @@ -195,7 +196,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: if db_addon_fct in HISTORIE_ATTRIBUTES_ONCHANGE: # handle functions 'minmax on-change' in format 'minmax_timeframe_func' items like 'minmax_heute_max', 'minmax_heute_min', 'minmax_woche_max', 'minmax_woche_min' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) func = db_addon_fct_vars[2] if db_addon_fct_vars[2] in ALLOWED_MINMAX_FUNCS else None start = end = 0 log_text = 'minmax_timeframe_func' @@ -206,7 +207,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: func = db_addon_fct_vars[3] start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[2]) start = to_int(start) - timeframe = convert_timeframe(timeframe) + timeframe = harmonize_timeframe_expression(timeframe) end = 0 log_text = 'minmax_last_timedelta|timeframe_function' required_params = [func, timeframe, start, end] @@ -214,7 +215,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in HISTORIE_ATTRIBUTES_TIMEFRAME: # handle functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' func = db_addon_fct_vars[3] # min, max, avg - timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) # day, week, month, year end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end log_text = 'minmax_timeframe_timedelta_func' @@ -223,7 +224,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME: # handle functions 'zaehlerstand' in format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_heute_minus1' # func = 'max' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end log_text = 'zaehlerstand_timeframe_timedelta' @@ -231,7 +232,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ONCHANGE: # handle functions 'verbrauch on-change' items in format 'verbrauch_timeframe' like 'verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) end = 0 start = 1 log_text = 'verbrauch_timeframe' @@ -239,7 +240,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in VERBRAUCH_ATTRIBUTES_TIMEFRAME: # handle functions 'verbrauch on-demand' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) # end = to_int(db_addon_fct_vars[2][-1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end + 1 @@ -251,24 +252,24 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: func = db_addon_fct_vars[1] window_inc, window_dur = split_sting_letters_numbers(db_addon_fct_vars[2]) window_inc = to_int(window_inc) # 12 - window_dur = 
convert_timeframe(window_dur) # day, week, month, year - timeframe = convert_timeframe(db_addon_fct_vars[3]) # day, week, month, year + window_dur = harmonize_timeframe_expression(window_dur) # day, week, month, year + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[3]) # day, week, month, year end = to_int(split_sting_letters_numbers(db_addon_fct_vars[4])[1]) if window_dur in ALLOWED_QUERY_TIMEFRAMES and window_inc and timeframe and end: - start = to_int(convert_duration(timeframe, window_dur) * window_inc) + end + start = to_int(convert_timeframe(timeframe, window_dur) * window_inc) + end log_text = 'verbrauch_rolling_window_timeframe_timedelta' required_params = [func, timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM: # handle functions of format 'verbrauch_jahreszeitraum_timedelta' like 'verbrauch_jahreszeitraum_minus1' - timeframe = convert_timeframe(db_addon_fct_vars[1]) # day, week, month, year + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) # day, week, month, year timedelta = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) log_text = 'verbrauch_jahreszeitraum_timedelta' required_params = [timeframe, timedelta] elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_ONCHANGE: # handle functions 'tagesmitteltemperatur on-change' items in format 'tagesmitteltemperatur_timeframe' like 'tagesmitteltemperatur_heute', 'tagesmitteltemperatur_woche', 'tagesmitteltemperatur_monat', 'tagesmitteltemperatur_jahr' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) func = 'max' start = end = 0 log_text = 'tagesmitteltemperatur_timeframe' @@ -277,19 +278,20 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: # handle 'tagesmitteltemperatur_timeframe_timedelta' like 'tagesmitteltemperatur_heute_minus1' func = 'max' - timeframe = convert_timeframe(db_addon_fct_vars[1]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end + method = 'avg_hour' log_text = 'tagesmitteltemperatur_timeframe_timedelta' - required_params = [func, timeframe, start, end] + required_params = [func, timeframe, start, end, method] elif db_addon_fct in SERIE_ATTRIBUTES_MINMAX: # handle functions 'serie_minmax' in format 'serie_minmax_timeframe_func_start|group' like 'serie_minmax_monat_min_15m' func = db_addon_fct_vars[3] - timeframe = convert_timeframe(db_addon_fct_vars[2]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[4]) start = to_int(start) - group = convert_timeframe(group) + group = harmonize_timeframe_expression(group) end = 0 log_text = 'serie_minmax_timeframe_func_start|group' required_params = [func, timeframe, start, end, group] @@ -297,20 +299,20 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: # handle functions 'serie_zaehlerstand' in format 'serie_zaehlerstand_timeframe_start|group' like 'serie_zaehlerstand_tag_30d' func = 'max' - timeframe = convert_timeframe(db_addon_fct_vars[2]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group = convert_timeframe(group) + group = harmonize_timeframe_expression(group) log_text = 
'serie_zaehlerstand_timeframe_start|group' required_params = [timeframe, start, group] elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: # handle all functions of format 'serie_verbrauch_timeframe_start|group' like 'serie_verbrauch_tag_30d' func = 'diff_max' - timeframe = convert_timeframe(db_addon_fct_vars[2]) + timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group = convert_timeframe(group) + group = harmonize_timeframe_expression(group) log_text = 'serie_verbrauch_timeframe_start|group' required_params = [timeframe, start, group] @@ -319,7 +321,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: func = 'sum_max' start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - timeframe = convert_timeframe(timeframe) + timeframe = harmonize_timeframe_expression(timeframe) end = 0 group = 'day', group2 = 'month' @@ -332,7 +334,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: timeframe = 'year' start, group = split_sting_letters_numbers(db_addon_fct_vars[2]) start = to_int(start) - group = convert_timeframe(group) + group = harmonize_timeframe_expression(group) end = 0 log_text = 'serie_tagesmittelwert_count|group' required_params = [func, timeframe, start, end, group] @@ -345,27 +347,27 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: group = 'hour' start, group2 = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group2 = convert_timeframe(group2) + group2 = harmonize_timeframe_expression(group2) log_text = 'serie_tagesmittelwert_group2_count|group' required_params = [func, timeframe, start, end, group, group2] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1: # handle 'serie_tagesmittelwert_stunde_start_end|group' like 'serie_tagesmittelwert_stunde_30_0d' => Stundenmittelwerte von vor 30 Tage bis vor 0 Tagen (also heute) - method = 'raw' + method = 'avg_hour' start = to_int(db_addon_fct_vars[3]) end, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) end = to_int(end) - timeframe = convert_timeframe(timeframe) + timeframe = harmonize_timeframe_expression(timeframe) log_text = 'serie_tagesmittelwert_stunde_start_end|group' required_params = [timeframe, method, start, end] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_D_H: # handle 'serie_tagesmittelwert_tag_stunde_end|group' like 'serie_tagesmittelwert_tag_stunde_30d' => Tagesmittelwert auf Basis des Mittelwerts pro Stunden für die letzten 30 Tage - method = 'raw' + method = 'avg_hour' end = 0 start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) start = to_int(start) - timeframe = convert_timeframe(timeframe) + timeframe = harmonize_timeframe_expression(timeframe) log_text = 'serie_tagesmittelwert_tag_stunde_end|group' required_params = [timeframe, method, start, end] @@ -374,11 +376,11 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: required_params = [] if required_params is None: - self.logger.warning(f"ERROR: For calculating '{db_addon_fct}' at Item '{item.path()}' no mandatory parameters given.") + self.logger.warning(f"For calculating '{db_addon_fct}' at Item '{item.path()}' no mandatory parameters given.") return if required_params and None in required_params: - self.logger.warning(f"ERROR: For calculating '{db_addon_fct}' at Item '{item.path()}' not all mandatory parameters given. 
Definitions are: {func=}, {timeframe=}, {timedelta=}, {start=}, {end=}, {group=}, {group2=}, {method=}") + self.logger.warning(f"For calculating '{db_addon_fct}' at Item '{item.path()}' not all mandatory parameters given. Definitions are: {func=}, {timeframe=}, {timedelta=}, {start=}, {end=}, {group=}, {group2=}, {method=}") return # create dict and reduce dict to keys with value != None @@ -399,7 +401,7 @@ def get_query_parameters_from_db_addon_params() -> Union[dict, None]: possible_params = required_params = [] if db_addon_params is None: - self.logger.warning(f"ERROR: Definition for Item '{item.path()}' with db_addon_fct={db_addon_fct} incomplete, since parameters via 'db_addon_params' not given. Item will be ignored.") + self.logger.warning(f"Definition for Item '{item.path()}' with db_addon_fct={db_addon_fct} incomplete, since parameters via 'db_addon_params' not given. Item will be ignored.") return # create item config for all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme @@ -436,7 +438,7 @@ def get_query_parameters_from_db_addon_params() -> Union[dict, None]: possible_params = ['start', 'end', 'group', 'group2', 'ignore_value_list', 'use_oldest_entry'] if required_params and not any(param in db_addon_params for param in required_params): - self.logger.warning(f"ERROR: Item '{item.path()}' with {db_addon_fct=} ignored, since not all mandatory parameters in {db_addon_params=} are given. Item will be ignored.") + self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored, since not all mandatory parameters in {db_addon_params=} are given. Item will be ignored.") return # reduce dict to possible keys + required_params @@ -725,85 +727,81 @@ def update_item(self, item, caller=None, source=None, dest=None): item(False, self.get_shortname()) def execute_due_items(self) -> None: - """ - Execute all items, which are due - """ - - if self.execute_debug: - self.logger.debug("execute_due_items called") + """Execute all items, which are due""" - if not self.suspended: - _todo_items = self._create_due_items() - self.logger.info(f"{len(_todo_items)} items are due and will be calculated.") - [self.item_queue.put(i) for i in _todo_items] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") + self.execute_items() def execute_startup_items(self) -> None: - """ - Execute all startup_items - """ - if self.execute_debug: - self.logger.debug("execute_startup_items called") + """Execute all startup_items""" - if self.suspended: - self.logger.info(f"Plugin is suspended. 
No items will be calculated.") - return + self.execute_items(option='startup') + self.startup_finished = True - relevant_item_list = self._startup_items() - self.logger.info(f"{len(relevant_item_list)} items will be calculated at startup.") + def execute_items(self, option: str = 'due'): + """Execute all items per option""" - for item in relevant_item_list: - self.item_queue.put(item) + def _create_due_items() -> list: + """Create list of items which are due and reset cache dicts""" - self.startup_finished = True + # täglich zu berechnende Items zur Action Liste hinzufügen + _todo_items = set() + _todo_items.update(set(self._daily_items())) + self.current_values[DAY] = {} + self.previous_values[DAY] = {} - def execute_static_items(self) -> None: - """ - Execute all static items - """ - if self.execute_debug: - self.logger.debug("execute_static_item called") + # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday( + self.shtime.today()) == 1: + _todo_items.update(set(self._weekly_items())) + self.current_values[WEEK] = {} + self.previous_values[WEEK] = {} - if not self.suspended: - self.logger.info(f"{len(self._static_items())} items will be calculated.") - [self.item_queue.put(i) for i in self._static_items()] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") + # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: + _todo_items.update(set(self._monthly_items())) + self.current_values[MONTH] = {} + self.previous_values[MONTH] = {} - def execute_info_items(self) -> None: - """ - Execute all info items - """ - if self.execute_debug: - self.logger.debug("execute_info_items called") + # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet + if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: + _todo_items.update(set(self._yearly_items())) + self.current_values[YEAR] = {} + self.previous_values[YEAR] = {} - if not self.suspended: - self.logger.info(f"{len(self._info_items())} items will be calculated.") - [self.item_queue.put(i) for i in self._info_items()] - else: - self.logger.info(f"Plugin is suspended. No items will be calculated.") + return list(_todo_items) - def execute_all_items(self) -> None: - """ - Execute all ondemand items - """ + if self.execute_debug: + self.logger.debug(f"execute_items called with {option=}") - if not self.suspended: - self.logger.info(f"Values for all {len(self._ondemand_items())} items with 'db_addon_fct' attribute, which are not 'on-change', will be calculated!") - [self.item_queue.put(i) for i in self._ondemand_items()] - else: + if self.suspended: self.logger.info(f"Plugin is suspended. 
No items will be calculated.") + return + + todo_items = [] + if option == 'startup': + todo_items = self._startup_items() + elif option == 'static': + todo_items = self._static_items() + elif option == 'info': + todo_items = self._info_items() + elif option == 'ondemand': + todo_items = self._ondemand_items() + elif option == 'onchange': + todo_items = self._onchange_items() + elif option == 'all': + todo_items = self._all_items() + elif option == 'due': + todo_items = _create_due_items() + + self.logger.info(f"{len(todo_items)} items will be calculated for {option=}.") + [self.item_queue.put(i) for i in todo_items] def work_item_queue(self) -> None: - """ - Handles item queue were all to be executed items were be placed in. - """ + """Handles item queue were all to be executed items were be placed in.""" while self.alive: try: queue_entry = self.item_queue.get(True, 10) - # self.logger.info(f" Queue Entry: '{queue_entry}' received.") except queue.Empty: self.active_queue_item = '-' pass @@ -862,13 +860,13 @@ def handle_ondemand(self, item: Item) -> None: # handle 'serie_tagesmittelwert_stunde_30_0d' and 'serie_tagesmittelwert_tag_stunde_30d' elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1 + SERIE_ATTRIBUTES_MITTEL_D_H: - result = self._prepare_temperature_list(**params) + result = self._prepare_value_list(**params) # handle TAGESMITTEL_ATTRIBUTES_TIMEFRAME like tagesmitteltemperatur_heute_minus1 elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: - params.update({'method': 'raw'}) - _result = self._prepare_temperature_list(**params) + params.update({'method': 'avg_hour'}) + _result = self._prepare_value_list(**params) if isinstance(_result, list): result = _result[0][1] @@ -943,14 +941,14 @@ def handle_minmax(): cached_value = cache_dict[database_item].get(func) if cached_value is None: if self.onchange_debug: - self.logger.debug(f"Item={updated_item.path()} with {func=} and {timeframe=} not in cache dict. Query database.") + self.logger.debug(f"{func} value for {timeframe=} of item={updated_item.path()} not in cache dict. Query database.") query_params = {'func': func, 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} cached_value = self._query_item(**query_params)[0][1] if cached_value is None: if self.onchange_debug: - self.logger.debug(f"no values available:{cached_value=}, {value}. Abort...") + self.logger.debug(f"{func} value for {timeframe=} of item={updated_item.path()} not available in database. Abort calculation.") return init = True @@ -958,7 +956,7 @@ def handle_minmax(): # if value not given -> read if init: if self.onchange_debug: - self.logger.debug(f"initial {func} value for {timeframe=} of Item={item.path()} with will be set to {cached_value}") + self.logger.debug(f"initial {func} value for {timeframe=} of item={item.path()} with will be set to {cached_value}") cache_dict[database_item][func] = cached_value return cached_value @@ -991,14 +989,14 @@ def handle_verbrauch(): cached_value = cache_dict.get(database_item) if cached_value is None: if self.onchange_debug: - self.logger.debug(f"Item={updated_item.path()} with _func={func} and timeframe={timeframe} not in cache dict.") + self.logger.debug(f"Most recent value for last {timeframe=} of item={updated_item.path()} not in cache dict. 
Query database.") # try to get most recent value of last timeframe, assuming that this is the value at end of last timeframe query_params = {'database_item': database_item, 'timeframe': timeframe, 'start': 1, 'end': 1, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} cached_value = self._handle_zaehlerstand(query_params) if cached_value is None: - self.logger.info(f"Most recent value for last {timeframe} not available in database. Abort calculation.") + self.logger.info(f"Most recent value for last {timeframe} of item={updated_item.path()} not available in database. Abort calculation.") return cache_dict[database_item] = cached_value @@ -1009,8 +1007,8 @@ def handle_verbrauch(): _new_value = value - cached_value return _new_value if isinstance(_new_value, int) else round(_new_value, 1) - def handle_tagesmitteltemp(): - result = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=0, end=0, ignore_value_list=ignore_value_list, method='raw') + def handle_tagesmittel(): + result = self._prepare_value_list(database_item=database_item, timeframe='day', start=0, end=0, ignore_value_list=ignore_value_list, method='first_hour') if isinstance(result, list): return result[0][1] @@ -1025,7 +1023,7 @@ def handle_tagesmitteltemp(): for item in relevant_item_list: item_config = self.get_item_config(item) - self.logger.debug(f"handle_onchange: Item={item.path()} with {item_config=}") + self.logger.debug(f"Item={item.path()} with {item_config=}") db_addon_fct = item_config['db_addon_fct'] database_item = item_config['database_item'] timeframe = item_config['query_params']['timeframe'] @@ -1041,7 +1039,7 @@ def handle_tagesmitteltemp(): # handle minmax on-change items tagesmitteltemperatur_heute, minmax_heute_avg if db_addon_fct in ['tagesmitteltemperatur_heute', 'minmax_heute_avg']: - new_value = handle_tagesmitteltemp() + new_value = handle_tagesmittel() # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... 
elif db_addon_fct.startswith('minmax'): @@ -1120,6 +1118,9 @@ def _database_item_path_items(self) -> list: def _ondemand_items(self) -> list: return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() + def _all_items(self) -> list: + return self._ondemand_items() + self._ondemand_items() + self._static_items() + self._admin_items() + self._info_items() + ######################################### # Public functions / Using item_path ######################################### @@ -1193,8 +1194,9 @@ def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: in item = self.items.return_item(item_path) if item: count = to_int(count) - start, end = count_to_start(count) - query_params = {'database_item': item, 'func': 'max', 'timeframe': convert_timeframe(timeframe), 'start': start, 'end': end} + end = 0 + start = end + count + query_params = {'database_item': item, 'func': 'max', 'timeframe': harmonize_timeframe_expression(timeframe), 'start': start, 'end': end} return self._handle_tagesmitteltemperatur(**query_params) def wachstumsgradtage(self, item_path: str, year: Union[int, str] = None, method: int = 0, threshold: int = 10) -> Union[int, None]: @@ -1213,7 +1215,7 @@ def wachstumsgradtage(self, item_path: str, year: Union[int, str] = None, method if item: return self._handle_wachstumsgradtage(database_item=item, year=year, method=method, threshold=threshold) - def temperaturserie(self, item_path: str, year: Union[int, str] = None, method: str = 'raw') -> Union[list, None]: + def temperaturserie(self, item_path: str, year: Union[int, str] = None, method: str = 'avg_hour') -> Union[list, None]: """ Query database for wachstumsgradtage https://de.wikipedia.org/wiki/Wachstumsgradtag @@ -1254,7 +1256,7 @@ def fetch_log(self, func: str, item_path: str, timeframe: str, start: int = None item = self.items.return_item(item_path) if count: - start, end = count_to_start(count) + start = end + count if item and start and end: return self._query_item(func=func, database_item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value_list=ignore_value_list) @@ -1516,7 +1518,7 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, # get raw data as list if self.prepare_debug: self.logger.debug("try to get raw data") - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + raw_data = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='avg_hour') if self.execute_debug: self.logger.debug(f"raw_value_list={raw_data=}") @@ -1638,7 +1640,7 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str] = return # get raw data as list - raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax') + raw_data = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax_hour') if self.execute_debug: self.logger.debug(f"raw_value_list={raw_data}") @@ -1694,7 +1696,7 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str] = else: self.logger.info(f"Method for 'Wachstumsgradtag' calculation not defined.'") - def _handle_temperaturserie(self, database_item: Item, year: Union[int, str] = None, method: str = 'raw') -> Union[list, None]: + def _handle_temperaturserie(self, database_item: Item, year: Union[int, 
str] = None, method: str = 'avg_hour') -> Union[list, None]: """ provide list of lists having timestamp and temperature(s) per day @@ -1735,13 +1737,8 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str] = N self.logger.error(f"End time for query of item={database_item.path()} is before start time. Query cancelled.") return - # check method - if method not in ['hour', 'raw', 'minmax']: - self.logger.error(f"Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. Query cancelled.") - return - # get raw data as list - temp_list = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) + temp_list = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) if self.execute_debug: self.logger.debug(f"{temp_list=}") @@ -1790,178 +1787,100 @@ def _prepare_waermesumme(self, database_item: Item, year: Union[int, str] = None return # return raw data as list - return self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + return self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='avg_hour') - def _prepare_temperature_list(self, database_item: Item, timeframe: str, start: int, end: int = 0, ignore_value_list=None, method: str = 'hour') -> Union[list, None]: + def _prepare_value_list(self, database_item: Item, timeframe: str, start: int, end: int = 0, ignore_value_list=None, method: str = 'avg_hour') -> Union[list, None]: """ - returns list of lists having timestamp and temperature(s) per day + returns list of lists having timestamp and values(s) per day / hour in format of regular database query - :param database_item: item object or item_id for which the query should be done - :param timeframe: timeframe for query - :param start: increments for timeframe from now to start - :param end: increments for timeframe from now to end - :param ignore_value_list: list of comparison operators for val_num, which will be applied during query - :param method: Calculation method - :return: list of temperatures + :param database_item: item object or item_id for which the query should be done + :param timeframe: timeframe for query + :param start: increments for timeframe from now to start + :param end: increments for timeframe from now to end + :param ignore_value_list: list of comparison operators for val_num, which will be applied during query + :param method: calculation method + - avg_day: determines average value per day of values within plugin + - avg_hour: determines average value per hour of values within plugin + - first_day: determines first value per day of values within plugin + - first_hour: determines first value per hour of values within plugin + - minmax_day: determines min and max value per day of values within plugin + - minmax_hour: determines min and max value per hour of values within plugin + :return: list of list with [timestamp, value] """ - def _create_temp_dict() -> dict: - """create dict based on database query result like {'date1': {'hour1': [temp values], 'hour2': [temp values], ...}, 'date2': {'hour1': [temp values], 'hour2': [temp values], ...}, ...}""" + def _create_raw_value_dict(block: str) -> dict: + """ + create dict of datetimes (per day or hour) and values based on database query result in format {'datetime1': [values]}, 'datetime1': [values], ..., 'datetimex': [values]} + :param block: defined the increment of 
datetimes, default is hour, furhter possible is 'day' + """ - _temp_dict = {} + _value_dict = {} for _entry in raw_data: dt = datetime.datetime.utcfromtimestamp(_entry[0] / 1000) - date = dt.strftime('%Y-%m-%d') - hour = dt.strftime('%H') - if date not in _temp_dict: - _temp_dict[date] = {} - if hour not in _temp_dict[date]: - _temp_dict[date][hour] = [] - _temp_dict[date][hour].append(_entry[1]) - return _temp_dict - - def _calculate_hourly_average(): - """ calculate hourly average based on list of temperatures and update temp_dict""" - - for _date in temp_dict: - for hour in temp_dict[_date]: - hour_raw_value_list = temp_dict[_date][hour] - # hour_value = round(sum(hour_raw_value_list) / len(hour_raw_value_list), 1) # Durchschnittsbildung über alle Werte der Liste - hour_value = hour_raw_value_list[0] # Nehme den ersten Wert der Liste als Stundenwert (kommt am nächsten an die Definition, den Wert exakt zur vollen Stunden zu nehmen) - temp_dict[_date][hour] = [hour_value] - - def _create_list_timestamp_avgtemp() -> list: - """Create list of list with [[timestamp1, value1], [timestamp2, value2], ...] based on temp_dict""" - - _temp_list = [] - for _date in temp_dict: - - # wenn mehr als 20 Stundenwerte vorliegen, berechne den Tagesdurchschnitt über alle Werte - if len(temp_dict[_date]) >= 20: - _values = sum(list(temp_dict[_date].values()), []) - _values_avg = round(sum(_values) / len(_values), 1) - - # wenn für 00, 06, 12 und 18 Uhr Werte vorliegen, berechne den Tagesdurchschnitt über diese Werte - elif '00' in temp_dict[_date] and '06' in temp_dict[_date] and '12' in temp_dict[_date] and '18' in temp_dict[_date]: - _values_avg = round((temp_dict[_date]['00'][0] + temp_dict[_date]['06'][0] + temp_dict[_date]['12'][0] + temp_dict[_date]['18'][0]) / 4, 1) - - # sonst berechne den Tagesdurchschnitt über alle Werte - else: - _values = sum(list(temp_dict[_date].values()), []) - _values_avg = round(sum(_values) / len(_values), 1) - - _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) - _temp_list.append([_timestamp, _values_avg]) - return _temp_list - - def _create_list_timestamp_minmaxtemp() -> list: - """Create list of list with [[timestamp1, min value1, max_value1], [timestamp2, min value2, max_value2], ...] based on temp_dict""" - - _temp_list = [] - for _date in temp_dict: - _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) - _day_values = sum(list(temp_dict[_date].values()), []) - _temp_list.append([_timestamp, min(_day_values), max(_day_values)]) - return _temp_list - - # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] 
Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt - if method == 'hour': - raw_data = self._query_item(func='avg', database_item=database_item, timeframe=timeframe, start=start, end=end, group=method, ignore_value_list=ignore_value_list) - if self.prepare_debug: - self.logger.debug(f"{raw_data=}") + dt = dt.replace(minute=0, second=0, microsecond=0) + if block == 'day': + dt = dt.replace(hour=0) + if dt not in _value_dict: + _value_dict[dt] = [] + _value_dict[dt].append(_entry[1]) - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return - - # create nested dict with temps - temp_dict = _create_temp_dict() - - # create list of list like database query response - temp_list = _create_list_timestamp_avgtemp() - if self.prepare_debug: - self.logger.debug(f"{temp_list=}") - return temp_list + return dict(sorted(_value_dict.items())) - # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird hier im Plugin ermittelt ermittelt - elif method == 'raw': - raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) - - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return + def _create_value_list_timestamp_value(option: str) -> list: + """ + Create list of list with [[timestamp1, value1], [timestamp2, value2], ...] based on value_dict in format of database query result + values given in the list will be concentrated as per given option - # create nested dict with temps - temp_dict = _create_temp_dict() + :param option defines option to be used to determine the concentrated values, possible are 'first', 'avg', minmax + 'first' will take first entry of list per datetime to get as close to value at full hour as possible + 'avg' will use the calculated average of values in list per datetime + 'minmax' will get min and max value of list per datetime + """ - # calculate 'tagesdurchschnitt' and create list of list like database query response - _calculate_hourly_average() - if self.prepare_debug: - self.logger.debug(f"raw: {temp_dict=}") + _value_list = [] + # create nested list with timestamp, avg_value per hour/day + for entry in value_dict: + _timestamp = datetime_to_timestamp(entry) + if option == 'first': + _value_list.append([_timestamp, value_dict[entry][0]]) + elif option == 'avg': + _value_list.append([_timestamp, round(sum(value_dict[entry]) / len(value_dict[entry]), 1)]) + elif option == 'minmax': + _value_list.append([_timestamp, min(value_dict[entry]), max(value_dict[entry])]) + return _value_list - # create list of list like database query response - temp_list = _create_list_timestamp_avgtemp() - if self.prepare_debug: - self.logger.debug(f"{temp_list=}") - return temp_list + # check method + if method in ['avg_day', 'avg_hour', 'minmax_day', 'minmax_hour', 'first_day', 'first_hour']: + _method, _block = method.split('_') + elif method in ['avg', 'minmax', 'first']: + _method = method + _block = 'hour' + else: + self.logger.warning(f"defined {method=} for _prepare_value_list unknown. Need to be 'avg_day', 'avg_hour', 'minmax_day', 'minmax_hour', 'first_day' or 'first_hour'. Aborting...") + return - # temp_list = [[timestamp1, min-value1, max-value1], [timestamp2, min-value2, max-value2], [timestamp3, min-value3, max-value3], ...] 
- elif method == 'minmax': - raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) + # get raw data from database + raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) + if raw_data in [[[None, None]], [[0, 0]]]: + self.logger.warning("no valid data from database query received during _prepare_value_list. Aborting...") + return - if raw_data and isinstance(raw_data, list): - if raw_data == [[None, None]]: - return + # create nested dict with values + value_dict = _create_raw_value_dict(block=_block) + if self.prepare_debug: + self.logger.debug(f"{value_dict=}") - # create nested dict with temps - temp_dict = _create_temp_dict() - if self.prepare_debug: - self.logger.debug(f"raw: {temp_dict=}") + # return value list + result = _create_value_list_timestamp_value(option=_method) + if self.prepare_debug: + self.logger.debug(f"{method=}, {result=}") - # create list of list like database query response - temp_list = _create_list_timestamp_minmaxtemp() - if self.prepare_debug: - self.logger.debug(f"{temp_list=}") - return temp_list + return result #################### # Support stuff #################### - def _create_due_items(self) -> list: - """ - Create set of items which are due and resets cache dicts - - :return: set of items, which need to be processed - - """ - - # täglich zu berechnende Items zur Action Liste hinzufügen - _todo_items = set() - _todo_items.update(set(self._daily_items())) - self.current_values[DAY] = {} - self.previous_values[DAY] = {} - - # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday(self.shtime.today()) == 1: - _todo_items.update(set(self._weekly_items())) - self.current_values[WEEK] = {} - self.previous_values[WEEK] = {} - - # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: - _todo_items.update(set(self._monthly_items())) - self.current_values[MONTH] = {} - self.previous_values[MONTH] = {} - - # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: - _todo_items.update(set(self._yearly_items())) - self.current_values[YEAR] = {} - self.previous_values[YEAR] = {} - - return list(_todo_items) - def _check_db_existence(self) -> bool: """ Check existence of database plugin with given config name @@ -2057,7 +1976,7 @@ def _check_db_connection_setting(self) -> None: except Exception: pass - def _get_oldest_log(self, item: Item) -> int: + def _get_oldest_log(self, item: Item) -> Union[None, int]: """ Get timestamp of the oldest entry of item from cache dict or get value from db and put it to cache dict @@ -2065,19 +1984,21 @@ def _get_oldest_log(self, item: Item) -> int: :return: timestamp of the oldest log """ - _oldest_log = self.item_cache.get(item, {}).get('oldest_log', None) + oldest_log = self.item_cache.get(item, {}).get('oldest_log') - if _oldest_log is None: + if oldest_log is None: item_id = self._get_itemid(item) - _oldest_log = self._read_log_oldest(item_id) - if item not in self.item_cache: - self.item_cache[item] = {} - 
self.item_cache[item]['oldest_log'] = _oldest_log + oldest_log = self._read_log_oldest(item_id) + + if isinstance(oldest_log, int): + if item not in self.item_cache: + self.item_cache[item] = {} + self.item_cache[item]['oldest_log'] = oldest_log if self.prepare_debug: - self.logger.debug(f"_get_oldest_log for item {item.path()} = {_oldest_log}") + self.logger.debug(f"_get_oldest_log for item {item.path()} = {oldest_log}") - return _oldest_log + return oldest_log def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: """ @@ -2087,7 +2008,7 @@ def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: :return: oldest value """ - _oldest_entry = self.item_cache.get(item, {}).get('_oldest_entry', None) + _oldest_entry = self.item_cache.get(item, {}).get('oldest_entry', None) if _oldest_entry is not None: _oldest_value = _oldest_entry[0][4] @@ -2097,7 +2018,11 @@ def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: i = 0 _oldest_value = -999999999 while validity is False: - oldest_entry = self._read_log_timestamp(item_id, self._get_oldest_log(item)) + oldest_log = self._get_oldest_log(item) + if oldest_log is None: + validity = True + self.logger.error(f"oldest_log for item {item.path()} could not be read; value is set to -999999999") + oldest_entry = self._read_log_timestamp(item_id, oldest_log) i += 1 if isinstance(oldest_entry, list) and isinstance(oldest_entry[0], tuple) and len(oldest_entry[0]) >= 4: if item not in self.item_cache: @@ -2182,7 +2107,9 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int # define start and end of query as timestamp in microseconds ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) - oldest_log = int(self._get_oldest_log(database_item)) + oldest_log = self._get_oldest_log(database_item) + if oldest_log is None: + return default_result # check correctness of ts_start / ts_end if ts_start is None: @@ -2279,30 +2206,6 @@ def _clear_queue(self) -> None: self.logger.info(f"Working queue will be cleared. Calculation run will end.") self.item_queue.queue.clear() - def _work_item_queue_thread_startup(self): - """Start a thread to work item queue""" - - try: - _name = 'plugins.' 
+ self.get_fullname() + '.work_item_queue' - self.work_item_queue_thread = threading.Thread(target=self.work_item_queue, name=_name) - self.work_item_queue_thread.daemon = False - self.work_item_queue_thread.start() - self.logger.debug("Thread for 'work_item_queue_thread' has been started") - except threading.ThreadError: - self.logger.error("Unable to launch thread for 'work_item_queue_thread'.") - self.work_item_queue_thread = None - - def _work_item_queue_thread_shutdown(self): - """Shut down the thread to work item queue""" - - if self.work_item_queue_thread: - self.work_item_queue_thread.join() - if self.work_item_queue_thread.is_alive(): - self.logger.error("Unable to shut down 'work_item_queue_thread' thread") - else: - self.logger.info("Thread 'work_item_queue_thread' has been terminated.") - self.work_item_queue_thread = None - ################################# # Database Query Preparation ################################# @@ -2596,7 +2499,8 @@ def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None tuples = fetch(query, params, cur=cur) except Exception as e: self.logger.error(f"Error for query '{query_readable}': {e}") - raise e + tuples = None + pass finally: if cur is None: self._db.release() @@ -2666,8 +2570,8 @@ def timestamp_to_timestring(timestamp: int) -> str: return datetime.datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') -def convert_timeframe(timeframe: str) -> str: - """Convert timeframe""" +def harmonize_timeframe_expression(timeframe: str) -> str: + """harmonizes different expression of timeframe""" lookup = { 'tag': 'day', @@ -2687,8 +2591,8 @@ def convert_timeframe(timeframe: str) -> str: return lookup.get(timeframe) -def convert_duration(timeframe: str, window_dur: str) -> int: - """Convert duration""" +def convert_timeframe(timeframe_in: str, timeframe_out: str) -> int: + """Convert timeframe to timeframe like month in years or years in days""" _h_in_d = 24 _d_in_y = 365 @@ -2731,13 +2635,7 @@ def convert_duration(timeframe: str, window_dur: str) -> int: } } - return lookup[timeframe][window_dur] - - -def count_to_start(count: int = 0, end: int = 0): - """Converts given count and end ot start and end""" - - return end + count, end + return lookup[timeframe_in][timeframe_out] def get_start_end_as_timestamp(timeframe: str, start: Union[int, str, None], end: Union[int, str, None]) -> tuple: @@ -2876,27 +2774,3 @@ def split_sting_letters_numbers(string) -> list: ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] - -""" - 'serie_minmax_monat_min_15m': {'func': 'min', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_max_15m': {'func': 'max', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_avg_15m': {'func': 'avg', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_woche_min_30w': {'func': 'min', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_max_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_avg_30w': {'func': 'avg', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_tag_min_30d': {'func': 'min', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_max_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_avg_30d': {'func': 'avg', 'timeframe': 'day', 'start': 
30, 'end': 0, 'group': 'day'}, - 'serie_verbrauch_tag_30d': {'func': 'diff_max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_verbrauch_woche_30w': {'func': 'diff_max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_verbrauch_monat_18m': {'func': 'diff_max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_zaehlerstand_tag_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_zaehlerstand_woche_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_zaehlerstand_monat_18m': {'func': 'max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_waermesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_kaeltesumme_monat_24m': {'func': 'sum_min_neg', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, - 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'gts': {'func': 'max', 'timeframe': 'year', 'start': None, 'end': None, 'group': 'day'}, -""" diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 184b7bab9..98115af82 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.2.1 # Plugin version (must match the version specified in __init__.py) + version: 1.2.2 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -729,6 +729,14 @@ item_structs: visu_acl: ro # cache: yes + verbrauch_verbrauch_rolling_12m_woche_minus1: + name: Verbrauch innerhalb der letzten 12 Monate ausgehend vom Ende letzter Woche + db_addon_fct: verbrauch_rolling_12m_woche_minus1 + db_addon_startup: yes + type: num + visu_acl: ro + # cache: yes + verbrauch_2: name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 2) verbrauch_gestern_minus3: diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index 9228c5919..95d4be3c9 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -377,10 +377,6 @@ {{ _('get_item_list') }} {{ p.get_item_list('database_addon', True) }} - - {{ _('work_item_queue_thread') }} - {% if p.work_item_queue_thread != None %}{{ p.work_item_queue_thread.is_alive() }}{% endif %} - {% endblock bodytab2 %} From df8932c2a04a942068bab1a80c5b7aa2621f686f Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Jul 2023 17:10:41 +0200 Subject: [PATCH 187/775] DB_ADDON: - bump to 1.2.2 - Remove tread for working item queue; use shNG scheduler instead - improve inline docu - improve execute_items - catch exception within work_item_queue and de-init plugin in case of failure - improve prepare_value_list for calculation of 
avg, min_max... for value series - handle oldest_log for new-born database items - adapt webif --- db_addon/webif/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db_addon/webif/__init__.py b/db_addon/webif/__init__.py index 0ecb4ea54..604f6b147 100644 --- a/db_addon/webif/__init__.py +++ b/db_addon/webif/__init__.py @@ -115,7 +115,7 @@ def get_data_html(self, dataSet=None): @cherrypy.expose def recalc_all(self): self.logger.debug(f"recalc_all called") - self.plugin.execute_all_items() + self.plugin.execute_items('all') @cherrypy.expose def clean_cache_dicts(self): From c9a53044ac8b2200bb242c927514a38c2b0c3e00 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Jul 2023 21:56:36 +0200 Subject: [PATCH 188/775] DB_ADDON: - Put Plugin to suspend in case of exception in work_item_queue instead of deinit - recalc all will just call all items with db_addon_fct - minor change on WebIF --- db_addon/__init__.py | 9 ++++++--- db_addon/webif/templates/index.html | 10 +++++----- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 182cebe9e..f20b78023 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -161,8 +161,10 @@ def run(self): except Exception as e: self.logger.warning(f"During working item queue Exception '{e}' occurred.") self.logger.debug(e, exc_info=True) - self.logger.error("Thread for working item queue died. De-init plugin.") - self.deinit() + # self.logger.error("Thread for working item queue died. De-init plugin.") + # self.deinit() + self.logger.error("Suspend Plugin and clear Item-Queue.") + self.suspend(True) def stop(self): """ @@ -1119,7 +1121,8 @@ def _ondemand_items(self) -> list: return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() def _all_items(self) -> list: - return self._ondemand_items() + self._ondemand_items() + self._static_items() + self._admin_items() + self._info_items() + # return self._ondemand_items() + self._onchange_items() + self._static_items() + self._admin_items() + self._info_items() + return self.get_item_list('db_addon', 'function') ######################################### # Public functions / Using item_path diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index 95d4be3c9..f2a6f7a56 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -69,14 +69,14 @@ value.substring(0, length - 3) + " ..." 
: value; - shngInsertText(item+'_value', round(new_value, 2), 'maintable', 5); + shngInsertText(item+'_value', Math.round(new_value, 2), 'maintable', 5); shngInsertText(item+'_last_update', objResponse['items'][item]['last_update'], 'maintable'); shngInsertText(item+'_last_change', objResponse['items'][item]['last_change'], 'maintable'); } - $('#maintable').DataTable().draw(false); + item_count = String(objResponse['queue_length']) + ' Items'; - shngInsertText('queue_length', item_count, null, 2); - shngInsertText('active_queue_item', objResponse['active_queue_item'], null, 2); + shngInsertText('queue_length', item_count); + shngInsertText('active_queue_item', objResponse['active_queue_item']); if (objResponse['plugin_suspended'] === false) { document.getElementById('play').classList = 'btn btn-success btn-sm'; @@ -136,7 +136,7 @@ targets: [5], "className": "init" }, { - title: '{{ _('Wert') }}', + title: '{{ _('Wert') }}', targets: [6], "className": "truncate value" }, { From 8ad8abd30b43ae02ea1616b41d0e9f0b25f601af Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Jul 2023 22:02:59 +0200 Subject: [PATCH 189/775] DB_ADDON: - minor change on WebIF --- db_addon/webif/templates/index.html | 32 ++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index f2a6f7a56..9da923351 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -45,6 +45,11 @@ table th.content { width: 500px; } + table td.active_item { + max-width: 250px; + width: 200px; + min-width:150px!important; + } .shng_effect_highlight { background-color: #FFFFE0; } @@ -75,6 +80,11 @@ } item_count = String(objResponse['queue_length']) + ' Items'; + if (objResponse['queue_length'] == 0) + recalc_button(false); + else { + recalc_button(true); + } shngInsertText('queue_length', item_count); shngInsertText('active_queue_item', objResponse['active_queue_item']); @@ -196,12 +206,28 @@ } } + {% endblock pluginscripts %} {% block headtable %} - +
    @@ -230,7 +256,7 @@ {% endfor %} - + @@ -242,7 +268,7 @@ {% block buttons %}
    - +
    From 3e731a73f1df7b4703177bdf7dbb029cf5d53c28 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Sun, 16 Jul 2023 22:05:25 +0200 Subject: [PATCH 190/775] DB_ADDON: - minor change on WebIF --- db_addon/webif/templates/index.html | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index 9da923351..62f398e70 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -325,11 +325,15 @@
    - + + + + + - + @@ -399,10 +403,6 @@ - - - -
    {{ _('Verbunden') }}
    {{ _('Item in Berechnung') }}{{ p.active_queue_item }}{{ p.active_queue_item }} {{ _('Arbeitsvorrat') }} {{ p.queue_backlog }} {{ _('Items') }}
    {{ _('00_items') }}{{ p.get_item_path_list('database_addon', True) }}{{ p._all_items() }}
    {{ _('01_ondemand_items') }}{{ p._ondemand_items() }}
    {{ _('02_admin_items') }}{{ p.get_item_path_list('database_addon', 'admin') }}{{ p._admin_items() }}
    {{ _('10_daily_items') }}{{ _('27_vorjahresendwert_dict') }} {{ p.previous_values['year'] }}
    {{ _('get_item_list') }}{{ p.get_item_list('database_addon', True) }}
    {% endblock bodytab2 %} From 65267c6d5563708147fde90c836d2c874e51b0fa Mon Sep 17 00:00:00 2001 From: psilo909 Date: Mon, 17 Jul 2023 20:30:40 +0200 Subject: [PATCH 191/775] Webservices: fix for splitting up of autotimer attribute into _value and _time --- webservices/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/webservices/__init__.py b/webservices/__init__.py index e63f16fbd..1a099c93d 100755 --- a/webservices/__init__.py +++ b/webservices/__init__.py @@ -263,7 +263,8 @@ def assemble_item_data(self, item, webservices_data='full'): 'eval_trigger': str(item._eval_trigger), 'cycle': str(cycle), 'crontab': str(crontab), - 'autotimer': str(item._autotimer), + 'autotimer_value': str(item._autotimer_value), + 'autotimer_time': str(item._autotimer_time), 'threshold': str(item._threshold), 'config': item_conf_sorted, 'logics': logics, @@ -316,7 +317,7 @@ def itemset(self, set_id=None, mode=None): @cherrypy.tools.json_out() def items(self, item_path=None, value=None, mode=None): """ - Simpole WS functions for item + Simple WS functions for item """ if item_path is None: self.logger.debug(cherrypy.request.method) From d25c8539a521e9da6e2ed367ca4e9311b854ca0a Mon Sep 17 00:00:00 2001 From: psilo909 Date: Mon, 17 Jul 2023 20:31:06 +0200 Subject: [PATCH 192/775] Webservices: fix for splitting up of autotimer attribute into _value and _time --- webservices/__init__.py | 2 +- webservices/plugin.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/webservices/__init__.py b/webservices/__init__.py index 1a099c93d..95290c807 100755 --- a/webservices/__init__.py +++ b/webservices/__init__.py @@ -34,7 +34,7 @@ class WebServices(SmartPlugin): - PLUGIN_VERSION = '1.6.3' + PLUGIN_VERSION = '1.6.4' ALLOWED_FOO_PATHS = ['env.location.moonrise', 'env.location.moonset', 'env.location.sunrise', 'env.location.sunset'] def __init__(self, sh, *args, **kwargs): diff --git a/webservices/plugin.yaml b/webservices/plugin.yaml index fee6ac088..09bd4ba4d 100755 --- a/webservices/plugin.yaml +++ b/webservices/plugin.yaml @@ -13,7 +13,7 @@ plugin: #documentation: http://smarthomeng.de/user/plugins/webservices/user_doc.html support: https://knx-user-forum.de/node/1163886 - version: 1.6.3 # Plugin version + version: 1.6.4 # Plugin version sh_minversion: 1.6 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance From 64f3ec0adeae3dfc7a1f6eef9414ddd31a1ff1d6 Mon Sep 17 00:00:00 2001 From: psilo909 Date: Tue, 18 Jul 2023 09:57:08 +0200 Subject: [PATCH 193/775] Update __init__.py --- alexarc4shng/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/alexarc4shng/__init__.py b/alexarc4shng/__init__.py index 4d686018f..c42111181 100755 --- a/alexarc4shng/__init__.py +++ b/alexarc4shng/__init__.py @@ -117,7 +117,7 @@ def __init__(self, id): ############################################################################## class AlexaRc4shNG(SmartPlugin): - PLUGIN_VERSION = '1.0.3' + PLUGIN_VERSION = '1.0.4' ALLOW_MULTIINSTANCE = False """ Main class of the Plugin. 
Does all plugin specific stuff and provides @@ -1122,7 +1122,7 @@ def auto_login_by_request(self): "Referer": myLocation } newUrl = "https://www.amazon.de"+"/ap/signin/"+actSessionID - postfields = urllib3.request.urlencode(PostData) + postfields = urlencode(PostData) myStatus,myRespHeader, myRespCookie, myContent = self.send_post_request(newUrl,myHeaders,myCollectionCookie,PostData) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) @@ -1165,7 +1165,7 @@ def auto_login_by_request(self): myResults.append('MFA : ' + 'use MFA/OTP - Login OTP : {}'.format(mfaCode)) - postfields = urllib3.request.urlencode(PostData) + postfields = urlencode(PostData) myStatus,myRespHeader, myRespCookie, myContent = self.send_post_request(newUrl,myHeaders,myCollectionCookie,PostData) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) myCollectionCookie = self.parse_response_cookie(myRespCookie,myCollectionCookie) From 393b001aa1b098507abb2f70a69b802332684a75 Mon Sep 17 00:00:00 2001 From: psilo909 Date: Tue, 18 Jul 2023 09:57:22 +0200 Subject: [PATCH 194/775] Update plugin.yaml --- alexarc4shng/plugin.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alexarc4shng/plugin.yaml b/alexarc4shng/plugin.yaml index 447b7ac04..1e00fb5f5 100755 --- a/alexarc4shng/plugin.yaml +++ b/alexarc4shng/plugin.yaml @@ -8,7 +8,7 @@ plugin: maintainer: AndreK tester: henfri, juergen, psilo #documentation: https://www.smarthomeng.de/user/plugins/alexarc4shng/user_doc.html # url of documentation - version: 1.0.3 # Plugin version + version: 1.0.4 # Plugin version sh_minversion: 1.5.2 # minimum shNG version to use this plugin multi_instance: False # plugin supports multi instance classname: AlexaRc4shNG # class containing the plugin From 879adaf5cad00e09d660be1db5c0f2b568cb0d2d Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 19 Jul 2023 00:27:27 +0200 Subject: [PATCH 195/775] lms plugin: fix wipecache and scan running commands --- lms/commands.py | 2 +- lms/datatypes.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lms/commands.py b/lms/commands.py index ed1cfc582..6f0afe4da 100755 --- a/lms/commands.py +++ b/lms/commands.py @@ -18,7 +18,7 @@ 'runningtime': {'read': True, 'read_cmd': 'rescanprogress totaltime', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'rescanprogress totaltime .* rescan:([0-9]{2}:[0-9]{2}:[0-9]{2})', 'item_attrs': {'custom1': ''}}, 'fail': {'read': True, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': 'rescanprogress totaltime rescan:0 lastscanfailed:(.*)', 'item_attrs': {'custom1': ''}}, 'abortscan': {'read': True, 'write': True, 'write_cmd': 'abortscan', 'item_type': 'bool', 'dev_datatype': 'str', 'reply_pattern': 'abortscan', 'item_attrs': {'custom1': ''}}, - 'wipecache': {'read': True, 'write': True, 'write_cmd': 'wipecache', 'item_type': 'bool', 'dev_datatype': 'str', 'reply_pattern': 'wipecache', 'item_attrs': {'custom1': ''}} + 'wipecache': {'read': True, 'write': True, 'write_cmd': 'wipecache', 'item_type': 'bool', 'dev_datatype': 'LMSWipecache', 'reply_pattern': 'wipecache', 'item_attrs': {'custom1': ''}} }, 'totalgenres': {'read': True, 'write': False, 'read_cmd': 'info total genres ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'info total genres (\d+)', 'item_attrs': {'initial': True, 'custom1': ''}}, 'totalduration': {'read': True, 'write': False, 'read_cmd': 'info total duration ?', 'item_type': 'num', 'dev_datatype': 
'str', 'reply_pattern': r'info total duration ([0-9.]*)', 'item_attrs': {'item_template': 'duration', 'initial': True, 'custom1': ''}}, diff --git a/lms/datatypes.py b/lms/datatypes.py index 10acc5d3d..c60618a47 100755 --- a/lms/datatypes.py +++ b/lms/datatypes.py @@ -8,9 +8,15 @@ # handle feedback if rescan is running or not class DT_LMSRescan(DT.Datatype): def get_shng_data(self, data, type=None, **kwargs): - return True if data in ["1", "done"] else False + return True if data == "1" else False +class DT_LMSWipecache(DT.Datatype): + def get_shng_data(self, data, type=None, **kwargs): + return True if data == "wipecache" else False + def get_send_data(self, data, type=None, **kwargs): + return "wipecache" if data is True else "" + class DT_LMSPlaylists(DT.Datatype): def get_shng_data(self, data, type=None, **kwargs): _playlists = list(filter(None,re.split(r'id:|\sid:|\splaylist:', data))) From 193f138cad856e4c69acda327bef54e1abe989c8 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Jul 2023 10:30:38 +0200 Subject: [PATCH 196/775] LMS plugin: improve playlist name regex --- lms/commands.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lms/commands.py b/lms/commands.py index 6f0afe4da..6d17d55fd 100755 --- a/lms/commands.py +++ b/lms/commands.py @@ -57,7 +57,7 @@ 'repeat': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist repeat ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist repeat {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist repeat {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist repeat:{LOOKUP}'], 'lookup': 'REPEAT', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Playlist'}, 'lookup_item': True}}, 'shuffle': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist shuffle ?', 'item_type': 'str', 'write_cmd': '{CUSTOM_ATTR1} playlist shuffle {VALUE}', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist shuffle {LOOKUP}', '{CUSTOM_PATTERN1} status(?:.*)playlist shuffle:{LOOKUP}'], 'lookup': 'SHUFFLE', 'item_attrs': {'attributes': {'remark': '0 = Off, 1 = Song, 2 = Album'}, 'lookup_item': True}}, 'index': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist index ?', 'write_cmd': '{CUSTOM_ATTR1} playlist index {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlist (?:index|newsong .*) (\d+)$', '{CUSTOM_PATTERN1} status(?:.*)playlist index:(\d*[^\s]+)', '{CUSTOM_PATTERN1} prefset server currentSong (\d+)$', '{CUSTOM_PATTERN1} playlist jump (\d*)', '{CUSTOM_PATTERN1} play (\d*)'], 'item_attrs': {'initial': True}}, - 'name': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist name ?', 'write_cmd': '{CUSTOM_ATTR1} playlist name {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist name (.*[^?])', 'item_attrs': {'initial': True}}, + 'name': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist name ?', 'write_cmd': '{CUSTOM_ATTR1} playlist name {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} playlistcontrol cmd:load playlist_name:(.*) count:(?:\d*)', '{CUSTOM_PATTERN1} playlist name (.*[^?])'], 'item_attrs': {'initial': True}}, 'id': {'read': True, 'write': True, 'read_cmd': '{CUSTOM_ATTR1} playlist playlistsinfo', 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_id:{VALUE}', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': [r'{CUSTOM_PATTERN1} (?:status - 1 
.*|playlist playlistsinfo |playlistcontrol cmd:load playlist_)id:(\d*)', '{CUSTOM_PATTERN1} playlist loadtracks playlist.id=(\d*)\s']}, 'save': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlist save {VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '{CUSTOM_PATTERN1} playlist save (.*)', 'item_attrs': {'enforce': True}}, 'load': {'read': True, 'write': True, 'write_cmd': '{CUSTOM_ATTR1} playlistcontrol cmd:load playlist_name:{VALUE}', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} playlist resume (.*)', '{CUSTOM_PATTERN1} playlist loadtracks playlist.name:(.*)\s'], 'item_attrs': {'enforce': True}}, @@ -97,7 +97,7 @@ 'path': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} path ?', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': ['{CUSTOM_PATTERN1} path (.*)', '{CUSTOM_PATTERN1} playlist open (.*)', '{CUSTOM_PATTERN1} playlist play (.*)']}, 'duration': {'read': True, 'write': False, 'read_cmd': '{CUSTOM_ATTR1} duration ?', 'item_type': 'num', 'dev_datatype': 'str', 'reply_pattern': r'{CUSTOM_PATTERN1} duration (\d+)'}, 'trackstat': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'{CUSTOM_PATTERN1} trackstat changedstatistic (.*)'}, - 'albumarturl': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '(http://.*)'} + 'albumarturl': {'read': True, 'write': False, 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': '(http://.*)', 'item_attrs': {'attributes': {'remark': 'This item gets automatically defined and overwritten based on (web_)host and web_port'}}} } } } From a953831ace4319a946990ce26d3c38804c489345 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Jul 2023 10:31:07 +0200 Subject: [PATCH 197/775] LMS Plugin: Introduce option to define web_host (for making coverarturl work with reverse proxies) --- lms/__init__.py | 22 +++++++++++++++++++--- lms/plugin.yaml | 14 ++++++++++++-- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/lms/__init__.py b/lms/__init__.py index 34f5cc040..8dff5d715 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -59,6 +59,18 @@ def _set_device_defaults(self): self._use_callbacks = True self._parameters[PLUGIN_ATTR_RECURSIVE] = 1 self._parameters['web_port'] = self.get_parameter_value('web_port') + if self.get_parameter_value('web_host') == '': + host = self._parameters.get(PLUGIN_ATTR_NET_HOST) + if host.startswith('http'): + self._parameters['web_host'] = host + else: + self._parameters['web_host'] = f'http://{host}' + else: + host = self.get_parameter_value('web_host') + if host.startswith('http'): + self._parameters['web_host'] = host + else: + self._parameters['web_host'] = f'http://{host}' def on_connect(self, by=None): self.logger.debug("Activating listen mode after connection.") @@ -108,9 +120,13 @@ def trigger_read(command): # set album art URL if command == 'player.info.album': self.logger.debug(f"Got command album {command} data {data} value {value} custom {custom} by {by}") - host = self._parameters.get(PLUGIN_ATTR_NET_HOST) - port = self._parameters.get('web_port') - url = f'http://{host}:{port}/music/current/cover.jpg?player={custom}' + host = self._parameters['web_host'] + port = self._parameters['web_port'] + if port == 0: + url = f'{host}/music/current/cover.jpg?player={custom}' + else: + url = f'{host}:{port}/music/current/cover.jpg?player={custom}' + self.logger.debug(f"Setting albumarturl to {url}") 
self._dispatch_callback('player.info.albumarturl' + CUSTOM_SEP + custom, url, by) # set playlist ID diff --git a/lms/plugin.yaml b/lms/plugin.yaml index 9d28a304e..fc1427317 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -92,8 +92,16 @@ parameters: default: 9000 description: - de: Port für Webinterface-Verbindung (nötig für coverarturl) - en: port for web interface connection (necessary for coverarturl) + de: Port für Webinterface-Verbindung (nötig für coverarturl). Wird der Port auf 0 gesetzt, wird für die Coverart URL kein Port verwendet (z.B. beim Einsatz eines Reverse Proxy). + en: port for web interface connection (necessary for coverarturl). Is the port set to 0, no port is used for the cover art URL (e.g. when using a reverse proxy). + + web_host: + type: str + default: '' + + description: + de: Host für Webinterface-Verbindung (nötig für coverarturl bei Einsatz eines Reverse Proxy). Wird dieser Parameter nicht definiert, wird die URL unter "Host" herangezogen. + en: Host for web interface connection (necessary for coverarturl when using a reverse proxy). If this parameter is not set, the standard host URL is used. port: type: int @@ -945,6 +953,7 @@ item_structs: sqb_command: player.info.albumarturl sqb_read: true sqb_write: false + remark: This item gets automatically defined and overwritten based on (web_)host and web_port ALL: @@ -1740,5 +1749,6 @@ item_structs: sqb_command: player.info.albumarturl sqb_read: true sqb_write: false + remark: This item gets automatically defined and overwritten based on (web_)host and web_port plugin_functions: NONE logic_parameters: NONE From fa3d7e8db2e1aa190028265b7b51452983bbd655 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Jul 2023 22:10:35 +0200 Subject: [PATCH 198/775] Pioneer Plugin: fix and improve commands --- pioneer/commands.py | 24 +- pioneer/plugin.yaml | 1268 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 1177 insertions(+), 115 deletions(-) diff --git a/pioneer/commands.py b/pioneer/commands.py index b02ca258b..cdac9906d 100755 --- a/pioneer/commands.py +++ b/pioneer/commands.py @@ -3,11 +3,11 @@ # commands for dev pioneer models = { - 'ALL': ['general.pqls', 'general.setup.surroundposition', 'general.setup.speakersystem', 'general.setup.xcurve', 'general.setup.xover', 'general.setup.hdmi', 'general.setup.name', 'general.setup.language', 'general.dimmer', 'general.sleep', 'general.display', 'general.error', 'general.multizone', 'tuner', 'zone1', 'zone2.control', 'hdzone'], - 'SC-LX87': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], - 'SC-LX77': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], - 'SC-LX57': ['general.amp', 'general.setup.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], - 'SC-2023': ['zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'ALL': ['general.pqls', 'general.settings.speakersystem', 'general.settings.xcurve', 'general.settings.hdmi', 'general.settings.name', 'general.settings.language', 'general.dimmer', 'general.sleep', 'general.display', 'general.error', 'general.multizone', 'tuner', 'zone1', 'zone2.control', 'hdzone'], + 'SC-LX87': ['general.amp', 'general.settings.surroundposition', 'general.settings.xover', 'general.settings.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'SC-LX77': 
['general.amp', 'general.settings.surroundposition', 'general.settings.xover', 'general.settings.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'SC-LX57': ['general.amp', 'general.settings.surroundposition', 'general.settings.xover', 'general.settings.loudness', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], + 'SC-2023': ['general.settings.surroundposition', 'general.settings.xover', 'zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control', 'zone3'], 'SC-1223': ['zone2.settings.sound.channel_level', 'zone2.settings.sound.tone_control'], 'VSX-1123': [], 'VSX-923': [] @@ -23,20 +23,20 @@ 'amp': {'read': True, 'write': True, 'read_cmd': '?SAC', 'write_cmd': '{VALUE}SAC', 'item_type': 'str', 'dev_datatype': 'str', 'reply_pattern': r'SAC{LOOKUP}', 'lookup': 'AMP', 'item_attrs': {'attributes': {'remark': '0 = AMP, 1 = THR'}, 'lookup_item': True}}, 'multizone': {'read': False, 'write': True, 'write_cmd': 'ZZ', 'item_type': 'str', 'dev_datatype': 'str'}, 'settings': { - 'language': {'read': True, 'write': True, 'read_cmd': '?SSE', 'write_cmd': '{RAW_VALUE:02}SSE', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSE{LOOKUP}', 'lookup': 'LANGUAGE', 'item_attrs': {'initial': True}}, + 'language': {'read': True, 'write': True, 'read_cmd': '?SSE', 'write_cmd': '{VALUE}SSE', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSE{LOOKUP}', 'lookup': 'LANGUAGE', 'item_attrs': {'initial': True}}, 'name': {'read': True, 'write': True, 'read_cmd': '?SSO', 'write_cmd': '{VALUE}SSO', 'item_type': 'str', 'dev_datatype': 'PioName', 'reply_pattern': r'SSO(?:\d{2})(.*)', 'item_attrs': {'initial': True}}, - 'speakersystem': {'read': True, 'write': True, 'read_cmd': '?SSF', 'write_cmd': '{RAW_VALUE:02}SSF', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSF{LOOKUP}', 'lookup': 'SPEAKERSYSTEM', 'item_attrs': {'initial': True}}, - 'surroundposition': {'read': True, 'write': True, 'read_cmd': '?SSP', 'write_cmd': '{RAW_VALUE:01}SSP', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSP{LOOKUP}', 'lookup': 'SURROUNDPOSITION', 'item_attrs': {'initial': True}}, - 'xover': {'read': True, 'write': True, 'read_cmd': '?SSQ', 'write_cmd': '{RAW_VALUE:01}SSQ', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSQ{LOOKUP}', 'lookup': 'XOVER', 'item_attrs': {'initial': True}}, - 'xcurve': {'read': True, 'write': True, 'read_cmd': '?SST', 'write_cmd': '{RAW_VALUE:01}SST', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SST{LOOKUP}', 'lookup': 'XCURVE', 'item_attrs': {'initial': True}}, + 'speakersystem': {'read': True, 'write': True, 'read_cmd': '?SSF', 'write_cmd': '{VALUE}SSF', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSF{LOOKUP}', 'lookup': 'SPEAKERSYSTEM', 'item_attrs': {'lookup_item': True, 'initial': True}}, + 'surroundposition': {'read': True, 'write': True, 'read_cmd': '?SSP', 'write_cmd': '{VALUE}SSP', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSP{LOOKUP}', 'lookup': 'SURROUNDPOSITION', 'item_attrs': {'lookup_item': True, 'initial': True}}, + 'xover': {'read': True, 'write': True, 'read_cmd': '?SSQ', 'write_cmd': '{VALUE}SSQ', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'SSQ{LOOKUP}', 'lookup': 'XOVER', 'item_attrs': {'initial': True}}, + 'xcurve': {'read': True, 'write': True, 'read_cmd': '?SST', 'write_cmd': '{VALUE}SST', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': 
r'SST{LOOKUP}', 'lookup': 'XCURVE', 'item_attrs': {'initial': True}}, 'loudness': {'read': True, 'write': True, 'read_cmd': '?SSU', 'write_cmd': '{RAW_VALUE:01}SSU', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'SSU(\d{1})', 'item_attrs': {'initial': True}}, 'initialvolume': {'read': True, 'write': True, 'read_cmd': '?SUC', 'write_cmd': '{VALUE}SUC', 'item_type': 'num', 'dev_datatype': 'PioInitVol', 'reply_pattern': r'SUC(\d{3})', 'item_attrs': {'initial': True}}, - 'mutelevel': {'read': True, 'write': True, 'read_cmd': '?SUE', 'write_cmd': '{RAW_VALUE:01}SUE', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'SUE{LOOKUP}', 'lookup': 'MUTELEVEL', 'item_attrs': {'initial': True}}, + 'mutelevel': {'read': True, 'write': True, 'read_cmd': '?SUE', 'write_cmd': '{VALUE}SUE', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': r'SUE{LOOKUP}', 'lookup': 'MUTELEVEL', 'item_attrs': {'initial': True}}, 'hdmi': { 'control': {'read': True, 'write': True, 'read_cmd': '?STQ', 'write_cmd': '{RAW_VALUE:01}STQ', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STQ(\d{1})', 'item_attrs': {'initial': True}}, 'controlmode': {'read': True, 'write': True, 'read_cmd': '?STR', 'write_cmd': '{RAW_VALUE:01}STR', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STR(\d{1})', 'item_attrs': {'initial': True}}, 'arc': {'read': True, 'write': True, 'read_cmd': '?STT', 'write_cmd': '{RAW_VALUE:01}STT', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': r'STT(\d{1})', 'item_attrs': {'initial': True}}, - 'standbythrough': {'read': True, 'write': True, 'read_cmd': '?STU', 'write_cmd': '{RAW_VALUE:02}STU', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'STU{LOOKUP})', 'lookup': 'STANDBYTHROUGH', 'item_attrs': {'initial': True}} + 'standbythrough': {'read': True, 'write': True, 'read_cmd': '?STU', 'write_cmd': '{VALUE}STU', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': r'STU{LOOKUP}', 'lookup': 'STANDBYTHROUGH', 'item_attrs': {'lookup_item': True, 'initial': True}} } } diff --git a/pioneer/plugin.yaml b/pioneer/plugin.yaml index 5ce0ecee0..bae6e15a9 100755 --- a/pioneer/plugin.yaml +++ b/pioneer/plugin.yaml @@ -18,9 +18,10 @@ parameters: standby_item_path: type: str default: '' + description: - de: 'Item-Pfad für das Standby-Item' - en: 'item path for standby switch item' + de: Item-Pfad für das Standby-Item + en: item path for standby switch item host: type: str @@ -266,7 +267,7 @@ item_structs: - general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -345,6 +346,10 @@ item_structs: - general.settings pioneer_read_initial: true + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + surroundposition: type: str pioneer_command: general.settings.surroundposition @@ -355,6 +360,10 @@ item_structs: - general.settings pioneer_read_initial: true + lookup: + type: list + pioneer_lookup: SURROUNDPOSITION#list + xover: type: str pioneer_command: general.settings.xover @@ -456,6 +465,10 @@ item_structs: - general.settings.hdmi pioneer_read_initial: true + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -1310,7 +1323,7 @@ item_structs: - ALL.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -1341,6 +1354,120 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: ALL.general.settings + + language: 
+ type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: ALL.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + - ALL.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + - ALL.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + - ALL.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - ALL + - ALL.general + - ALL.general.settings + - ALL.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -1992,7 +2119,7 @@ item_structs: - SC-LX87.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -2037,6 +2164,157 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX87.general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + surroundposition: + type: str + pioneer_command: general.settings.surroundposition + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SURROUNDPOSITION#list + + xover: + type: str + pioneer_command: 
general.settings.xover + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + loudness: + type: bool + pioneer_command: general.settings.loudness + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX87.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + - SC-LX87.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + - SC-LX87.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + - SC-LX87.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX87 + - SC-LX87.general + - SC-LX87.general.settings + - SC-LX87.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -2924,7 +3202,7 @@ item_structs: - SC-LX77.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -2969,94 +3247,245 @@ item_structs: pioneer_read: false pioneer_write: true - tuner: - - read: - type: bool - enforce_updates: true - pioneer_read_group_trigger: SC-LX77.tuner - - tunerpreset: - type: num - pioneer_command: tuner.tunerpreset - pioneer_read: true - pioneer_write: true - pioneer_read_group: - - SC-LX77 - - SC-LX77.tuner - - tunerpresetup: - type: bool - pioneer_command: tuner.tunerpresetup - pioneer_read: false - pioneer_write: true - - tunerpresetdown: - type: bool - pioneer_command: tuner.tunerpresetdown - pioneer_read: false - pioneer_write: true - - title: - type: str - pioneer_command: tuner.title - pioneer_read: true - pioneer_write: false - - genre: - type: str - pioneer_command: tuner.genre - pioneer_read: true - pioneer_write: false - - station: - type: str - pioneer_command: tuner.station - pioneer_read: true - pioneer_write: false - - zone1: - - read: - type: bool - enforce_updates: true - pioneer_read_group_trigger: SC-LX77.zone1 - - control: + settings: read: type: bool enforce_updates: true - pioneer_read_group_trigger: SC-LX77.zone1.control + pioneer_read_group_trigger: SC-LX77.general.settings - power: - type: bool - pioneer_command: zone1.control.power + language: + type: str + pioneer_command: general.settings.language pioneer_read: true pioneer_write: true pioneer_read_group: - SC-LX77 - - SC-LX77.zone1 - - SC-LX77.zone1.control + - SC-LX77.general + - SC-LX77.general.settings pioneer_read_initial: true - on_change: 
sh....read.timer(sh..readdelay(), True) if value else None - - readdelay: - type: num - initial_value: 1 - remark: After turning on a zone, the most likely needs some time to react to read commands. If not, set this value to 0 - mute: - type: bool - pioneer_command: zone1.control.mute + name: + type: str + pioneer_command: general.settings.name pioneer_read: true pioneer_write: true pioneer_read_group: - SC-LX77 - - SC-LX77.zone1 - - SC-LX77.zone1.control + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true - volume: + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + surroundposition: + type: str + pioneer_command: general.settings.surroundposition + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SURROUNDPOSITION#list + + xover: + type: str + pioneer_command: general.settings.xover + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true + + loudness: + type: bool + pioneer_command: general.settings.loudness + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX77.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + - SC-LX77.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + - SC-LX77.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + - SC-LX77.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.general + - SC-LX77.general.settings + - SC-LX77.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + + tuner: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX77.tuner + + tunerpreset: + type: num + pioneer_command: tuner.tunerpreset + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.tuner + + tunerpresetup: + type: bool + pioneer_command: tuner.tunerpresetup + pioneer_read: false + pioneer_write: true + + tunerpresetdown: + type: bool + pioneer_command: tuner.tunerpresetdown + pioneer_read: false + 
pioneer_write: true + + title: + type: str + pioneer_command: tuner.title + pioneer_read: true + pioneer_write: false + + genre: + type: str + pioneer_command: tuner.genre + pioneer_read: true + pioneer_write: false + + station: + type: str + pioneer_command: tuner.station + pioneer_read: true + pioneer_write: false + + zone1: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX77.zone1 + + control: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX77.zone1.control + + power: + type: bool + pioneer_command: zone1.control.power + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.zone1 + - SC-LX77.zone1.control + pioneer_read_initial: true + on_change: sh....read.timer(sh..readdelay(), True) if value else None + + readdelay: + type: num + initial_value: 1 + remark: After turning on a zone, the most likely needs some time to react to read commands. If not, set this value to 0 + + mute: + type: bool + pioneer_command: zone1.control.mute + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX77 + - SC-LX77.zone1 + - SC-LX77.zone1.control + + volume: type: num pioneer_command: zone1.control.volume pioneer_read: true @@ -3856,7 +4285,7 @@ item_structs: - SC-LX57.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -3901,6 +4330,157 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX57.general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + surroundposition: + type: str + pioneer_command: general.settings.surroundposition + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SURROUNDPOSITION#list + + xover: + type: str + pioneer_command: general.settings.xover + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + loudness: + type: bool + pioneer_command: general.settings.loudness + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-LX57.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + 
pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + - SC-LX57.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + - SC-LX57.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + - SC-LX57.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-LX57 + - SC-LX57.general + - SC-LX57.general.settings + - SC-LX57.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -4788,7 +5368,7 @@ item_structs: - SC-2023.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -4796,28 +5376,168 @@ item_structs: - SC-2023 - SC-2023.general - dimmer: - type: num - pioneer_command: general.dimmer - pioneer_read: true - pioneer_write: true - remark: 0 = very bright, 1 = bright, 2 = dark, 3 = off + dimmer: + type: num + pioneer_command: general.dimmer + pioneer_read: true + pioneer_write: true + remark: 0 = very bright, 1 = bright, 2 = dark, 3 = off + + sleep: + type: num + pioneer_command: general.sleep + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + remark: 0 = off, 30 = 30 minutes, 60 = 60 minutes, 90 = 90 minutes + + multizone: + type: str + pioneer_command: general.multizone + pioneer_read: false + pioneer_write: true + + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-2023.general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + surroundposition: + type: str + pioneer_command: general.settings.surroundposition + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SURROUNDPOSITION#list + + xover: + type: str + pioneer_command: general.settings.xover + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true 
+ pioneer_read_group_trigger: SC-2023.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + - SC-2023.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + - SC-2023.general.settings.hdmi + pioneer_read_initial: true - sleep: - type: num - pioneer_command: general.sleep - pioneer_read: true - pioneer_write: true - pioneer_read_group: - - SC-2023 - - SC-2023.general - remark: 0 = off, 30 = 30 minutes, 60 = 60 minutes, 90 = 90 minutes + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + - SC-2023.general.settings.hdmi + pioneer_read_initial: true - multizone: - type: str - pioneer_command: general.multizone - pioneer_read: false - pioneer_write: true + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-2023 + - SC-2023.general + - SC-2023.general.settings + - SC-2023.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list tuner: @@ -5706,7 +6426,7 @@ item_structs: - SC-1223.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -5737,6 +6457,120 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-1223.general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: SC-1223.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + - SC-1223.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + - SC-1223.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + 
pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + - SC-1223.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - SC-1223 + - SC-1223.general + - SC-1223.general.settings + - SC-1223.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -6478,7 +7312,7 @@ item_structs: - VSX-1123.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -6509,6 +7343,120 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: VSX-1123.general.settings + + language: + type: str + pioneer_command: general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: VSX-1123.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + - VSX-1123.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + - VSX-1123.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + - VSX-1123.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-1123 + - VSX-1123.general + - VSX-1123.general.settings + - VSX-1123.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: @@ -7160,7 +8108,7 @@ item_structs: - VSX-923.general pqls: - type: str + type: bool pioneer_command: general.pqls pioneer_read: true pioneer_write: true @@ -7191,6 +8139,120 @@ item_structs: pioneer_read: false pioneer_write: true + settings: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: VSX-923.general.settings + + language: + type: str + pioneer_command: 
general.settings.language + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + pioneer_read_initial: true + + name: + type: str + pioneer_command: general.settings.name + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + pioneer_read_initial: true + + speakersystem: + type: str + pioneer_command: general.settings.speakersystem + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: SPEAKERSYSTEM#list + + xcurve: + type: str + pioneer_command: general.settings.xcurve + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + pioneer_read_initial: true + + hdmi: + + read: + type: bool + enforce_updates: true + pioneer_read_group_trigger: VSX-923.general.settings.hdmi + + control: + type: bool + pioneer_command: general.settings.hdmi.control + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + - VSX-923.general.settings.hdmi + pioneer_read_initial: true + + controlmode: + type: bool + pioneer_command: general.settings.hdmi.controlmode + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + - VSX-923.general.settings.hdmi + pioneer_read_initial: true + + arc: + type: bool + pioneer_command: general.settings.hdmi.arc + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + - VSX-923.general.settings.hdmi + pioneer_read_initial: true + + standbythrough: + type: str + pioneer_command: general.settings.hdmi.standbythrough + pioneer_read: true + pioneer_write: true + pioneer_read_group: + - VSX-923 + - VSX-923.general + - VSX-923.general.settings + - VSX-923.general.settings.hdmi + pioneer_read_initial: true + + lookup: + type: list + pioneer_lookup: STANDBYTHROUGH#list + tuner: read: From 46a95e925ebfc9175222b9602194e9f9eb4c52bd Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Jul 2023 22:10:53 +0200 Subject: [PATCH 199/775] Pioneer Plugin: Query some settings when powered on --- pioneer/__init__.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pioneer/__init__.py b/pioneer/__init__.py index b24003f9d..8d8472b27 100755 --- a/pioneer/__init__.py +++ b/pioneer/__init__.py @@ -71,5 +71,18 @@ def _transform_send_data(self, data=None, **kwargs): data['payload'] = f'{data.get("payload", "")}{data["limit_response"].decode("unicode-escape")}' return data + def _process_additional_data(self, command, data, value, custom, by): + + if command == 'zone1.control.power' and value: + self.logger.debug(f"Zone 1 is turned on. 
Requesting settings.") + self.send_command('general.settings.language') + self.send_command('general.settings.speakersystem') + self.send_command('general.settings.xcurve') + self.send_command('general.settings.hdmi.control') + self.send_command('general.settings.hdmi.controlmode') + self.send_command('general.settings.hdmi.arc') + self.send_command('general.settings.hdmi.standbythrough') + + if __name__ == '__main__': s = Standalone(pioneer, sys.argv[0]) From 1a7972e5f63109ed32888ac09ea998e5f40b2fa7 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Sun, 23 Jul 2023 22:30:50 +0200 Subject: [PATCH 200/775] LMS Plugin: bump version to 1.5.2 --- lms/__init__.py | 2 +- lms/plugin.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lms/__init__.py b/lms/__init__.py index 8dff5d715..43e14ca5b 100755 --- a/lms/__init__.py +++ b/lms/__init__.py @@ -49,7 +49,7 @@ class SmartPluginWebIf(): class lms(SmartDevicePlugin): """ Device class for Logitech Mediaserver/Squeezebox function. """ - PLUGIN_VERSION = '1.5.1' + PLUGIN_VERSION = '1.5.2' def _set_device_defaults(self): self.custom_commands = 1 diff --git a/lms/plugin.yaml b/lms/plugin.yaml index fc1427317..81a482357 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -6,7 +6,7 @@ plugin: tester: Morg state: develop keywords: iot device logitechmediaserver lms sdp av - version: 1.5.1 + version: 1.5.2 sh_minversion: 1.9.5 py_minversion: 3.7 multi_instance: false From f25f2505912b738468e34c39387daa3ab5fae70d Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 24 Jul 2023 00:06:32 +0200 Subject: [PATCH 201/775] AVDevice Plugin: Fix dependency function for int/float values --- avdevice/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/avdevice/__init__.py b/avdevice/__init__.py index 0b7d30c99..090fcd4fb 100755 --- a/avdevice/__init__.py +++ b/avdevice/__init__.py @@ -1201,7 +1201,7 @@ def _checkdependency(self, dep_function, dep_type): expectedvalue = eval(expectedvalue.lstrip('0')) except Exception: pass - if type(dependvalue) == type(expectedvalue): + if type(dependvalue) == type(expectedvalue) or (isinstance(dependvalue, (int,float)) and isinstance(expectedvalue, (int,float))): groupcount[group] += 1 if (dependvalue == expectedvalue and compare == '==') or \ (dependvalue >= expectedvalue and compare == '>=') or \ (dependvalue <= expectedvalue and compare == '<=') or \ From b75576b296ab99faace180c0072c1e7cc4b9daec Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Mon, 24 Jul 2023 08:09:35 +0200 Subject: [PATCH 202/775] avdevice plugin: fix dependency check part 2 --- avdevice/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/avdevice/__init__.py b/avdevice/__init__.py index 090fcd4fb..e178f5872 100755 --- a/avdevice/__init__.py +++ b/avdevice/__init__.py @@ -1256,7 +1256,7 @@ def _checkdependency(self, dep_function, dep_type): self.logger.log(VERBOSE2, "Checking Dependency {}: Expectedvalue after Translation {}. 
Dependitem: {}, expected {}".format( self._name, expectedvalue, dependitem, expectedvalue)) - if type(dependvalue) == type(expectedvalue): + if type(dependvalue) == type(expectedvalue) or (isinstance(dependvalue, (int,float)) and isinstance(expectedvalue, (int,float))): groupcount[group] += 1 if (dependvalue == expectedvalue and compare == '==') or \ (dependvalue >= expectedvalue and compare == '>=') or \ (dependvalue <= expectedvalue and compare == '<=') or \ From d6857e531357f5a343584d795c95bc42d7f333b6 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Mon, 24 Jul 2023 10:19:30 +0200 Subject: [PATCH 203/775] only check dir if options are set --- smartvisu/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/smartvisu/__init__.py b/smartvisu/__init__.py index df8cf2a63..e83c4775e 100755 --- a/smartvisu/__init__.py +++ b/smartvisu/__init__.py @@ -46,7 +46,7 @@ class SmartVisu(SmartPlugin): - PLUGIN_VERSION="1.8.11" + PLUGIN_VERSION="1.8.12" ALLOW_MULTIINSTANCE = True visu_definition = None @@ -125,7 +125,8 @@ def __init__(self, sh): def run(self): self.alive = True - if self.smartvisu_dir != '': + # skip directory handling if generate pages and handle_widgets are disabled + if self.smartvisu_dir != '' and (self._generate_pages or self._handle_widgets): if not os.path.isdir(os.path.join(self.smartvisu_dir, 'pages')): self.logger.error("Could not find valid smartVISU directory: {}".format(self.smartvisu_dir)) else: From 6368b11ebf82d455c978afaa4aa140c647538618 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Mon, 24 Jul 2023 12:21:42 +0200 Subject: [PATCH 204/775] added standby mode, do_before_send --- kodi/__init__.py | 47 ++++++++++++++++++----------------------------- kodi/plugin.yaml | 10 +++++++++- 2 files changed, 27 insertions(+), 30 deletions(-) diff --git a/kodi/__init__.py b/kodi/__init__.py index b3f6ba87d..533ad064a 100644 --- a/kodi/__init__.py +++ b/kodi/__init__.py @@ -81,7 +81,7 @@ class kodi(SmartDevicePlugin): another place, in ``commands.py`` and/or the item configuration. """ - PLUGIN_VERSION = '1.7.0' + PLUGIN_VERSION = '1.7.1' def _set_device_defaults(self): self._use_callbacks = True @@ -111,6 +111,10 @@ def on_data_received(self, by, data, command=None): :param data: received data in 'raw' connection format :type command: str """ + if self.standby: + self.logger.debug(f'received data for command {command} on standby, discarding data.') + return + if command is not None: self.logger.debug(f'received data "{data}" for command {command}') else: @@ -321,45 +325,26 @@ def on_data_received(self, by, data, command=None): self.logger.debug(f'received data "{data}" for command {command} converted to value {value}') self._dispatch_callback(command, value, by) - def send_command(self, command, value=None, **kwargs): + def _do_before_send(self, command, value, kwargs): """ - Checks for special commands and handles them, otherwise call the - base class' method - - :param command: the command to send - :param value: the data to send, if applicable - :type command: str - :return: True if send was successful, False otherwise - :rtype: bool + Checks for special commands and handles them """ - if not self.alive: - self.logger.warning(f'trying to send command {command} with value {value}, but device is not active.') - return False - - if not self._connection: - self.logger.warning(f'trying to send command {command} with value {value}, but connection is None. 
This shouldn\'t happen...') - return False - - if not self._connection.connected: - self._connection.open() - if not self._connection.connected: - self.logger.warning(f'trying to send command {command} with value {value}, but connection could not be established.') - return False - if command in self._special_commands['read' if value is None else 'write']: if command == 'status.update': if value: self._update_status() - return True + return (False, True) elif value is None: self.logger.debug(f'Special command {command} called for reading, which is not intended. Ignoring request') - return True + return (False, True) else: # this shouldn't happen self.logger.warning(f'Special command {command} found, no action set for processing. Please inform developers. Ignoring request') - return True - else: - return super().send_command(command, value, playerid=self._playerid, **kwargs) + return (False, True) + + # add playerid to kwargs for further processing + kwargs['playerid'] = self._playerid + return (True, True) def is_valid_command(self, command, read=None): """ @@ -395,6 +380,10 @@ def notify(self, title, message, image=None, display_time=10000): :param image: an optional image to be displayed alongside the message :param display_time: how long the message is displayed in milli seconds """ + if self.standby: + self.logger.info(f'trying to send notification {title}, but plugin is in standby mode. Discarding notification.') + return + params = {'title': title, 'message': message, 'displaytime': display_time} if image is not None: params['image'] = image diff --git a/kodi/plugin.yaml b/kodi/plugin.yaml index 14f09ac1e..8fa4da0eb 100644 --- a/kodi/plugin.yaml +++ b/kodi/plugin.yaml @@ -6,7 +6,7 @@ plugin: tester: OnkelAndy state: develop keywords: iot device mediacenter kodi xmbc sdp - version: 1.7.0 + version: 1.7.1 sh_minversion: 1.9.5 py_minversion: 3.7 multi_instance: false @@ -23,6 +23,14 @@ parameters: de: Netzwerkziel/-host en: network host + standby_item_path: + type: str + default: '' + + description: + de: 'Item-Pfad für das Standby-Item' + en: 'item path for standby switch item' + timeout: type: num default: 3 From 9fc9ac8c693e5c37a8f8c2ff7d7222ff8afcd839 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Mon, 24 Jul 2023 16:31:32 +0200 Subject: [PATCH 205/775] adjust version number --- smartvisu/plugin.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/smartvisu/plugin.yaml b/smartvisu/plugin.yaml index 0238cdeb5..d8cf4cb29 100755 --- a/smartvisu/plugin.yaml +++ b/smartvisu/plugin.yaml @@ -12,7 +12,7 @@ plugin: #documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1586800-support-thread-für-das-smartvisu-plugin - version: 1.8.11 # Plugin version + version: 1.8.12 # Plugin version sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.6 # minimum Python version to use for this plugin From 0bfad2e6518a8d522480ef95b1e5077dfb027a32 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Wed, 26 Jul 2023 17:40:32 +0200 Subject: [PATCH 206/775] update to sdp standby --- kodi/__init__.py | 2 +- kodi/plugin.yaml | 18 +++++++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/kodi/__init__.py b/kodi/__init__.py index 533ad064a..e0873c373 100644 --- a/kodi/__init__.py +++ b/kodi/__init__.py @@ -81,7 +81,7 @@ class kodi(SmartDevicePlugin): another 
place, in ``commands.py`` and/or the item configuration. """ - PLUGIN_VERSION = '1.7.1' + PLUGIN_VERSION = '1.7.2' def _set_device_defaults(self): self._use_callbacks = True diff --git a/kodi/plugin.yaml b/kodi/plugin.yaml index 8fa4da0eb..a2b809c1a 100644 --- a/kodi/plugin.yaml +++ b/kodi/plugin.yaml @@ -6,7 +6,7 @@ plugin: tester: OnkelAndy state: develop keywords: iot device mediacenter kodi xmbc sdp - version: 1.7.1 + version: 1.7.2 sh_minversion: 1.9.5 py_minversion: 3.7 multi_instance: false @@ -71,6 +71,22 @@ parameters: de: Pause zwischen Verbindungsversuchen en: wait time between connect retries + retry_cycle: + type: num + default: 30 + + description: + de: 'Pause zwischen Durchgängen von Verbindungsversuchen' + en: 'wait time between connect retry rounds' + + retry_standby: + type: num + default: 0 + + description: + de: 'Anzahl von Durchgängen vor Verbindungsabbruch oder Standby-Modus' + en: 'number of connect rounds before giving up / entering standby mode' + message_timeout: type: num default: 5 From 80a8358775f20114796bdd23e1f5ca10f7c36afe Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 27 Jul 2023 09:27:39 +0200 Subject: [PATCH 207/775] Pioneer Plugin: improve settings read on power on --- pioneer/__init__.py | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/pioneer/__init__.py b/pioneer/__init__.py index 8d8472b27..ac9d4aaac 100755 --- a/pioneer/__init__.py +++ b/pioneer/__init__.py @@ -24,6 +24,7 @@ import builtins import os import sys +import time if __name__ == '__main__': @@ -38,7 +39,7 @@ class SmartPluginWebIf(): from lib.model.sdp.globals import (PLUGIN_ATTR_NET_HOST, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, - CONN_NET_TCP_CLI, CONN_SER_ASYNC, CONN_NULL) + PLUGIN_ATTR_MODEL, CONN_NET_TCP_CLI, CONN_SER_ASYNC, CONN_NULL) from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone # from .webif import WebInterface @@ -72,16 +73,23 @@ def _transform_send_data(self, data=None, **kwargs): return data def _process_additional_data(self, command, data, value, custom, by): - - if command == 'zone1.control.power' and value: - self.logger.debug(f"Zone 1 is turned on. Requesting settings.") - self.send_command('general.settings.language') - self.send_command('general.settings.speakersystem') - self.send_command('general.settings.xcurve') - self.send_command('general.settings.hdmi.control') - self.send_command('general.settings.hdmi.controlmode') - self.send_command('general.settings.hdmi.arc') - self.send_command('general.settings.hdmi.standbythrough') + cond1 = command == 'zone1.control.power' or command == 'zone2.control.power' or command == 'zone3.control.power' + if cond1 and value: + self.logger.debug(f"Device is turned on by command {command}. 
Requesting settings.") + time.sleep(1) + if self._parameters[PLUGIN_ATTR_MODEL] == '': + self.read_all_commands('ALL.general.settings') + else: + self.read_all_commands(f'{self._parameters[PLUGIN_ATTR_MODEL]}.general.settings') + #self.send_command('general.settings.language') + #self.send_command('general.settings.speakersystem') + #self.send_command('general.settings.surroundposition') + #self.send_command('general.settings.xover') + #self.send_command('general.settings.xcurve') + #self.send_command('general.settings.hdmi.control') + #self.send_command('general.settings.hdmi.controlmode') + #self.send_command('general.settings.hdmi.arc') + #self.send_command('general.settings.hdmi.standbythrough') if __name__ == '__main__': From 2bfaf5b1594b4cfff742e7814f5853b867291836 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 27 Jul 2023 12:46:35 +0200 Subject: [PATCH 208/775] DB_ADDON: - Introduce plugin parameter to lock database before reading - allow excluding items from calculation via WebIF - allow to re-calculate items per WebIF - allow plugin item cache to be cleared for certain item per WebIF - just use shtime for time and date calculation - make sure, that db query always fetches latest entries - internal improvements --- db_addon/__init__.py | 953 ++++++++++++---------- db_addon/item_attributes_master.py | 299 ------- db_addon/locale.yaml | 0 db_addon/plugin.yaml | 17 +- db_addon/requirements.txt | 0 db_addon/user_doc.rst | 0 db_addon/webif/__init__.py | 102 ++- db_addon/webif/static/img/plugin_logo.png | Bin db_addon/webif/templates/index.html | 111 ++- 9 files changed, 733 insertions(+), 749 deletions(-) mode change 100755 => 100644 db_addon/__init__.py delete mode 100755 db_addon/item_attributes_master.py mode change 100755 => 100644 db_addon/locale.yaml mode change 100755 => 100644 db_addon/plugin.yaml mode change 100755 => 100644 db_addon/requirements.txt mode change 100755 => 100644 db_addon/user_doc.rst mode change 100755 => 100644 db_addon/webif/__init__.py mode change 100755 => 100644 db_addon/webif/static/img/plugin_logo.png mode change 100755 => 100644 db_addon/webif/templates/index.html diff --git a/db_addon/__init__.py b/db_addon/__init__.py old mode 100755 new mode 100644 index f20b78023..02c521b7a --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -30,8 +30,11 @@ import time import re import queue +import threading +import logging from dateutil.relativedelta import relativedelta from typing import Union +from dataclasses import dataclass, InitVar from lib.model.smartplugin import SmartPlugin from lib.item import Items @@ -53,7 +56,8 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. 
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.2.2' + PLUGIN_VERSION = '1.2.3' + REVISION = 'C' def __init__(self, sh): """ @@ -75,6 +79,8 @@ def __init__(self, sh): # define variables for database, database connection, working queue and status self.item_queue = queue.Queue() # Queue containing all to be executed items + # ToDo: Check if still needed + self.queue_consumer_thread = None # Queue consumer thread self._db_plugin = None # object if database plugin self._db = None # object of database self.connection_data = None # connection data list of database @@ -82,19 +88,13 @@ def __init__(self, sh): self.db_instance = None # instance of the used database self.item_attribute_search_str = 'database' # attribute, on which an item configured for database can be identified self.last_connect_time = 0 # mechanism for limiting db connection requests + # ToDo: Check if still needed + self.last_commit_time = 0 self.alive = None # Is plugin alive? self.startup_finished = False # Startup of Plugin finished self.suspended = False # Is plugin activity suspended self.active_queue_item: str = '-' # String holding item path of currently executed item - # define debug logs - self.parse_debug = True # Enable / Disable debug logging for method 'parse item' - self.execute_debug = True # Enable / Disable debug logging for method 'execute items' - self.sql_debug = True # Enable / Disable debug logging for sql stuff - self.ondemand_debug = True # Enable / Disable debug logging for method 'handle_ondemand' - self.onchange_debug = True # Enable / Disable debug logging for method 'handle_onchange' - self.prepare_debug = True # Enable / Disable debug logging for query preparation - # define default mysql settings self.default_connect_timeout = 60 self.default_net_read_timeout = 60 @@ -106,6 +106,12 @@ def __init__(self, sh): self.value_filter = self.get_parameter_value('value_filter') self.optimize_value_filter = self.get_parameter_value('optimize_value_filter') self.use_oldest_entry = self.get_parameter_value('use_oldest_entry') + self.lock_db_for_query = self.get_parameter_value('lock_db_for_query') + # ToDo: Check if still needed + self.refresh_cycle = self.get_parameter_value('refresh_cycle') + + # get debug log options + self.debug_log = DebugLogOptions(self.log_level) # init cache dicts self._init_cache_dicts() @@ -132,20 +138,21 @@ def run(self): return self.deinit() self.logger.debug("Initialization of database API successful") - # init db + # check initialization of db if not self._initialize_db(): self.logger.error("Connection to database failed") return self.deinit() + self._db.close() # check db connection settings - if self.db_driver is not None and self.db_driver.lower() == 'pymysql': + if self.db_driver.lower() == 'pymysql': self._check_db_connection_setting() # add scheduler for cyclic trigger item calculation - self.scheduler_add('cyclic', self.execute_due_items, prio=3, cron='5 0 0 * * *', cycle=None, value=None, offset=None, next=None) + self.scheduler_add('cyclic', self.execute_due_items, prio=3, cron='10 0 * * *', cycle=None, value=None, offset=None, next=None) # add scheduler to trigger items to be calculated at startup with delay - dt = self.shtime.now() + datetime.timedelta(seconds=(self.startup_run_delay + 3)) + dt = self.shtime.now() + relativedelta(seconds=(self.startup_run_delay + 3)) self.logger.info(f"Set scheduler for calculating startup-items with delay of {self.startup_run_delay + 3}s to {dt}.") self.scheduler_add('startup', 
self.execute_startup_items, next=dt) @@ -156,8 +163,12 @@ def run(self): self.alive = True # work item queue + self.work_item_queue() + + # ToDo: Check if still needed + """ try: - self.work_item_queue() + self._queue_consumer_thread_startup() except Exception as e: self.logger.warning(f"During working item queue Exception '{e}' occurred.") self.logger.debug(e, exc_info=True) @@ -165,6 +176,7 @@ def run(self): # self.deinit() self.logger.error("Suspend Plugin and clear Item-Queue.") self.suspend(True) + """ def stop(self): """ @@ -174,6 +186,9 @@ def stop(self): self.logger.debug("Stop method called") self.alive = False self.scheduler_remove('cyclic') + self._db.close() + # ToDo: Check if still needed + # self._queue_consumer_thread_shutdown() def parse_item(self, item: Item): """ @@ -198,7 +213,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: if db_addon_fct in HISTORIE_ATTRIBUTES_ONCHANGE: # handle functions 'minmax on-change' in format 'minmax_timeframe_func' items like 'minmax_heute_max', 'minmax_heute_min', 'minmax_woche_max', 'minmax_woche_min' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) func = db_addon_fct_vars[2] if db_addon_fct_vars[2] in ALLOWED_MINMAX_FUNCS else None start = end = 0 log_text = 'minmax_timeframe_func' @@ -209,7 +224,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: func = db_addon_fct_vars[3] start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[2]) start = to_int(start) - timeframe = harmonize_timeframe_expression(timeframe) + timeframe = translate_timeframe(timeframe) end = 0 log_text = 'minmax_last_timedelta|timeframe_function' required_params = [func, timeframe, start, end] @@ -217,7 +232,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in HISTORIE_ATTRIBUTES_TIMEFRAME: # handle functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' func = db_addon_fct_vars[3] # min, max, avg - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) # day, week, month, year + timeframe = translate_timeframe(db_addon_fct_vars[1]) # day, week, month, year end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end log_text = 'minmax_timeframe_timedelta_func' @@ -226,7 +241,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME: # handle functions 'zaehlerstand' in format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_heute_minus1' # func = 'max' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end log_text = 'zaehlerstand_timeframe_timedelta' @@ -234,7 +249,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ONCHANGE: # handle functions 'verbrauch on-change' items in format 'verbrauch_timeframe' like 'verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) end = 0 start = 1 log_text = 'verbrauch_timeframe' @@ -242,7 +257,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in VERBRAUCH_ATTRIBUTES_TIMEFRAME: # handle functions 'verbrauch on-demand' in format 
'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) # end = to_int(db_addon_fct_vars[2][-1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end + 1 @@ -250,28 +265,29 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: required_params = [timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_ROLLING: + # ToDo: check if rolling window correct; muss start und ende dynamisch berechnet werden? # handle functions 'verbrauch_on-demand' in format 'verbrauch_rolling_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' func = db_addon_fct_vars[1] window_inc, window_dur = split_sting_letters_numbers(db_addon_fct_vars[2]) window_inc = to_int(window_inc) # 12 - window_dur = harmonize_timeframe_expression(window_dur) # day, week, month, year - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[3]) # day, week, month, year + window_dur = translate_timeframe(window_dur) # day, week, month, year + timeframe = translate_timeframe(db_addon_fct_vars[3]) # day, week, month, year end = to_int(split_sting_letters_numbers(db_addon_fct_vars[4])[1]) if window_dur in ALLOWED_QUERY_TIMEFRAMES and window_inc and timeframe and end: - start = to_int(convert_timeframe(timeframe, window_dur) * window_inc) + end + start = to_int(timeframe_to_timeframe(timeframe, window_dur) * window_inc) + end log_text = 'verbrauch_rolling_window_timeframe_timedelta' required_params = [func, timeframe, start, end] elif db_addon_fct in VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM: # handle functions of format 'verbrauch_jahreszeitraum_timedelta' like 'verbrauch_jahreszeitraum_minus1' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) # day, week, month, year + timeframe = translate_timeframe(db_addon_fct_vars[1]) # day, week, month, year timedelta = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) log_text = 'verbrauch_jahreszeitraum_timedelta' required_params = [timeframe, timedelta] elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_ONCHANGE: # handle functions 'tagesmitteltemperatur on-change' items in format 'tagesmitteltemperatur_timeframe' like 'tagesmitteltemperatur_heute', 'tagesmitteltemperatur_woche', 'tagesmitteltemperatur_monat', 'tagesmitteltemperatur_jahr' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) func = 'max' start = end = 0 log_text = 'tagesmitteltemperatur_timeframe' @@ -280,7 +296,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in TAGESMITTEL_ATTRIBUTES_TIMEFRAME: # handle 'tagesmitteltemperatur_timeframe_timedelta' like 'tagesmitteltemperatur_heute_minus1' func = 'max' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[1]) + timeframe = translate_timeframe(db_addon_fct_vars[1]) end = to_int(split_sting_letters_numbers(db_addon_fct_vars[2])[1]) start = end method = 'avg_hour' @@ -290,10 +306,10 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in SERIE_ATTRIBUTES_MINMAX: # handle functions 'serie_minmax' in format 'serie_minmax_timeframe_func_start|group' like 'serie_minmax_monat_min_15m' func = db_addon_fct_vars[3] - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) + timeframe = translate_timeframe(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[4]) start = to_int(start) - 
group = harmonize_timeframe_expression(group) + group = translate_timeframe(group) end = 0 log_text = 'serie_minmax_timeframe_func_start|group' required_params = [func, timeframe, start, end, group] @@ -301,20 +317,20 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: elif db_addon_fct in SERIE_ATTRIBUTES_ZAEHLERSTAND: # handle functions 'serie_zaehlerstand' in format 'serie_zaehlerstand_timeframe_start|group' like 'serie_zaehlerstand_tag_30d' func = 'max' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) + timeframe = translate_timeframe(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group = harmonize_timeframe_expression(group) + group = translate_timeframe(group) log_text = 'serie_zaehlerstand_timeframe_start|group' required_params = [timeframe, start, group] elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: # handle all functions of format 'serie_verbrauch_timeframe_start|group' like 'serie_verbrauch_tag_30d' func = 'diff_max' - timeframe = harmonize_timeframe_expression(db_addon_fct_vars[2]) + timeframe = translate_timeframe(db_addon_fct_vars[2]) start, group = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group = harmonize_timeframe_expression(group) + group = translate_timeframe(group) log_text = 'serie_verbrauch_timeframe_start|group' required_params = [timeframe, start, group] @@ -323,7 +339,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: func = 'sum_max' start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - timeframe = harmonize_timeframe_expression(timeframe) + timeframe = translate_timeframe(timeframe) end = 0 group = 'day', group2 = 'month' @@ -336,7 +352,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: timeframe = 'year' start, group = split_sting_letters_numbers(db_addon_fct_vars[2]) start = to_int(start) - group = harmonize_timeframe_expression(group) + group = translate_timeframe(group) end = 0 log_text = 'serie_tagesmittelwert_count|group' required_params = [func, timeframe, start, end, group] @@ -349,17 +365,17 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: group = 'hour' start, group2 = split_sting_letters_numbers(db_addon_fct_vars[3]) start = to_int(start) - group2 = harmonize_timeframe_expression(group2) + group2 = translate_timeframe(group2) log_text = 'serie_tagesmittelwert_group2_count|group' required_params = [func, timeframe, start, end, group, group2] elif db_addon_fct in SERIE_ATTRIBUTES_MITTEL_H1: - # handle 'serie_tagesmittelwert_stunde_start_end|group' like 'serie_tagesmittelwert_stunde_30_0d' => Stundenmittelwerte von vor 30 Tage bis vor 0 Tagen (also heute) + # handle 'serie_tagesmittelwert_stunde_start_end|group' like 'serie_tagesmittelwert_stunde_30_0d' => Stundenmittelwerte von vor 30 Tagen bis vor 0 Tagen (also heute) method = 'avg_hour' start = to_int(db_addon_fct_vars[3]) end, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) end = to_int(end) - timeframe = harmonize_timeframe_expression(timeframe) + timeframe = translate_timeframe(timeframe) log_text = 'serie_tagesmittelwert_stunde_start_end|group' required_params = [timeframe, method, start, end] @@ -369,7 +385,7 @@ def get_query_parameters_from_db_addon_fct() -> Union[dict, None]: end = 0 start, timeframe = split_sting_letters_numbers(db_addon_fct_vars[4]) start = to_int(start) - timeframe = harmonize_timeframe_expression(timeframe) + timeframe = 
translate_timeframe(timeframe) log_text = 'serie_tagesmittelwert_tag_stunde_end|group' required_params = [timeframe, method, start, end] @@ -461,7 +477,7 @@ def get_database_item_path() -> tuple: for i in range(3): if self.has_iattr(_lookup_item.conf, 'db_addon_database_item'): - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Attribut 'db_addon_database_item' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") _database_item_path = self.get_iattr_value(_lookup_item.conf, 'db_addon_database_item') _startup = bool(self.get_iattr_value(_lookup_item.conf, 'db_addon_startup')) @@ -480,7 +496,7 @@ def get_database_item() -> Item: for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Attribut '{self.item_attribute_search_str}' for item='{item.path()}' has been found {i + 1} level above item at '{_lookup_item.path()}'.") return _lookup_item else: @@ -532,7 +548,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte db_addon_ignore_value_list_formatted.append(f"{op} {value}") max_values[op].append(value) - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Summarized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_formatted}") if not db_addon_ignore_value_list_formatted: @@ -541,7 +557,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if not optimize: return db_addon_ignore_value_list_formatted - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Optimizing 'ignore_value_list' for item {item.path()} active.") # find low @@ -572,7 +588,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if (not lower_end[0] or (lower_end[0] and v >= lower_end[1])) or (not upper_end[0] or (upper_end[0] and v <= upper_end[1])): db_addon_ignore_value_list_optimized.append(f'!= {v}') - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Optimized 'ignore_value_list' for item {item.path()}: {db_addon_ignore_value_list_optimized}") return db_addon_ignore_value_list_optimized @@ -580,7 +596,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte # handle all items with db_addon_fct if self.has_iattr(item.conf, 'db_addon_fct'): - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"parse item: {item.path()} due to 'db_addon_fct'") # get db_addon_fct attribute value @@ -600,7 +616,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte database_item = get_database_item() db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) if database_item is None: - self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") + self.logger.warning(f"No database item found for item={item.path()}: Item ignored. 
Maybe you should check instance of database plugin.") return # get/create list of comparison operators and check it @@ -623,20 +639,20 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte if db_addon_ignore_value_list: db_addon_ignore_value_list_final = format_db_addon_ignore_value_list() - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"{db_addon_ignore_value_list_final=}") query_params.update({'ignore_value_list': db_addon_ignore_value_list_final}) # create standard items config - item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'query_params': query_params} + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'query_params': query_params, 'active': True} if isinstance(database_item, str): item_config_data_dict.update({'database_item_path': True}) else: database_item = database_item.path() # do logging - if self.parse_debug: - self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item}") + if self.debug_log.parse: + self.logger.debug(f"Item={item.path()} added with db_addon_fct={db_addon_fct} and database_item={database_item}") # add cycle for item groups if db_addon_fct in ALL_DAILY_ATTRIBUTES: @@ -661,7 +677,7 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte item_config_data_dict.update({'cycle': f"{timeframe_to_updatecyle(cycle)}"}) # do logging - if self.parse_debug: + if self.debug_log.parse: self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") # create item config for item to be run on startup @@ -675,21 +691,21 @@ def format_db_addon_ignore_value_list(optimize: bool = self.optimize_value_filte # handle all items with db_addon_info elif self.has_iattr(item.conf, 'db_addon_info'): - if self.parse_debug: - self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_info'") + if self.debug_log.parse: + self.logger.debug(f"parse item={item.path()} due to used item attribute 'db_addon_info'") self.add_item(item, config_data_dict={'db_addon': 'info', 'db_addon_fct': f"info_{self.get_iattr_value(item.conf, 'db_addon_info').lower()}", 'database_item': None, 'startup': True}) # handle all items with db_addon_admin elif self.has_iattr(item.conf, 'db_addon_admin'): - if self.parse_debug: - self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_admin'") + if self.debug_log.parse: + self.logger.debug(f"parse item={item.path()} due to used item attribute 'db_addon_admin'") self.add_item(item, config_data_dict={'db_addon': 'admin', 'db_addon_fct': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}", 'database_item': None}) return self.update_item # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen elif self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): - if self.parse_debug: - self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") + if self.debug_log.parse: + self.logger.debug(f"reference to update_item for item={item.path()} will be set due to on-change") self.add_item(item, config_data_dict={'db_addon': 'database'}) return self.update_item @@ -739,7 +755,7 @@ def execute_startup_items(self) -> None: self.execute_items(option='startup') self.startup_finished = True - def execute_items(self, option: str = 
'due'): + def execute_items(self, option: str = 'due', item: str = None): """Execute all items per option""" def _create_due_items() -> list: @@ -752,33 +768,36 @@ def _create_due_items() -> list: self.previous_values[DAY] = {} # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday( - self.shtime.today()) == 1: + if self.shtime.weekday(self.shtime.today()) == 1: _todo_items.update(set(self._weekly_items())) self.current_values[WEEK] = {} self.previous_values[WEEK] = {} # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: + if self.shtime.now().day == 1: _todo_items.update(set(self._monthly_items())) self.current_values[MONTH] = {} self.previous_values[MONTH] = {} - # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet - if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: - _todo_items.update(set(self._yearly_items())) - self.current_values[YEAR] = {} - self.previous_values[YEAR] = {} + # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet + if self.shtime.now().month == 1: + _todo_items.update(set(self._yearly_items())) + self.current_values[YEAR] = {} + self.previous_values[YEAR] = {} return list(_todo_items) - if self.execute_debug: + if self.debug_log.execute: self.logger.debug(f"execute_items called with {option=}") if self.suspended: self.logger.info(f"Plugin is suspended. No items will be calculated.") return + deactivated_items = self._deactivated_items() + if len(deactivated_items) > 0: + self.logger.info(f"{len(deactivated_items)} are de-activated and will not be calculated.") + todo_items = [] if option == 'startup': todo_items = self._startup_items() @@ -794,7 +813,17 @@ def _create_due_items() -> list: todo_items = self._all_items() elif option == 'due': todo_items = _create_due_items() + elif option == 'item': + if isinstance(item, str): + item = self.items.return_item(item) + if isinstance(item, Item): + todo_items = [item] + # remove de-activated items + if option != 'item': + todo_items = list(set(todo_items) - set(deactivated_items)) + + # put to queue self.logger.info(f"{len(todo_items)} items will be calculated for {option=}.") [self.item_queue.put(i) for i in todo_items] @@ -804,17 +833,18 @@ def work_item_queue(self) -> None: while self.alive: try: queue_entry = self.item_queue.get(True, 10) + self.logger.debug(f"{queue_entry=}") except queue.Empty: self.active_queue_item = '-' pass else: if isinstance(queue_entry, tuple): item, value = queue_entry - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item '{item.path()}' with {value=} will be processed.") + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item={item.path()} with {value=} will be processed.") self.active_queue_item = str(item.path()) self.handle_onchange(item, value) else: - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-demand' item '{queue_entry.path()}' will be processed.") + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. 
|| 'on-demand' item={queue_entry.path()} will be processed.") self.active_queue_item = str(queue_entry.path()) self.handle_ondemand(queue_entry) @@ -827,7 +857,7 @@ def handle_ondemand(self, item: Item) -> None: # get parameters item_config = self.get_item_config(item) - if self.ondemand_debug: + if self.debug_log.ondemand: self.logger.debug(f"Item={item.path()} with {item_config=}") db_addon_fct = item_config['db_addon_fct'] database_item = item_config['database_item'] @@ -838,7 +868,7 @@ def handle_ondemand(self, item: Item) -> None: else: params = {} - if self.ondemand_debug: + if self.debug_log.ondemand: self.logger.debug(f"{db_addon_fct=} will _query_item with {params=}.") # handle item starting with 'verbrauch_' @@ -846,7 +876,7 @@ def handle_ondemand(self, item: Item) -> None: result = self._handle_verbrauch(params) if result and result < 0: - self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. Something seems to be wrong.") + self.logger.info(f"Result of item {item.path()} with {db_addon_fct=} was negative. Something seems to be wrong.") # handle 'serie_verbrauch' elif db_addon_fct in SERIE_ATTRIBUTES_VERBRAUCH: @@ -907,7 +937,7 @@ def handle_ondemand(self, item: Item) -> None: result = self._query_item(**params)[0][1] # log result - if self.ondemand_debug: + if self.debug_log.ondemand: self.logger.debug(f"result is {result} for item '{item.path()}' with '{db_addon_fct=}'") if result is None: @@ -932,7 +962,7 @@ def handle_minmax(): cache_dict = self.current_values[timeframe] init = False - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"'minmax' Item={updated_item.path()} with {func=} and {timeframe=} detected. Check for update of cache_dicts {cache_dict=} and item value.") # make sure, that database item is in cache dict @@ -942,55 +972,55 @@ def handle_minmax(): # get _recent_value; if not already cached, create cache cached_value = cache_dict[database_item].get(func) if cached_value is None: - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"{func} value for {timeframe=} of item={updated_item.path()} not in cache dict. Query database.") query_params = {'func': func, 'database_item': database_item, 'timeframe': timeframe, 'start': 0, 'end': 0, 'ignore_value_list': ignore_value_list, 'use_oldest_entry': True} cached_value = self._query_item(**query_params)[0][1] if cached_value is None: - if self.onchange_debug: - self.logger.debug(f"{func} value for {timeframe=} of item={updated_item.path()} not available in database. Abort calculation.") + if self.debug_log.onchange: + self.logger.debug(f"{func} value for {timeframe=} of item={updated_item.path()} not available in database. Abort calculation.") return init = True - # if value not given -> read + # if value not given if init: - if self.onchange_debug: - self.logger.debug(f"initial {func} value for {timeframe=} of item={item.path()} with will be set to {cached_value}") - cache_dict[database_item][func] = cached_value - return cached_value + if self.debug_log.onchange: + self.logger.debug(f"initial {func} value for {timeframe=} of item={item.path()} with will be set to {value}") + cache_dict[database_item][func] = value + return value # check value for update of cache dict min elif func == 'min' and value < cached_value: - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"new value={value} lower then current min_value={cached_value} for {timeframe=}. 
cache_dict will be updated") cache_dict[database_item][func] = value return value # check value for update of cache dict max elif func == 'max' and value > cached_value: - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"new value={value} higher then current max_value={cached_value} for {timeframe=}. cache_dict will be updated") cache_dict[database_item][func] = value return value # no impact - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"new value={value} will not change max/min for period={timeframe}.") return None def handle_verbrauch(): cache_dict = self.previous_values[timeframe] - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"'verbrauch' item {updated_item.path()} with {func=} and {value=} detected. Check for update of cache_dicts {cache_dict=} and item value.") # get _cached_value for value at end of last period; if not already cached, create cache cached_value = cache_dict.get(database_item) if cached_value is None: - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"Most recent value for last {timeframe=} of item={updated_item.path()} not in cache dict. Query database.") # try to get most recent value of last timeframe, assuming that this is the value at end of last timeframe @@ -1002,12 +1032,12 @@ def handle_verbrauch(): return cache_dict[database_item] = cached_value - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"Value for Item={updated_item.path()} at end of last {timeframe} not in cache dict. Value={cached_value} has been added.") # calculate value, set item value, put data into plugin_item_dict _new_value = value - cached_value - return _new_value if isinstance(_new_value, int) else round(_new_value, 1) + return _new_value if isinstance(_new_value, int) else round(_new_value, 2) def handle_tagesmittel(): result = self._prepare_value_list(database_item=database_item, timeframe='day', start=0, end=0, ignore_value_list=ignore_value_list, method='first_hour') @@ -1015,17 +1045,18 @@ def handle_tagesmittel(): if isinstance(result, list): return result[0][1] - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"called with updated_item={updated_item.path()} and value={value}.") relevant_item_list = set(self.get_item_list('database_item', updated_item)) & set(self.get_item_list('cycle', 'on-change')) - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"Following items where identified for update: {relevant_item_list}.") for item in relevant_item_list: item_config = self.get_item_config(item) - self.logger.debug(f"Item={item.path()} with {item_config=}") + if self.debug_log.onchange: + self.logger.debug(f"Item={item.path()} with {item_config=}") db_addon_fct = item_config['db_addon_fct'] database_item = item_config['database_item'] timeframe = item_config['query_params']['timeframe'] @@ -1035,7 +1066,7 @@ def handle_tagesmittel(): # handle all on_change functions if db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES: - if self.onchange_debug: + if self.debug_log.onchange: self.logger.debug(f"non on-change function detected. 
Skip update.") continue @@ -1074,8 +1105,19 @@ def _update_database_items(self) -> None: if db_addon_startup: item_config.update({'startup': True}) + def _activate_item_calculation(self, item: Union[str, Item], active: bool = True) -> None: + """active / de-active item calculation""" + if isinstance(item, str): + item = self.items.return_item(item) + + if not isinstance(item, Item): + return + + item_config = self.get_item_config(item) + item_config['active'] = active + @property - def log_level(self): + def log_level(self) -> int: return self.logger.getEffectiveLevel() def queue_backlog(self) -> int: @@ -1120,6 +1162,9 @@ def _database_item_path_items(self) -> list: def _ondemand_items(self) -> list: return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() + def _deactivated_items(self) -> list: + return self.get_item_list('active', False) + def _all_items(self) -> list: # return self._ondemand_items() + self._onchange_items() + self._static_items() + self._admin_items() + self._info_items() return self.get_item_list('db_addon', 'function') @@ -1133,7 +1178,7 @@ def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str] = None) Query database for gruenlandtemperatursumme for given year or year https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. + Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Duengemaßnahmen zu bestimmen. Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. @@ -1199,7 +1244,7 @@ def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: in count = to_int(count) end = 0 start = end + count - query_params = {'database_item': item, 'func': 'max', 'timeframe': harmonize_timeframe_expression(timeframe), 'start': start, 'end': end} + query_params = {'database_item': item, 'func': 'max', 'timeframe': translate_timeframe(timeframe), 'start': start, 'end': end} return self._handle_tagesmitteltemperatur(**query_params) def wachstumsgradtage(self, item_path: str, year: Union[int, str] = None, method: int = 0, threshold: int = 10) -> Union[int, None]: @@ -1295,11 +1340,11 @@ def suspend(self, state: bool = False) -> bool: """ if state: - self.logger.warning("Plugin is set to 'suspended'. Queries to database will not be made until suspension is cancelled.") + self.logger.info("Plugin is set to 'suspended'. Queries to database will not be made until suspension is cleared.") self.suspended = True self._clear_queue() else: - self.logger.warning("Plugin suspension cancelled. Queries to database will be resumed.") + self.logger.info("Plugin suspension cleared. 
Queries to database will be resumed.") self.suspended = False # write back value to item, if one exists @@ -1333,8 +1378,8 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: # define start, end for verbrauch_jahreszeitraum_timedelta if 'timedelta' in query_params: timedelta = query_params.pop('timedelta') - today = datetime.date.today() - start_date = datetime.date(today.year, 1, 1) - relativedelta(years=timedelta) + today = self.shtime.today(offset=0) + start_date = self.shtime.beginning_of_year(offset=-timedelta) end_date = today - relativedelta(years=timedelta) start = (today - start_date).days end = (today - end_date).days @@ -1343,14 +1388,14 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: end = query_params['end'] # calculate consumption - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"called with {query_params=}") # get value for end and check it; query_params.update({'func': 'last', 'start': end, 'end': end}) value_end = self._query_item(**query_params)[0][1] - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{value_end=}") if value_end is None or value_end == 0: @@ -1359,36 +1404,37 @@ def _handle_verbrauch(self, query_params: dict) -> Union[None, float]: # get value for start and check it; query_params.update({'func': 'last', 'start': start, 'end': start}) value_start = self._query_item(**query_params)[0][1] - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{value_start=}") if value_start is None: - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"Error occurred during query. Return.") return if not value_start: - self.logger.info(f"No DB Entry found for requested start date. Looking for next recent DB entry.") + self.logger.info(f"No DB Entry of item={query_params['database_item'].path()} found for requested start date. Looking for next recent DB entry.") query_params.update({'func': 'next'}) value_start = self._query_item(**query_params)[0][1] - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"next recent value is {value_start=}") if not value_start: value_start = 0 - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"No start value available. 
Will be set to 0 as default") # calculate consumption consumption = value_end - value_start - if self.prepare_debug: - self.logger.debug(f"{consumption=}") if isinstance(consumption, float): if consumption.is_integer(): consumption = int(consumption) else: - consumption = round(consumption, 1) + consumption = round(consumption, 2) + + if self.debug_log.prepare: + self.logger.debug(f"{consumption=}") return consumption @@ -1402,7 +1448,7 @@ def _handle_verbrauch_serie(self, query_params: dict) -> list: for i in range(1, start): value = self._handle_verbrauch({'database_item': database_item, 'timeframe': timeframe, 'start': i + 1, 'end': i}) - ts_start, ts_end = get_start_end_as_timestamp(timeframe, i, i + 1) + ts_start, ts_end = self._get_start_end_as_timestamp(timeframe, i, i + 1) series.append([ts_end, value]) return series @@ -1420,33 +1466,34 @@ def _handle_zaehlerstand(self, query_params: dict) -> Union[float, int, None]: - Ergibt diese Abfrage keinen Wert, dann Rückgabe von None """ - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"called with {query_params=}") # get last value of timeframe query_params.update({'func': 'last'}) last_value = self._query_item(**query_params)[0][1] - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{last_value=}") if last_value is None: - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"Error occurred during query. Return.") return if not last_value: # get last value (next) before timeframe - self.logger.info(f"No DB Entry found for requested start date. Looking for next recent DB entry.") + if self.debug_log.prepare: + self.logger.debug(f"No DB entry for item={query_params['database_item'].path()} found for requested start date. Looking for next recent DB entry.") query_params.update({'func': 'next'}) last_value = self._query_item(**query_params)[0][1] - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"next recent value is {last_value=}") if isinstance(last_value, float): if last_value.is_integer(): last_value = int(last_value) else: - last_value = round(last_value, 1) + last_value = round(last_value, 2) return last_value @@ -1460,7 +1507,7 @@ def _handle_zaehlerstand_serie(self, query_params: dict) -> list: for i in range(1, start): value = self._handle_zaehlerstand({'database_item': database_item, 'timeframe': timeframe, 'start': i, 'end': i}) - ts_start = get_start_end_as_timestamp(timeframe, i, i)[0] + ts_start = self._get_start_end_as_timestamp(timeframe, i, i)[0] series.append([ts_start, value]) return series @@ -1476,30 +1523,29 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, :return: kaeltesumme """ - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"called with {database_item=}, {year=}, {month=}") # check validity of given year - if not valid_year(year): + if not self._valid_year(year): self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. 
Query cancelled.") return - # set default year - if not year: - year = 'current' + # get datetime of today + today = self.shtime.today(offset=0) # define year - if year == 'current': - if datetime.date.today() < datetime.date(int(datetime.date.today().year), 9, 21): - year = datetime.date.today().year - 1 + if not year or year == 'current': + if today < datetime.date(int(today.year), 9, 21): + year = today.year - 1 else: - year = datetime.date.today().year + year = today.year # define start_date and end_date if month is None: start_date = datetime.date(int(year), 9, 21) end_date = datetime.date(int(year) + 1, 3, 22) - elif valid_month(month): + elif self._valid_month(month): start_date = datetime.date(int(year), int(month), 1) end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) else: @@ -1507,7 +1553,6 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, return # define start / end - today = datetime.date.today() if start_date > today: self.logger.error(f"Start time for query of item={database_item.path()} is in future. Query cancelled.") return @@ -1519,10 +1564,10 @@ def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str] = None, return # get raw data as list - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug("try to get raw data") raw_data = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='avg_hour') - if self.execute_debug: + if self.debug_log.prepare: self.logger.debug(f"raw_value_list={raw_data=}") # calculate value @@ -1549,7 +1594,7 @@ def _handle_waermesumme(self, database_item: Item, year: Union[int, str] = None, # get raw data as list raw_data = self._prepare_waermesumme(database_item=database_item, year=year, month=month) - if self.execute_debug: + if self.debug_log.prepare: self.logger.debug(f"raw_value_list={raw_data=}") # set threshold to min 0 @@ -1578,7 +1623,7 @@ def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, # get raw data as list raw_data = self._prepare_waermesumme(database_item=database_item, year=year) - if self.execute_debug: + if self.debug_log.prepare: self.logger.debug(f"raw_data={raw_data}") # calculate value @@ -1591,7 +1636,7 @@ def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, for entry in raw_data: timestamp, value = entry if value > 0: - dt = datetime.datetime.fromtimestamp(timestamp / 1000) + dt = self._timestamp_to_datetime(timestamp / 1000) if dt.month == 1: value = value * 0.5 elif dt.month == 2: @@ -1611,24 +1656,22 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str] = :return: wachstumsgradtage """ - # set default year - if not year: - year = 'current' - - if not valid_year(year): + if not self._valid_year(year): self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # get datetime of today + today = self.shtime.today(offset=0) + # define year - if year == 'current': - year = datetime.date.today().year + if not year or year == 'current': + year = today.year # define start_date, end_date start_date = datetime.date(int(year), 1, 1) end_date = datetime.date(int(year), 9, 21) # check start_date - today = datetime.date.today() if start_date > today: self.logger.info(f"Start time for query of item={database_item.path()} is in future. 
Query cancelled.") return @@ -1643,8 +1686,8 @@ def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str] = return # get raw data as list - raw_data = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax_hour') - if self.execute_debug: + raw_data = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax_hour') + if self.debug_log.prepare: self.logger.debug(f"raw_value_list={raw_data}") # calculate value @@ -1709,24 +1752,22 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str] = N :return: list of temperatures """ - # set default year - if not year: - year = 'current' - - if not valid_year(year): + if not self._valid_year(year): self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # get datetime of today + today = self.shtime.today(offset=0) + # define year - if year == 'current': - year = datetime.date.today().year + if not year or year == 'current': + year = today.year # define start_date, end_date start_date = datetime.date(int(year), 1, 1) end_date = datetime.date(int(year), 12, 31) # check start_date - today = datetime.date.today() if start_date > today: self.logger.info(f"Start time for query of item={database_item.path()} is in future. Query cancelled.") return @@ -1742,7 +1783,7 @@ def _handle_temperaturserie(self, database_item: Item, year: Union[int, str] = N # get raw data as list temp_list = self._prepare_value_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) - if self.execute_debug: + if self.debug_log.prepare: self.logger.debug(f"{temp_list=}") return temp_list @@ -1751,23 +1792,22 @@ def _prepare_waermesumme(self, database_item: Item, year: Union[int, str] = None """Prepares raw data for waermesumme""" # check validity of given year - if not valid_year(year): + if not self._valid_year(year): self.logger.error(f"Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return - # set default year - if not year: - year = 'current' + # get datetime of today + today = self.shtime.today(offset=0) # define year - if year == 'current': - year = datetime.date.today().year + if not year or year == 'current': + year = today.year # define start_date, end_date if month is None: start_date = datetime.date(int(year), 1, 1) end_date = datetime.date(int(year), 9, 21) - elif valid_month(month): + elif self._valid_month(month): start_date = datetime.date(int(year), int(month), 1) end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) else: @@ -1775,7 +1815,6 @@ def _prepare_waermesumme(self, database_item: Item, year: Union[int, str] = None return # check start_date - today = datetime.date.today() if start_date > today: self.logger.info(f"Start time for query of item={database_item.path()} is in future. 
Query cancelled.") return @@ -1814,12 +1853,12 @@ def _prepare_value_list(self, database_item: Item, timeframe: str, start: int, e def _create_raw_value_dict(block: str) -> dict: """ create dict of datetimes (per day or hour) and values based on database query result in format {'datetime1': [values]}, 'datetime1': [values], ..., 'datetimex': [values]} - :param block: defined the increment of datetimes, default is hour, furhter possible is 'day' + :param block: defined the increment of datetimes, default is hour, further possible is 'day' """ _value_dict = {} for _entry in raw_data: - dt = datetime.datetime.utcfromtimestamp(_entry[0] / 1000) + dt = self._timestamp_to_datetime(_entry[0] / 1000) dt = dt.replace(minute=0, second=0, microsecond=0) if block == 'day': dt = dt.replace(hour=0) @@ -1843,15 +1882,18 @@ def _create_value_list_timestamp_value(option: str) -> list: _value_list = [] # create nested list with timestamp, avg_value per hour/day for entry in value_dict: - _timestamp = datetime_to_timestamp(entry) + _timestamp = self._datetime_to_timestamp(entry) if option == 'first': _value_list.append([_timestamp, value_dict[entry][0]]) elif option == 'avg': - _value_list.append([_timestamp, round(sum(value_dict[entry]) / len(value_dict[entry]), 1)]) + _value_list.append([_timestamp, round(sum(value_dict[entry]) / len(value_dict[entry]), 2)]) elif option == 'minmax': _value_list.append([_timestamp, min(value_dict[entry]), max(value_dict[entry])]) return _value_list + if self.debug_log.prepare: + self.logger.debug(f'called with database_item={database_item.path()}, {timeframe=}, {start=}, {end=}, {ignore_value_list=}, {method=}') + # check method if method in ['avg_day', 'avg_hour', 'minmax_day', 'minmax_hour', 'first_day', 'first_hour']: _method, _block = method.split('_') @@ -1864,18 +1906,18 @@ def _create_value_list_timestamp_value(option: str) -> list: # get raw data from database raw_data = self._query_item(func='raw', database_item=database_item, timeframe=timeframe, start=start, end=end, ignore_value_list=ignore_value_list) - if raw_data in [[[None, None]], [[0, 0]]]: - self.logger.warning("no valid data from database query received during _prepare_value_list. Aborting...") + if raw_data == [[None, None]] or raw_data == [[0, 0]]: + self.logger.info(f"no valid data from database query for item={database_item.path()} received during _prepare_value_list. Aborting...") return # create nested dict with values value_dict = _create_raw_value_dict(block=_block) - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{value_dict=}") # return value list result = _create_value_list_timestamp_value(option=_method) - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{method=}, {result=}") return result @@ -1937,33 +1979,9 @@ def _get_db_parameter(self) -> bool: else: return True - def _initialize_db(self) -> bool: - """ - Initializes database connection - - :return: Status of initialization - """ - - try: - if not self._db.connected(): - # limit connection requests to 20 seconds. 
- current_time = time.time() - time_delta_last_connect = current_time - self.last_connect_time - if time_delta_last_connect > 20: - self.last_connect_time = time.time() - self._db.connect() - else: - self.logger.error(f"_initialize_db: Database reconnect suppressed: Delta time: {time_delta_last_connect}") - return False - except Exception as e: - self.logger.critical(f"_initialize_db: Database: Initialization failed: {e}") - return False - else: - return True - def _check_db_connection_setting(self) -> None: """ - Check Setting of DB connection for stable use. + Check Setting of mysql connection for stable use. """ try: connect_timeout = int(self._get_db_connect_timeout()[1]) @@ -1998,8 +2016,8 @@ def _get_oldest_log(self, item: Item) -> Union[None, int]: self.item_cache[item] = {} self.item_cache[item]['oldest_log'] = oldest_log - if self.prepare_debug: - self.logger.debug(f"_get_oldest_log for item {item.path()} = {oldest_log}") + if self.debug_log.prepare: + self.logger.debug(f"_get_oldest_log for item={item.path()} = {oldest_log}") return oldest_log @@ -2024,7 +2042,7 @@ def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: oldest_log = self._get_oldest_log(item) if oldest_log is None: validity = True - self.logger.error(f"oldest_log for item {item.path()} could not be read; value is set to -999999999") + self.logger.error(f"oldest_log for item={item.path()} could not be read; value is set to -999999999") oldest_entry = self._read_log_timestamp(item_id, oldest_log) i += 1 if isinstance(oldest_entry, list) and isinstance(oldest_entry[0], tuple) and len(oldest_entry[0]) >= 4: @@ -2035,10 +2053,10 @@ def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: validity = True elif i == 10: validity = True - self.logger.error(f"oldest_value for item {item.path()} could not be read; value is set to -999999999") + self.logger.error(f"oldest_value for item={item.path()} could not be read; value is set to -999999999") - if self.prepare_debug: - self.logger.debug(f"_get_oldest_value for item {item.path()} = {_oldest_value}") + if self.debug_log.prepare: + self.logger.debug(f"_get_oldest_value for item={item.path()} = {_oldest_value}") return _oldest_value @@ -2080,7 +2098,7 @@ def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None] item_id = None return item_id - def _query_item(self, func: str, database_item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value_list=None, use_oldest_entry: bool = False) -> list: + def _query_item(self, func: str, database_item: Item, timeframe: str, start: int = None, end: int = 0, group: str = "", group2: str = "", ignore_value_list=None, use_oldest_entry: bool = False) -> list: """ Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. 
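# Illustrative sketch of the return contract of _query_item as described in its
# docstring: [[None, None]] signals an error, [[0, 0]] signals "no data in DB",
# anything else is a list of [timestamp, value] pairs. Names below are hypothetical.
ERROR_RESULT = [[None, None]]
NODATA_RESULT = [[0, 0]]

def describe_query_result(result):
    """Turn a _query_item-style result into a short human-readable summary."""
    if result == ERROR_RESULT:
        return 'query failed or was cancelled'
    if result == NODATA_RESULT:
        return 'no values in the requested timeframe'
    return f'{len(result)} [timestamp, value] pairs'

print(describe_query_result([[1672531200000, 21.5], [1672617600000, 22.0]]))  # 2 [timestamp, value] pairs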
@@ -2097,84 +2115,85 @@ def _query_item(self, func: str, database_item: Item, timeframe: str, start: int :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for no-data in DB """ - if self.prepare_debug: - self.logger.debug(f"called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}") + if self.debug_log.prepare: + self.logger.debug(f" called with {func=}, item={database_item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value_list=}, {use_oldest_entry=}") # set default result - default_result = [[None, None]] + error_result = [[None, None]] + nodata_result = [[0, 0]] # check correctness of timeframe if timeframe not in ALLOWED_QUERY_TIMEFRAMES: self.logger.error(f"Requested {timeframe=} for item={database_item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") - return default_result + return error_result # define start and end of query as timestamp in microseconds - ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) + ts_start, ts_end = self._get_start_end_as_timestamp(timeframe, start, end) oldest_log = self._get_oldest_log(database_item) if oldest_log is None: - return default_result + return error_result # check correctness of ts_start / ts_end if ts_start is None: ts_start = oldest_log if ts_end is None or ts_start > ts_end: - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{ts_start=}, {ts_end=}") self.logger.warning(f"Requested {start=} for item={database_item.path()} is not valid since {start=} < {end=} or end not given. Query cancelled.") - return default_result + return error_result # define item_id item_id = self._get_itemid(database_item) if not item_id: - self.logger.error(f"ItemId for item={database_item.path()} not found. Query cancelled.") - return default_result + self.logger.error(f"DB ItemId for item={database_item.path()} not found. Query cancelled.") + return error_result - if self.prepare_debug: - self.logger.debug(f"Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") + if self.debug_log.prepare: + self.logger.debug(f" Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start}/{self._timestamp_to_timestring(ts_start)} and end being timestamp={ts_end}/{self._timestamp_to_timestring(ts_end)}") # check if values for end time and start time are in database if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) - self.logger.info(f"Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return default_result + self.logger.info(f" Requested end time timestamp={ts_end}/{self._timestamp_to_timestring(ts_end)} of query for item={database_item.path()} is prior to oldest entry with timestamp={oldest_log}/{self._timestamp_to_timestring(oldest_log)}. 
Query cancelled.") + return error_result if ts_start < oldest_log: if self.use_oldest_entry or use_oldest_entry: - self.logger.info(f"Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") + self.logger.info(f" Requested start time timestamp={ts_start}/{self._timestamp_to_timestring(ts_start)} of query for item={database_item.path()} is prior to oldest entry with timestamp={oldest_log}/{self._timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") ts_start = oldest_log else: - self.logger.info(f"Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{database_item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") - return default_result + self.logger.info(f" Requested start time timestamp={ts_start}/{self._timestamp_to_timestring(ts_start)} of query for item={database_item.path()} is prior to oldest entry with timestamp={oldest_log}/{self._timestamp_to_timestring(oldest_log)}. Query cancelled.") + return error_result # prepare and do query query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value_list': ignore_value_list} query_result = self._query_log_timestamp(**query_params) - if self.prepare_debug: - self.logger.debug(f"result of '_query_log_timestamp' {query_result=}") + if self.debug_log.prepare: + self.logger.debug(f" result of '_query_log_timestamp' {query_result=}") # post process query_result if query_result is None: - self.logger.error(f"Error occurred during _query_item. Aborting...") - return default_result + self.logger.error(f"Error occurred during '_query_log_timestamp' of item={database_item.path()}. 
Aborting...") + return error_result if len(query_result) == 0: - self.logger.info(f"No values for item in requested timeframe in database found.") - return [[0, 0]] + self.logger.info(f" No values for item={database_item.path()} in requested timeframe between {ts_start}/{self._timestamp_to_timestring(ts_start)} and {ts_end}/{self._timestamp_to_timestring(ts_end)} in database found.") + return nodata_result result = [] for element in query_result: timestamp, value = element if timestamp is not None and value is not None: if isinstance(value, float): - value = round(value, 1) + value = round(value, 2) result.append([timestamp, value]) - if self.prepare_debug: - self.logger.debug(f"value for item={database_item.path()} with {query_params=}: {result}") + if self.debug_log.prepare: + self.logger.debug(f" value for item={database_item.path()} with {query_params=}: {result}") if not result: - self.logger.info(f"No values for item in requested timeframe in database found.") - return default_result + self.logger.info(f" No values for item={database_item.path()} in requested timeframe between {ts_start}/{self._timestamp_to_timestring(ts_start)} and {ts_end}/{self._timestamp_to_timestring(ts_end)} in database found.") + return nodata_result return result @@ -2201,6 +2220,28 @@ def _init_cache_dicts(self) -> None: YEAR: {} } + def _clean_item_cache(self, item: Union[str, Item]) -> None: + """set cached values for item to None""" + + if isinstance(item, str): + item = self.items.return_item(item) + + if not isinstance(item, Item): + return + + database_item = self.get_item_config(item).get('database_item') + + if database_item: + for timeframe in self.previous_values: + for cached_item in self.previous_values[timeframe]: + if cached_item == database_item: + self.previous_values[timeframe][cached_item] = None + + for timeframe in self.current_values: + for cached_item in self.current_values[timeframe]: + if cached_item == database_item: + self.current_values[timeframe][cached_item] = {} + def _clear_queue(self) -> None: """ Clear working queue @@ -2209,11 +2250,108 @@ def _clear_queue(self) -> None: self.logger.info(f"Working queue will be cleared. Calculation run will end.") self.item_queue.queue.clear() + # ToDo: Check if still needed + def _queue_consumer_thread_startup(self): + """Start a thread to work item queue""" + + self.logger = logging.getLogger(__name__) + _name = 'plugins.' 
+ self.get_fullname() + '.work_item_queue' + + try: + self.queue_consumer_thread = threading.Thread(target=self.work_item_queue, name=_name, daemon=False) + self.queue_consumer_thread.start() + self.logger.debug("Thread for 'queue_consumer_thread' has been started") + except threading.ThreadError: + self.logger.error("Unable to launch thread for 'queue_consumer_thread'.") + self.queue_consumer_thread = None + + # ToDo: Check if still needed + def _queue_consumer_thread_shutdown(self): + """Shut down the thread to work item queue""" + + if self.queue_consumer_thread: + self.queue_consumer_thread.join() + if self.queue_consumer_thread.is_alive(): + self.logger.error("Unable to shut down 'queue_consumer_thread' thread") + else: + self.logger.info("Thread 'queue_consumer_thread' has been shut down.") + self.queue_consumer_thread = None + + def _get_start_end_as_timestamp(self, timeframe: str, start: Union[int, str, None], end: Union[int, str, None]) -> tuple: + """ + Provides start and end as timestamp in microseconds from timeframe with start and end + + :param timeframe: timeframe as week, month, year + :param start: beginning timeframe in x timeframes from now + :param end: end of timeframe in x timeframes from now + + :return: start time in timestamp in microseconds, end time in timestamp in microseconds + + """ + + ts_start = ts_end = None + + def get_query_timestamp(_offset) -> int: + if timeframe == 'week': + _date = self.shtime.beginning_of_week(offset=_offset) + elif timeframe == 'month': + _date = self.shtime.beginning_of_month(offset=_offset) + elif timeframe == 'year': + _date = self.shtime.beginning_of_year(offset=_offset) + else: + _date = self.shtime.today(offset=_offset) + + return self._datetime_to_timestamp(datetime.datetime.combine(_date, datetime.datetime.min.time())) * 1000 + + if isinstance(start, str) and start.isdigit(): + start = int(start) + if isinstance(start, int): + ts_start = get_query_timestamp(-start) + + if isinstance(end, str) and end.isdigit(): + end = int(end) + if isinstance(end, int): + ts_end = get_query_timestamp(-end + 1) + + return ts_start, ts_end + + def _datetime_to_timestamp(self, dt: datetime) -> int: + """Provides timestamp from given datetime""" + + return int(dt.replace(tzinfo=self.shtime.tzinfo()).timestamp()) + + def _timestamp_to_datetime(self, timestamp: int) -> datetime: + """Parse timestamp from db query to datetime""" + + return datetime.datetime.fromtimestamp(timestamp / 1000, tz=self.shtime.tzinfo()) + + def _timestamp_to_timestring(self, timestamp: int) -> str: + """Parse timestamp from db query to string representing date and time""" + + return self._timestamp_to_datetime(timestamp).strftime('%Y-%m-%d %H:%M:%S') + + def _valid_year(self, year: Union[int, str]) -> bool: + """Check if given year is digit and within allowed range""" + + if ((isinstance(year, int) or (isinstance(year, str) and year.isdigit())) and ( + 1980 <= int(year) <= self.shtime.today(offset=0).year)) or (isinstance(year, str) and year == 'current'): + return True + else: + return False + + def _valid_month(self, month: Union[int, str]) -> bool: + """Check if given month is digit and within allowed range""" + + if (isinstance(month, int) or (isinstance(month, str) and month.isdigit())) and (1 <= int(month) <= 12): + return True + else: + return False + ################################# # Database Query Preparation ################################# - def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = None, group2: str = 
None, ignore_value_list=None) -> Union[list, None]: + def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = "", group2: str = "", ignore_value_list=None) -> Union[list, None]: """ Assemble a mysql query str and param dict based on given parameters, get query response and return it @@ -2230,7 +2368,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i """ # do debug log - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value_list=}") # define query parts @@ -2254,41 +2392,31 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i } _table_alias = { - 'avg': '', 'avg1': ') AS table1 ', - 'min': '', - 'max': '', 'max1': ') AS table1 ', - 'sum': '', - 'on': '', - 'integrate': '', 'sum_max': ') AS table1 ', 'sum_avg': ') AS table1 ', 'sum_min_neg': ') AS table1 ', 'diff_max': ') AS table1 ', - 'next': '', - 'raw': '', - 'first': '', - 'last': '', } _order = { - 'avg': 'time ASC ', - 'avg1': 'time ASC ', - 'min': 'time ASC ', - 'max': 'time ASC ', - 'max1': 'time ASC ', - 'sum': 'time ASC ', - 'on': 'time ASC ', - 'integrate': 'time ASC ', - 'sum_max': 'time ASC ', - 'sum_avg': 'time ASC ', - 'sum_min_neg': 'time ASC ', - 'diff_max': 'time ASC ', - 'next': 'time DESC LIMIT 1 ', - 'raw': 'time ASC ', - 'first': 'time ASC LIMIT 1 ', - 'last': 'time DESC LIMIT 1 ', + 'avg1': 'ORDER BY time ASC ', + 'max1': 'ORDER BY time ASC ', + 'on': 'ORDER BY time ASC ', + 'sum_max': 'ORDER BY time ASC ', + 'sum_min_neg': 'ORDER BY time ASC ', + 'diff_max': 'ORDER BY time ASC ', + 'next': 'ORDER BY time DESC ', + 'raw': 'ORDER BY time ASC ', + 'first': 'ORDER BY time ASC ', + 'last': 'ORDER BY time DESC ', + } + + _limit = { + 'next': 'LIMIT 1 ', + 'first': 'LIMIT 1 ', + 'last': 'LIMIT 1 ', } _where = "item_id = :item_id AND time < :ts_start " if func == "next" else "item_id = :item_id AND time BETWEEN :ts_start AND :ts_end " @@ -2301,7 +2429,6 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", "hour": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m%d%H') ", - None: "", } _group_by_sqlite = { @@ -2310,7 +2437,6 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i "week": "GROUP BY strftime('%Y%W', date((time/1000),'unixepoch')) ", "day": "GROUP BY date((time/1000),'unixepoch') ", "hour": "GROUP BY strftime('%Y%m%d%H', datetime((time/1000),'unixepoch')) ", - None: "", } # select query parts depending in db driver @@ -2328,10 +2454,10 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i return # check correctness of group and group2 - if group not in _group_by: + if group and group not in _group_by: self.logger.error(f"Requested {group=} for item={item_id=} not defined. Query cancelled.") return - if group2 not in _group_by: + if group2 and group2 not in _group_by: self.logger.error(f"Requested {group2=} for item={item_id=} not defined. 
Query cancelled.") return @@ -2348,75 +2474,62 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i params.update({'ts_end': ts_end}) # assemble query - query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order[func]}{_table_alias[func]}{_group_by[group2]}".strip() + query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by.get(group, '')}{_order.get(func, '')}{_limit.get(func, '')}{_table_alias.get(func, '')}{_group_by.get(group2, '')}".strip() if self.db_driver.lower() == 'sqlite3': query = query.replace('IF', 'IIF') # do debug log - if self.prepare_debug: + if self.debug_log.prepare: self.logger.debug(f"{query=}, {params=}") # request database and return result return self._fetchall(query, params) - def _read_log_all(self, item_id: int): + def _read_log_oldest(self, item_id: int) -> int: """ - Read the oldest log record for given item + Read the oldest log record for given database ID - :param item_id: item_id to read the record for - :return: Log record for item_id + :param item_id: Database ID of item to read the record for + :return: timestamp of oldest log entry of given item_id """ - if self.prepare_debug: - self.logger.debug(f"called for {item_id=}") - - query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" params = {'item_id': item_id} - result = self._fetchall(query, params) - return result + query = "SELECT min(time) FROM log WHERE item_id = :item_id;" + return self._fetchall(query, params)[0][0] - def _read_log_oldest(self, item_id: int, cur=None) -> int: + def _read_log_newest(self, item_id: int) -> int: """ Read the oldest log record for given database ID :param item_id: Database ID of item to read the record for - :type item_id: int - :param cur: A database cursor object if available (optional) - - :return: Log record for the database ID + :return: timestamp of newest log entry of given item_id """ params = {'item_id': item_id} - query = "SELECT min(time) FROM log WHERE item_id = :item_id;" - return self._fetchall(query, params, cur=cur)[0][0] + query = "SELECT max(time) FROM log WHERE item_id = :item_id;" + return self._fetchall(query, params)[0][0] - def _read_log_timestamp(self, item_id: int, timestamp: int, cur=None) -> Union[list, None]: + def _read_log_timestamp(self, item_id: int, timestamp: int) -> Union[list, None]: """ Read database log record for given database ID :param item_id: Database ID of item to read the record for - :type item_id: int :param timestamp: timestamp for the given value - :type timestamp: int - :param cur: A database cursor object if available (optional) - :return: Log record for the database ID at given timestamp """ params = {'item_id': item_id, 'timestamp': timestamp} query = "SELECT * FROM log WHERE item_id = :item_id AND time = :timestamp;" - return self._fetchall(query, params, cur=cur) + return self._fetchall(query, params) - def _read_item_table(self, item_id: int = None, item_path: str = None): + def _read_item_table(self, item_id: int = None, item_path: str = None) -> Union[list, None]: """ Read item table :param item_id: unique ID for item within database :param item_path: item_path for Item within the database - :return: Data for the selected item - :rtype: tuple """ columns_entries = ('id', 'name', 'time', 'val_str', 'val_num', 'val_bool', 'changed') @@ -2456,9 +2569,34 @@ def _get_db_net_read_timeout(self) -> list: query = "SHOW GLOBAL VARIABLES LIKE 'net_read_timeout'" return self._fetchone(query) - 
####################### - # Database Queries - ####################### + ############################### + # Database specific stuff + ############################### + + def _initialize_db(self) -> bool: + """ + Initializes database connection + + :return: Status of initialization + """ + + try: + if not self._db.connected(): + # limit connection requests to 20 seconds. + time_since_last_connect = time.time() - self.last_connect_time + if time_since_last_connect > 20: + self.last_connect_time = time.time() + self.logger.debug(f"Connect to database.") + self._db.connect() + else: + self.logger.warning(f"Database reconnect suppressed since last connection is less then 20sec ago.") + return False + + except Exception as e: + self.logger.critical(f"Initialization of Database Connection failed: {e}") + return False + + return True def _execute(self, query: str, params: dict = None, cur=None) -> list: if params is None: @@ -2476,13 +2614,15 @@ def _fetchall(self, query: str, params: dict = None, cur=None) -> list: if params is None: params = {} - return self._query(self._db.fetchall, query, params, cur) + tuples = self._query(self._db.fetchall, query, params, cur) + return None if tuples is None else list(tuples) - def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: + # ToDo: Check if still needed. + def _query_geht(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: if params is None: params = {} - if self.sql_debug: + if self.debug_log.sql: self.logger.debug(f"Called with {query=}, {params=}, {cur=}") if not self._initialize_db(): @@ -2492,26 +2632,103 @@ def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None if self._db.verify(5) == 0: self.logger.error("Connection to database not recovered.") return None - if not self._db.lock(300): - self.logger.error("Can't query due to fail to acquire lock.") + + if self.lock_db_for_query and not self._db.lock(300): + self.logger.error("Can't query database due to fail to acquire lock.") return None query_readable = re.sub(r':([a-z_]+)', r'{\1}', query).format(**params) + # do periodic commit to get latest data during fetch + time_since_last_commit = time.time() - self.last_commit_time + if time_since_last_commit > self.refresh_cycle: + self.last_commit_time = time.time() + self.logger.debug(f"Commit to database for getting updated data. 
time_since_last_commit={int(time_since_last_commit)}") + self._db.commit() + + # fetch data try: tuples = fetch(query, params, cur=cur) except Exception as e: - self.logger.error(f"Error for query '{query_readable}': {e}") + self.logger.error(f"Error '{e}' for query={query_readable} occurred.") tuples = None pass finally: - if cur is None: + if cur is None and self.lock_db_for_query: self._db.release() - if self.sql_debug: - self.logger.debug(f"Result of '{query_readable}': {tuples}") + if self.debug_log.sql: + self.logger.debug(f"Result of query={query_readable}: {tuples}") + + return tuples + + def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: + if params is None: + params = {} + + if self.debug_log.sql: + self.logger.debug(f"Called with {query=}, {params=}, {cur=}") + + # recover connection to database + if cur is None or not self._db.connected(): + verify_conn = self._db.verify(retry=5) + if verify_conn == 0: + self.logger.error("Connection to database NOT recovered.") + return None + else: + if self.debug_log.sql: + self.logger.debug("Connection to database recovered.") + + # lock database if required + if cur is None and self.lock_db_for_query: + if not self._db.lock(300): + self.logger.error("Can't query database due to fail to acquire lock.") + return None + + # fetch data + query_readable = re.sub(r':([a-z_]+)', r'{\1}', query).format(**params) + try: + tuples = fetch(query, params, cur=cur) + except Exception as e: + self.logger.error(f"Error '{e}' for query={query_readable} occurred.") + tuples = None + pass + + # release database + if cur is None and self.lock_db_for_query: + self._db.release() + + # close connection + self._db.close() + + if self.debug_log.sql: + self.logger.debug(f"Result of query={query_readable}: {tuples}") + return tuples + +@dataclass +class DebugLogOptions: + """Class to simplify use and handling of debug log options.""" + + log_level: InitVar[int] = 10 + parse: bool = True # Enable / Disable debug logging for method 'parse item' + execute: bool = True # Enable / Disable debug logging for method 'execute items' + ondemand: bool = True # Enable / Disable debug logging for method 'handle_ondemand' + onchange: bool = True # Enable / Disable debug logging for method 'handle_onchange' + prepare: bool = True # Enable / Disable debug logging for query preparation + sql: bool = True # Enable / Disable debug logging for sql stuff + + def __post_init__(self, log_level): + if log_level > 10: + self.parse = False + self.execute = False + self.ondemand = False + self.onchange = False + self.prepare = False + self.sql = False + + ####################### # Helper functions ####################### @@ -2539,42 +2756,11 @@ def params_to_dict(string: str) -> Union[dict, None]: return None elif key in ('start', 'end', 'count') and not isinstance(res_dict[key], int): return None - elif key in 'year': - if not valid_year(res_dict[key]): - return None - elif key in 'month': - if not valid_month(res_dict[key]): - return None return res_dict -def valid_year(year: Union[int, str]) -> bool: - """Check if given year is digit and within allowed range""" - - if ((isinstance(year, int) or (isinstance(year, str) and year.isdigit())) and ( - 1980 <= int(year) <= datetime.date.today().year)) or (isinstance(year, str) and year == 'current'): - return True - else: - return False - - -def valid_month(month: Union[int, str]) -> bool: - """Check if given month is digit and within allowed range""" - - if (isinstance(month, int) or (isinstance(month, 
str) and month.isdigit())) and (1 <= int(month) <= 12): - return True - else: - return False - - -def timestamp_to_timestring(timestamp: int) -> str: - """Parse timestamp from db query to string representing date and time""" - - return datetime.datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') - - -def harmonize_timeframe_expression(timeframe: str) -> str: - """harmonizes different expression of timeframe""" +def translate_timeframe(timeframe: str) -> str: + """translates different expression of timeframe""" lookup = { 'tag': 'day', @@ -2594,7 +2780,7 @@ def harmonize_timeframe_expression(timeframe: str) -> str: return lookup.get(timeframe) -def convert_timeframe(timeframe_in: str, timeframe_out: str) -> int: +def timeframe_to_timeframe(timeframe_in: str, timeframe_out: str) -> int: """Convert timeframe to timeframe like month in years or years in days""" _h_in_d = 24 @@ -2641,104 +2827,6 @@ def convert_timeframe(timeframe_in: str, timeframe_out: str) -> int: return lookup[timeframe_in][timeframe_out] -def get_start_end_as_timestamp(timeframe: str, start: Union[int, str, None], end: Union[int, str, None]) -> tuple: - """ - Provides start and end as timestamp in microseconds from timeframe with start and end - - :param timeframe: timeframe as week, month, year - :param start: beginning timeframe in x timeframes from now - :param end: end of timeframe in x timeframes from now - - :return: start time in timestamp in microseconds, end time in timestamp in microseconds - - """ - - def get_start() -> datetime: - if timeframe == 'week': - return _week_beginning() - elif timeframe == 'month': - return _month_beginning() - elif timeframe == 'year': - return _year_beginning() - else: - return _day_beginning() - - def get_end() -> datetime: - if timeframe == 'week': - return _week_end() - elif timeframe == 'month': - return _month_end() - elif timeframe == 'year': - return _year_end() - else: - return _day_end() - - def _year_beginning(delta: int = start) -> datetime: - """provides datetime of beginning of year of today minus x years""" - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(month=1, day=1) - relativedelta(years=delta) - - def _year_end(delta: int = end) -> datetime: - """provides datetime of end of year of today minus x years""" - - return _year_beginning(delta) + relativedelta(years=1) - - def _month_beginning(delta: int = start) -> datetime: - """provides datetime of beginning of month minus x month""" - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt.replace(day=1) - relativedelta(months=delta) - - def _month_end(delta: int = end) -> datetime: - """provides datetime of end of month minus x month""" - - return _month_beginning(delta) + relativedelta(months=1) - - def _week_beginning(delta: int = start) -> datetime: - """provides datetime of beginning of week minus x weeks""" - - _dt = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - return _dt - relativedelta(days=(datetime.date.today().weekday() + (delta * 7))) - - def _week_end(delta: int = end) -> datetime: - """provides datetime of end of week minus x weeks""" - - return _week_beginning(delta) + relativedelta(days=7) - - def _day_beginning(delta: int = start) -> datetime: - """provides datetime of beginning of today minus x days""" - - return datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time()) - relativedelta(days=delta) - - def 
_day_end(delta: int = end) -> datetime: - """provides datetime of end of today minus x days""" - - return _day_beginning(delta) + relativedelta(days=1) - - if isinstance(start, str) and start.isdigit(): - start = int(start) - if isinstance(start, int): - ts_start = datetime_to_timestamp(get_start()) * 1000 - else: - ts_start = None - - if isinstance(end, str) and end.isdigit(): - end = int(end) - if isinstance(end, int): - ts_end = datetime_to_timestamp(get_end()) * 1000 - else: - ts_end = None - - return ts_start, ts_end - - -def datetime_to_timestamp(dt: datetime) -> int: - """Provides timestamp from given datetime""" - - return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp()) - - def to_int(arg) -> Union[int, None]: try: return int(arg) @@ -2776,4 +2864,3 @@ def split_sting_letters_numbers(string) -> list: ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] - diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py deleted file mode 100755 index 00b54a8cf..000000000 --- a/db_addon/item_attributes_master.py +++ /dev/null @@ -1,299 +0,0 @@ -# !/usr/bin/env python -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# Copyright 2023 Michael Wenzel -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# AVM for SmartHomeNG. https://github.com/smarthomeNG// -# -# This plugin is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This plugin is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . 
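# Illustrative sketch of the start/end offset semantics used by the
# _get_start_end_as_timestamp helper above (assumed behaviour, reduced to
# timeframe='day' and plain local dates instead of the plugin's shtime object).
import datetime

def day_window_ms(start: int, end: int) -> tuple:
    """start/end count whole days back from today; returns (begin, end) in milliseconds."""
    today = datetime.date.today()
    begin_date = today - datetime.timedelta(days=start)
    end_date = today - datetime.timedelta(days=end - 1)  # mirrors the offset of -end + 1
    def to_ms(d):
        return int(datetime.datetime.combine(d, datetime.time.min).timestamp()) * 1000
    return to_ms(begin_date), to_ms(end_date)

# start=1, end=1 covers yesterday: from yesterday 00:00 up to (not including) today 00:00
print(day_window_ms(1, 1))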
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # - -import ruamel.yaml - -FILENAME_ATTRIBUTES = 'item_attributes.py' - -FILENAME_PLUGIN = 'plugin.yaml' - -ITEM_ATTRIBUTES = { - 'db_addon_fct': { - 'verbrauch_heute': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, - 'verbrauch_woche': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, - 'verbrauch_monat': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, - 'verbrauch_jahr': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, - 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, - 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, - 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, - 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, - 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, - 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, - 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, - 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, - 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, - 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, - 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, - 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, - 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, - 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 
'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, - 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, - 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, - 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, - 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, - 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, - 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, - 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, - 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, - 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, - 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren'}, - 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, - 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, - 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, - 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, - 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, - 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, - 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, - 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, - 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, - 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, - 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, - 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, - 'minmax_last_24h_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, - 'minmax_last_24h_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, - 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, - 'minmax_last_7d_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, - 'minmax_last_7d_avg': {'cat': 'wertehistorie', 
'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, - 'minmax_heute_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, - 'minmax_heute_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, - 'minmax_heute_avg': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Durschnittswert seit Tagesbeginn'}, - 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, - 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, - 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, - 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, - 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, - 'minmax_woche_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, - 'minmax_woche_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, - 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, - 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 
'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, - 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, - 'minmax_monat_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, - 'minmax_monat_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, - 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, - 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, - 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, - 'minmax_jahr_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, - 'minmax_jahr_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, - 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, - 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, - 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, - 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, - 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 
'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, - 'serie_minmax_monat_min_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_max_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, - 'serie_minmax_woche_min_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_max_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, - 'serie_minmax_tag_min_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_max_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, - 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, - 'serie_verbrauch_tag_30d': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, - 'serie_verbrauch_woche_30w': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, - 'serie_verbrauch_monat_18m': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, - 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, - 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, - 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, - 'serie_waermesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, - 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, - 'serie_tagesmittelwert_0d': {'cat': 'serie', 'sub_cat': 'mittel_d', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 
'Tagesmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'sub_cat': 'mittel_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_stunde_30_0d': {'cat': 'serie', 'sub_cat': 'mittel_h1', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, - 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'sub_cat': 'mittel_d_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, - 'general_oldest_value': {'cat': 'gen', 'sub_cat': None, 'item_type': 'num', 'calc': 'no', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, - 'general_oldest_log': {'cat': 'gen', 'sub_cat': None, 'item_type': 'list', 'calc': 'no', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, - 'kaeltesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, - 'waermesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, - 'gruenlandtempsumme': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, - 'tagesmitteltemperatur': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, - 'wachstumsgradtage': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, - 'db_request': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, - 'minmax': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)'}, - 'minmax_last': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory, end=mandatory)'}, - 'verbrauch': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory 
end=mandatory)'}, - 'zaehlerstand': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)'}, - }, - 'db_addon_info': { - 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': 'no', 'params': False, 'description': 'Version der verbundenen Datenbank'}, - }, - 'db_addon_admin': { - 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, - 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, - 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, - }, -} - -FILE_HEADER = """\ -# !/usr/bin/env python -# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# Copyright 2023 Michael Wenzel -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# DatabaseAddOn for SmartHomeNG. https://github.com/smarthomeNG// -# -# This plugin is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This plugin is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . 
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # - - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# -# -# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributes_master.py -# -# -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # - -""" - -def get_attrs(sub_dict: dict = {}) -> list: - attributes = [] - for entry in ITEM_ATTRIBUTES: - for db_addon_fct in ITEM_ATTRIBUTES[entry]: - if sub_dict.items() <= ITEM_ATTRIBUTES[entry][db_addon_fct].items(): - attributes.append(db_addon_fct) - return attributes - -def export_item_attributes_py(): - ATTRS = dict() - ATTRS['ALL_ONCHANGE_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'onchange'}) - ATTRS['ALL_DAILY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'daily'}) - ATTRS['ALL_WEEKLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'weekly'}) - ATTRS['ALL_MONTHLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'monthly'}) - ATTRS['ALL_YEARLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'yearly'}) - ATTRS['ALL_NEED_PARAMS_ATTRIBUTES'] = get_attrs(sub_dict={'params': True}) - ATTRS['ALL_VERBRAUCH_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'verbrauch'}) - ATTRS['VERBRAUCH_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'onchange'}) - ATTRS['VERBRAUCH_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'timeframe'}) - ATTRS['VERBRAUCH_ATTRIBUTES_ROLLING'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'rolling'}) - ATTRS['VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'jahrzeit'}) - ATTRS['ALL_ZAEHLERSTAND_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'zaehler'}) - ATTRS['ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'zaehler', 'sub_cat': 'timeframe'}) - ATTRS['ALL_HISTORIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'wertehistorie'}) - ATTRS['HISTORIE_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'onchange'}) - ATTRS['HISTORIE_ATTRIBUTES_LAST'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'last'}) - ATTRS['HISTORIE_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'timeframe'}) - ATTRS['ALL_TAGESMITTEL_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'tagesmittel'}) - ATTRS['TAGESMITTEL_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'onchange'}) - ATTRS['TAGESMITTEL_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'timeframe'}) - ATTRS['ALL_SERIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'serie'}) - ATTRS['SERIE_ATTRIBUTES_MINMAX'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'minmax'}) - ATTRS['SERIE_ATTRIBUTES_ZAEHLERSTAND'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'zaehler'}) - ATTRS['SERIE_ATTRIBUTES_VERBRAUCH'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'verbrauch'}) - ATTRS['SERIE_ATTRIBUTES_SUMME'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'summe'}) - ATTRS['SERIE_ATTRIBUTES_MITTEL_D'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_d'}) - ATTRS['SERIE_ATTRIBUTES_MITTEL_H'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h'}) - ATTRS['SERIE_ATTRIBUTES_MITTEL_H1'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h1'}) - ATTRS['SERIE_ATTRIBUTES_MITTEL_D_H'] = get_attrs(sub_dict={'cat': 
'serie', 'sub_cat': 'mittel_d_h'}) - ATTRS['ALL_GEN_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'gen'}) - ATTRS['ALL_COMPLEX_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'complex'}) - - # create file and write header - f = open(FILENAME_ATTRIBUTES, "w") - f.write(FILE_HEADER) - f.close() - - # write avm_data_types - for attr, alist in ATTRS.items(): - with open(FILENAME_ATTRIBUTES, "a") as f: - print(f'{attr} = {alist!r}', file=f) - - print('item_attributes.py successfully created!') - -def create_plugin_yaml_item_attribute_valids(): - """Create valid_list of db_addon_fct based on master dict""" - - valid_list_str = """ # NOTE: valid_list is automatically created by using item_attributes_master.py""" - valid_list_desc_str = """ # NOTE: valid_list_description is automatically created by using item_attributes_master.py""" - valid_list_item_type = """ # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py""" - valid_list_calculation = """ # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py""" - - for db_addon_fct in ITEM_ATTRIBUTES[attribute]: - valid_list_str = f"""{valid_list_str}\n\ - - {db_addon_fct!r:<40}""" - - valid_list_desc_str = f"""{valid_list_desc_str}\n\ - - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['description']:<}'""" - - valid_list_item_type = f"""{valid_list_item_type}\n\ - - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['item_type']:<}'""" - - valid_list_calculation = f"""{valid_list_calculation}\n\ - - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['calc']:<}'""" - - valid_list_calculation = f"""{valid_list_calculation}\n\r""" - - return valid_list_str, valid_list_desc_str, valid_list_item_type, valid_list_calculation - -def update_plugin_yaml_item_attributes(): - """Update 'valid_list', 'valid_list_description', 'valid_list_item_type' and 'valid_list_calculation' of item attributes in plugin.yaml""" - - yaml = ruamel.yaml.YAML() - yaml.indent(mapping=4, sequence=4, offset=4) - yaml.width = 200 - yaml.allow_unicode = True - yaml.preserve_quotes = False - - valid_list_str, valid_list_desc_str, valid_list_item_type_str, valid_list_calc_str = create_plugin_yaml_item_attribute_valids() - - with open(FILENAME_PLUGIN, 'r', encoding="utf-8") as f: - data = yaml.load(f) - - if data.get('item_attributes', {}).get(attribute): - data['item_attributes'][attribute]['valid_list'] = yaml.load(valid_list_str) - data['item_attributes'][attribute]['valid_list_description'] = yaml.load(valid_list_desc_str) - data['item_attributes'][attribute]['valid_list_item_type'] = yaml.load(valid_list_item_type_str) - data['item_attributes'][attribute]['valid_list_calculation'] = yaml.load(valid_list_calc_str) - - with open(FILENAME_PLUGIN, 'w', encoding="utf-8") as f: - yaml.dump(data, f) - print(f"Successfully updated Attribute '{attribute}' in plugin.yaml!") - else: - print(f"Attribute '{attribute}' not defined in plugin.yaml") - -if __name__ == '__main__': - export_item_attributes_py() - for attribute in ITEM_ATTRIBUTES: - update_plugin_yaml_item_attributes() diff --git a/db_addon/locale.yaml b/db_addon/locale.yaml old mode 100755 new mode 100644 diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml old mode 100755 new mode 100644 index 98115af82..c050022f5 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: 
https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.2.2 # Plugin version (must match the version specified in __init__.py) + version: 1.2.3 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -62,6 +62,21 @@ parameters: de: 'True: Verwendung des ältesten Eintrags des Items in der Datenbank, falls der Start des Abfragezeitraums zeitlich vor diesem Eintrag liegt False: Abbruch der Datenbankabfrage' en: 'True: Use of oldest entry of item in database, if start of query is prior to oldest entry False: Cancel query' + lock_db_for_query: + type: bool + default: false + description: + de: 'Sperren der Datenbank während der Abfrage' + en: 'Lock the database during queries' + + refresh_cycle: + type: int + default: 60 + description: + de: 'Zyklus, in dem die Datenbank neu gelesen wird' + en: 'Cycle to update database' + + item_attributes: db_addon_fct: type: str diff --git a/db_addon/requirements.txt b/db_addon/requirements.txt old mode 100755 new mode 100644 diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst old mode 100755 new mode 100644 diff --git a/db_addon/webif/__init__.py b/db_addon/webif/__init__.py old mode 100755 new mode 100644 index 604f6b147..0bc74ca9a --- a/db_addon/webif/__init__.py +++ b/db_addon/webif/__init__.py @@ -59,7 +59,7 @@ def __init__(self, webif_dir, plugin): self.tplenv = self.init_template_environment() @cherrypy.expose - def index(self, reload=None): + def index(self, reload=None, action=None, item_path=None, active=None, option=None): """ Build index.html for cherrypy @@ -70,6 +70,19 @@ def index(self, reload=None): tmpl = self.tplenv.get_template('index.html') + if action is not None: + if action == "recalc_item" and item_path is not None: + self.logger.info(f"Recalc of item={item_path} called via WebIF. Item put to Queue for new calculation.") + self.plugin.execute_items(option='item', item=item_path) + + elif action == "clean_item_cache" and item_path is not None: + self.logger.info(f"Clean item cache of item={item_path} called via WebIF. 
Plugin item value cache will be cleaned.") + self.plugin._clean_item_cache(item=item_path) + + elif action == "_activate_item_calculation" and item_path is not None and active is not None: + self.logger.info(f"Item calculation of item={item_path} will be set to {bool(int(active))} via WebIF.") + self.plugin._activate_item_calculation(item=item_path, active=bool(int(active))) + return tmpl.render(p=self.plugin, webif_pagelength=self.plugin.get_parameter_value('webif_pagelength'), suspended='true' if self.plugin.suspended else 'false', @@ -78,7 +91,7 @@ def index(self, reload=None): plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(), plugin_info=self.plugin.get_info(), - maintenance=True if self.plugin.log_level == 10 else False, + maintenance=True if self.plugin.log_level < 20 else False, ) @cherrypy.expose @@ -94,24 +107,33 @@ def get_data_html(self, dataSet=None): if dataSet is None: # get the new data data = dict() - data['items'] = {} + data['items'] = {} for item in self.plugin.get_item_list('db_addon', 'function'): - data['items'][item.id()] = {} - data['items'][item.id()]['value'] = item.property.value - data['items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') - data['items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') + data['items'][item.path()] = {} + data['items'][item.path()]['value'] = item.property.value + data['items'][item.path()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') + data['items'][item.path()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') data['plugin_suspended'] = self.plugin.suspended data['maintenance'] = True if self.plugin.log_level == 10 else False data['queue_length'] = self.plugin.queue_backlog() data['active_queue_item'] = self.plugin.active_queue_item + data['debug_log'] = {} + for debug in ['parse', 'execute', 'ondemand', 'onchange', 'prepare', 'sql']: + data['debug_log'][debug] = getattr(self.plugin.debug_log, debug) + try: return json.dumps(data, default=str) except Exception as e: self.logger.error(f"get_data_html exception: {e}") + @cherrypy.expose + def submit(self, cmd=None, param1=None, param2=None): + """Submit handler für Ajax""" + self.logger.warning(f"submit: {cmd=}, {param1=}, {param2=}") + @cherrypy.expose def recalc_all(self): self.logger.debug(f"recalc_all called") @@ -136,3 +158,69 @@ def activate(self): def suspend(self): self.logger.debug(f"suspend called") self.plugin.suspend(True) + + @cherrypy.expose + def debug_log_option(self, log: str = None, state: bool = None): + self.logger.warning(f"debug_log_option called with {log=}, {state=}") + _state = True if state == 'true' else False + setattr(self.plugin.debug_log, log, _state) + + @cherrypy.expose + def debug_log_option_parse_true(self): + self.logger.debug("debug_log_option_parse_true") + setattr(self.plugin.debug_log, 'parse', True) + + @cherrypy.expose + def debug_log_option_parse_false (self): + self.logger.debug("debug_log_option_parse_false") + setattr(self.plugin.debug_log, 'parse', False) + + @cherrypy.expose + def debug_log_option_execute_true(self): + self.logger.debug("debug_log_option_execute_true") + setattr(self.plugin.debug_log, 'execute', True) + + @cherrypy.expose + def debug_log_option_execute_false (self): + self.logger.debug("debug_log_option_execute_false") + setattr(self.plugin.debug_log, 'execute', False) + + @cherrypy.expose + def debug_log_option_ondemand_true(self): + 
self.logger.debug("debug_log_option_ondemand_true") + setattr(self.plugin.debug_log, 'ondemand', True) + + @cherrypy.expose + def debug_log_option_ondemand_false (self): + self.logger.debug("debug_log_option_ondemand_false") + setattr(self.plugin.debug_log, 'ondemand', False) + + @cherrypy.expose + def debug_log_option_onchange_true(self): + self.logger.debug("debug_log_option_onchange_true") + setattr(self.plugin.debug_log, 'onchange', True) + + @cherrypy.expose + def debug_log_option_onchange_false (self): + self.logger.debug("debug_log_option_onchange_false") + setattr(self.plugin.debug_log, 'onchange', False) + + @cherrypy.expose + def debug_log_option_prepare_true(self): + self.logger.debug("debug_log_option_prepare_true") + setattr(self.plugin.debug_log, 'prepare', True) + + @cherrypy.expose + def debug_log_option_prepare_false (self): + self.logger.debug("debug_log_option_prepare_false") + setattr(self.plugin.debug_log, 'prepare', False) + + @cherrypy.expose + def debug_log_option_sql_true(self): + self.logger.debug("debug_log_option_sql_true") + setattr(self.plugin.debug_log, 'sql', True) + + @cherrypy.expose + def debug_log_option_sql_false (self): + self.logger.debug("debug_log_option_sql_false") + setattr(self.plugin.debug_log, 'sql', False) \ No newline at end of file diff --git a/db_addon/webif/static/img/plugin_logo.png b/db_addon/webif/static/img/plugin_logo.png old mode 100755 new mode 100644 diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html old mode 100755 new mode 100644 index 62f398e70..99aeddaa5 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -35,6 +35,9 @@ } table th.last { width: 150px; + } + table th.aktion { + width: 100px; } table th.dict { width: 150px; @@ -101,6 +104,13 @@ document.getElementById('pause').disabled = true; } } + + { document.getElementById('debug_parse').checked = objResponse['debug_log']['parse']; } + { document.getElementById('debug_execute').checked = objResponse['debug_log']['execute']; } + { document.getElementById('debug_ondemand').checked = objResponse['debug_log']['ondemand']; } + { document.getElementById('debug_onchange').checked = objResponse['debug_log']['onchange']; } + { document.getElementById('debug_prepare').checked = objResponse['debug_log']['prepare']; } + { document.getElementById('debug_sql').checked = objResponse['debug_log']['sql']; } } @@ -156,8 +166,13 @@ { title: '{{ _('Letzter Change') }}', targets: [8], "className": "last" + }, + { + title: '{{ _('Aktionen') }}', + targets: [9], "className": "aktion" }].concat($.fn.dataTable.defaults.columnDefs), pageResize: resize}); + {% if maintenance %} mtable2 = $('#mtable2').DataTable( { columnDefs: [ @@ -180,6 +195,8 @@ catch (e) { console.warn("Datatable JS not loaded, showing standard table without reorder option " + e); } + + // Handler für Suspend (Play/Pause) Button if ({{ suspended }} == false) { document.getElementById('play').classList = 'btn btn-success btn-sm'; document.getElementById('play').disabled = true; @@ -192,6 +209,31 @@ document.getElementById('pause').classList = 'btn btn-danger btn-sm'; document.getElementById('pause').disabled = true; } + + // Handler für Formular - das "submit"-Element (Senden) wird abgefangen + $("#button_pressed").submit(function(e) { + + // keine HTML-Aktion ausführen (z.B. 
Formular senden) + e.preventDefault(); + + console.log('submit') + + // die Kennung des gedrückten Buttons per AJAX senden + $.post('submit', {button: $("#button").val()}, function(data) { + + console.log(data) + + // Zeile ermitteln + // var row = $("#button").val() + // var id = row + "_value" + + // nur die betroffene Zeile ändern. Der dritte Parameter muss mit der Tabellen-ID identisch sein. + // shngInserText(id, data.wert, 'maintable') + + }); + return false ; + }); + }); @@ -206,6 +248,7 @@ } } + + + {% endblock pluginscripts %} @@ -231,7 +282,7 @@ {{ _('Verbunden') }} - {% if p._db._connected %}{{ _('Ja') }}{% else %}{{ _('Nein') }}{% endif %} + {% if p._db._connected %}{{ _('Ja') }}{% else %}{{ _('Nein') }}{% endif %} {{ _('Treiber') }} {{ p.db_driver }} {{ _('Startup Delay') }} @@ -255,10 +306,35 @@ {% endif %} {% endfor %} - {{ _('Item in Berechnung') }} + {{ _('Item in Berechnung') }} {{ p.active_queue_item }} - {{ _('Arbeitsvorrat') }} - {{ p.queue_backlog }} {{ _('Items') }} + {{ _('Arbeitsvorrat') }} + {{ p.queue_backlog }} {{ _('Items') }} + {{ _('LogLevel') }} + + {{ p.log_level }} + {% if p.log_level == 10 %} + {{ (' || ') }} +

    + {% endif %} + @@ -282,11 +358,7 @@ {% set tabcount = 3 %} {% set tab1title = "" ~ plugin_shortname ~ " Items (" ~ item_count ~ ")" %} -{% if maintenance %} - {% set tab2title = "" ~ plugin_shortname ~ " Maintenance" %} -{% else %} - {% set tab2title = "hidden" %} -{% endif %} +{% set tab2title = "" ~ plugin_shortname ~ " Maintenance" %} {% set tab3title = "" ~ plugin_shortname ~ " API/Doku" %} @@ -312,10 +384,31 @@ {{ item._value | float | round(2) }} {{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }} {{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }} + + + + {% if p.get_item_config(item._path)['active'] %} + + {% else %} + + {% endif %} + + + {% endfor %} +
    {% endblock bodytab1 %} From 38f8d85fcd1cf26000577b95a894a6b1910a2e56 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Thu, 27 Jul 2023 16:52:03 +0200 Subject: [PATCH 209/775] fix commands.py --- kodi/plugin.yaml | 384 ++++++++++++++++++++++------------------------- 1 file changed, 182 insertions(+), 202 deletions(-) diff --git a/kodi/plugin.yaml b/kodi/plugin.yaml index a2b809c1a..a931d56ca 100644 --- a/kodi/plugin.yaml +++ b/kodi/plugin.yaml @@ -9,7 +9,7 @@ plugin: version: 1.7.2 sh_minversion: 1.9.5 py_minversion: 3.7 - multi_instance: false + multi_instance: true restartable: true classname: kodi @@ -212,52 +212,47 @@ item_structs: info: - read: - type: bool - enforce_updates: true - kodi_read_group_trigger: info - player: type: num - kodi_command: info.player - kodi_read: true - kodi_write: false + kodi_command@instance: info.player + kodi_read@instance: true + kodi_write@instance: false state: type: str - kodi_command: info.state - kodi_read: true - kodi_write: false + kodi_command@instance: info.state + kodi_read@instance: true + kodi_write@instance: false media: type: str - kodi_command: info.media - kodi_read: true - kodi_write: false + kodi_command@instance: info.media + kodi_read@instance: true + kodi_write@instance: false title: type: str - kodi_command: info.title - kodi_read: true - kodi_write: false + kodi_command@instance: info.title + kodi_read@instance: true + kodi_write@instance: false streams: type: list - kodi_command: info.streams - kodi_read: true - kodi_write: false + kodi_command@instance: info.streams + kodi_read@instance: true + kodi_write@instance: false subtitles: type: list - kodi_command: info.subtitles - kodi_read: true - kodi_write: false + kodi_command@instance: info.subtitles + kodi_read@instance: true + kodi_write@instance: false macro: type: bool - kodi_command: info.macro - kodi_read: true - kodi_write: true + kodi_command@instance: info.macro + kodi_read@instance: true + kodi_write@instance: true status: @@ -266,366 +261,351 @@ item_structs: read: type: bool enforce_updates: true - kodi_read_group_trigger: status + kodi_read_group_trigger@instance: status update: type: bool - kodi_command: status.update - kodi_read: false - kodi_write: true - kodi_read_group: + kodi_command@instance: status.update + kodi_read@instance: false + kodi_write@instance: true + kodi_read_group@instance: - status ping: type: bool - kodi_command: status.ping - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.ping + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_status_au: type: bool - kodi_command: status.get_status_au - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_status_au + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_players: type: bool - kodi_command: status.get_players - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_players + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_actplayer: type: bool - kodi_command: status.get_actplayer - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_actplayer + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_status_play: type: bool - kodi_command: status.get_status_play - kodi_read: true - kodi_write: false - kodi_read_group: + 
kodi_command@instance: status.get_status_play + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_item: type: bool - kodi_command: status.get_item - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_item + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status get_favourites: type: bool - kodi_command: status.get_favourites - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_favourites + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - status control: control: - read: - type: bool - enforce_updates: true - kodi_read_group_trigger: control - playpause: type: bool - kodi_command: control.playpause - kodi_read: true - kodi_write: true + kodi_command@instance: control.playpause + kodi_read@instance: true + kodi_write@instance: true seek: type: num - kodi_command: control.seek - kodi_read: true - kodi_write: true + kodi_command@instance: control.seek + kodi_read@instance: true + kodi_write@instance: true audio: type: foo - kodi_command: control.audio - kodi_read: true - kodi_write: true + kodi_command@instance: control.audio + kodi_read@instance: true + kodi_write@instance: true speed: type: num - kodi_command: control.speed - kodi_read: true - kodi_write: true + kodi_command@instance: control.speed + kodi_read@instance: true + kodi_write@instance: true subtitle: type: foo - kodi_command: control.subtitle - kodi_read: true - kodi_write: true + kodi_command@instance: control.subtitle + kodi_read@instance: true + kodi_write@instance: true stop: type: bool - kodi_command: control.stop - kodi_read: true - kodi_write: true + kodi_command@instance: control.stop + kodi_read@instance: true + kodi_write@instance: true goto: type: str - kodi_command: control.goto - kodi_read: true - kodi_write: true + kodi_command@instance: control.goto + kodi_read@instance: true + kodi_write@instance: true power: type: bool - kodi_command: control.power - kodi_read: true - kodi_write: true + kodi_command@instance: control.power + kodi_read@instance: true + kodi_write@instance: true quit: type: bool - kodi_command: control.quit - kodi_read: true - kodi_write: true + kodi_command@instance: control.quit + kodi_read@instance: true + kodi_write@instance: true mute: type: bool - kodi_command: control.mute - kodi_read: true - kodi_write: true + kodi_command@instance: control.mute + kodi_read@instance: true + kodi_write@instance: true volume: type: num - kodi_command: control.volume - kodi_read: true - kodi_write: true + kodi_command@instance: control.volume + kodi_read@instance: true + kodi_write@instance: true action: type: str - kodi_command: control.action - kodi_read: true - kodi_write: true + kodi_command@instance: control.action + kodi_read@instance: true + kodi_write@instance: true ALL: read: type: bool enforce_updates: true - kodi_read_group_trigger: ALL + kodi_read_group_trigger@instance: ALL info: - read: - type: bool - enforce_updates: true - kodi_read_group_trigger: ALL.info - player: type: num - kodi_command: info.player - kodi_read: true - kodi_write: false + kodi_command@instance: info.player + kodi_read@instance: true + kodi_write@instance: false state: type: str - kodi_command: info.state - kodi_read: true - kodi_write: false + kodi_command@instance: info.state + kodi_read@instance: true + kodi_write@instance: false media: type: str - kodi_command: info.media - kodi_read: true - kodi_write: false + 
kodi_command@instance: info.media + kodi_read@instance: true + kodi_write@instance: false title: type: str - kodi_command: info.title - kodi_read: true - kodi_write: false + kodi_command@instance: info.title + kodi_read@instance: true + kodi_write@instance: false streams: type: list - kodi_command: info.streams - kodi_read: true - kodi_write: false + kodi_command@instance: info.streams + kodi_read@instance: true + kodi_write@instance: false subtitles: type: list - kodi_command: info.subtitles - kodi_read: true - kodi_write: false + kodi_command@instance: info.subtitles + kodi_read@instance: true + kodi_write@instance: false macro: type: bool - kodi_command: info.macro - kodi_read: true - kodi_write: true + kodi_command@instance: info.macro + kodi_read@instance: true + kodi_write@instance: true status: read: type: bool enforce_updates: true - kodi_read_group_trigger: ALL.status + kodi_read_group_trigger@instance: ALL.status update: type: bool - kodi_command: status.update - kodi_read: false - kodi_write: true - kodi_read_group: + kodi_command@instance: status.update + kodi_read@instance: false + kodi_write@instance: true + kodi_read_group@instance: - ALL - ALL.status ping: type: bool - kodi_command: status.ping - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.ping + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_status_au: type: bool - kodi_command: status.get_status_au - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_status_au + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_players: type: bool - kodi_command: status.get_players - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_players + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_actplayer: type: bool - kodi_command: status.get_actplayer - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_actplayer + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_status_play: type: bool - kodi_command: status.get_status_play - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_status_play + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_item: type: bool - kodi_command: status.get_item - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_item + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status get_favourites: type: bool - kodi_command: status.get_favourites - kodi_read: true - kodi_write: false - kodi_read_group: + kodi_command@instance: status.get_favourites + kodi_read@instance: true + kodi_write@instance: false + kodi_read_group@instance: - ALL - ALL.status control: - read: - type: bool - enforce_updates: true - kodi_read_group_trigger: ALL.control - playpause: type: bool - kodi_command: control.playpause - kodi_read: true - kodi_write: true + kodi_command@instance: control.playpause + kodi_read@instance: true + kodi_write@instance: true seek: type: num - kodi_command: control.seek - kodi_read: true - kodi_write: true + kodi_command@instance: control.seek + kodi_read@instance: true + kodi_write@instance: true audio: type: foo - kodi_command: control.audio - kodi_read: true 
- kodi_write: true + kodi_command@instance: control.audio + kodi_read@instance: true + kodi_write@instance: true speed: type: num - kodi_command: control.speed - kodi_read: true - kodi_write: true + kodi_command@instance: control.speed + kodi_read@instance: true + kodi_write@instance: true subtitle: type: foo - kodi_command: control.subtitle - kodi_read: true - kodi_write: true + kodi_command@instance: control.subtitle + kodi_read@instance: true + kodi_write@instance: true stop: type: bool - kodi_command: control.stop - kodi_read: true - kodi_write: true + kodi_command@instance: control.stop + kodi_read@instance: true + kodi_write@instance: true goto: type: str - kodi_command: control.goto - kodi_read: true - kodi_write: true + kodi_command@instance: control.goto + kodi_read@instance: true + kodi_write@instance: true power: type: bool - kodi_command: control.power - kodi_read: true - kodi_write: true + kodi_command@instance: control.power + kodi_read@instance: true + kodi_write@instance: true quit: type: bool - kodi_command: control.quit - kodi_read: true - kodi_write: true + kodi_command@instance: control.quit + kodi_read@instance: true + kodi_write@instance: true mute: type: bool - kodi_command: control.mute - kodi_read: true - kodi_write: true + kodi_command@instance: control.mute + kodi_read@instance: true + kodi_write@instance: true volume: type: num - kodi_command: control.volume - kodi_read: true - kodi_write: true + kodi_command@instance: control.volume + kodi_read@instance: true + kodi_write@instance: true action: type: str - kodi_command: control.action - kodi_read: true - kodi_write: true + kodi_command@instance: control.action + kodi_read@instance: true + kodi_write@instance: true plugin_functions: NONE logic_parameters: NONE From cc41c4d5350584f9344704e98cb39be891de5d45 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Thu, 27 Jul 2023 21:18:06 +0200 Subject: [PATCH 210/775] added create_masteritem_file to directory check --- smartvisu/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/smartvisu/__init__.py b/smartvisu/__init__.py index e83c4775e..4566940b9 100755 --- a/smartvisu/__init__.py +++ b/smartvisu/__init__.py @@ -125,8 +125,8 @@ def __init__(self, sh): def run(self): self.alive = True - # skip directory handling if generate pages and handle_widgets are disabled - if self.smartvisu_dir != '' and (self._generate_pages or self._handle_widgets): + # skip directory handling if all relevant handling options are disabled + if self.smartvisu_dir != '' and (self._generate_pages or self._handle_widgets or self._create_masteritem_file): if not os.path.isdir(os.path.join(self.smartvisu_dir, 'pages')): self.logger.error("Could not find valid smartVISU directory: {}".format(self.smartvisu_dir)) else: From 42e011c3d36608c0b0b4c691dcc8bd4f87212326 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Thu, 27 Jul 2023 21:24:08 +0200 Subject: [PATCH 211/775] initial commit --- viessmann/__init__.py | 2095 +------- viessmann/{ => _pv_1_2_3}/README.md | 0 viessmann/_pv_1_2_3/__init__.py | 2122 ++++++++ viessmann/_pv_1_2_3/commands.py | 900 ++++ viessmann/{ => _pv_1_2_3}/locale.yaml | 0 viessmann/_pv_1_2_3/plugin.yaml | 231 + viessmann/{ => _pv_1_2_3}/requirements.txt | 0 viessmann/{ => _pv_1_2_3}/user_doc.rst | 0 .../webif/static/datatables_min.css | 0 .../webif/static/datatables_min.js | 0 .../webif/static/img/plugin_logo.svg | 1 + .../webif/static/img/sort_asc.png | Bin 
.../webif/static/img/sort_asc_disabled.png | Bin .../webif/static/img/sort_both.png | Bin .../webif/static/img/sort_desc.png | Bin .../webif/static/img/sort_desc_disabled.png | Bin .../_pv_1_2_3/webif/templates/index.html | 264 + viessmann/commands.py | 1636 +++---- viessmann/datatypes.py | 181 + viessmann/plugin.yaml | 4334 ++++++++++++++++- viessmann/protocol.py | 490 ++ viessmann/webif/__init__.py | 155 + viessmann/webif/static/img/plugin_logo.svg | 89 +- viessmann/webif/templates/index.html | 390 +- 24 files changed, 9638 insertions(+), 3250 deletions(-) mode change 100755 => 100644 viessmann/__init__.py rename viessmann/{ => _pv_1_2_3}/README.md (100%) create mode 100755 viessmann/_pv_1_2_3/__init__.py create mode 100755 viessmann/_pv_1_2_3/commands.py rename viessmann/{ => _pv_1_2_3}/locale.yaml (100%) create mode 100755 viessmann/_pv_1_2_3/plugin.yaml rename viessmann/{ => _pv_1_2_3}/requirements.txt (100%) rename viessmann/{ => _pv_1_2_3}/user_doc.rst (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/datatables_min.css (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/datatables_min.js (100%) create mode 100755 viessmann/_pv_1_2_3/webif/static/img/plugin_logo.svg rename viessmann/{ => _pv_1_2_3}/webif/static/img/sort_asc.png (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/img/sort_asc_disabled.png (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/img/sort_both.png (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/img/sort_desc.png (100%) rename viessmann/{ => _pv_1_2_3}/webif/static/img/sort_desc_disabled.png (100%) create mode 100755 viessmann/_pv_1_2_3/webif/templates/index.html mode change 100755 => 100644 viessmann/commands.py create mode 100644 viessmann/datatypes.py mode change 100755 => 100644 viessmann/plugin.yaml create mode 100644 viessmann/protocol.py create mode 100644 viessmann/webif/__init__.py mode change 100755 => 100644 viessmann/webif/static/img/plugin_logo.svg mode change 100755 => 100644 viessmann/webif/templates/index.html diff --git a/viessmann/__init__.py b/viessmann/__init__.py old mode 100755 new mode 100644 index 11bd79325..1120b51c6 --- a/viessmann/__init__.py +++ b/viessmann/__init__.py @@ -1,39 +1,32 @@ #!/usr/bin/env python3 # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab - ######################################################################### -# Copyright 2020 Michael Wenzel -# Copyright 2020 Sebastian Helms +# Copyright 2020- Sebastian Helms Morg @ knx-user-forum ######################################################################### -# Viessmann-Plugin for SmartHomeNG. https://github.com/smarthomeNG// +# This file is part of SmartHomeNG +# +# Viessmann heating plugin for SmartDevicePlugin class # -# This plugin is free software: you can redistribute it and/or modify +# SmartHomeNG is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# This plugin is distributed in the hope that it will be useful, +# SmartHomeNG is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . +# along with SmartHomeNG If not, see . 
######################################################################### -import logging +import builtins +import os import sys -import time -import re -import json -import serial -import threading -from datetime import datetime -import dateutil.parser -import cherrypy if __name__ == '__main__': - # just needed for standalone mode + builtins.SDP_standalone = True class SmartPlugin(): pass @@ -41,392 +34,104 @@ class SmartPlugin(): class SmartPluginWebIf(): pass - import os BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) sys.path.insert(0, BASE) - import commands else: - from . import commands - - from lib.item import Items - from lib.model.smartplugin import SmartPlugin, SmartPluginWebIf, Modules - - from bin.smarthome import VERSION - - -class Viessmann(SmartPlugin): - ''' - Main class of the plugin. Provides communication with Viessmann heating systems - via serial / USB-to-serial connections to read values and set operating parameters. - - Supported device types must be defined in ./commands.py. - ''' - ALLOW_MULTIINSTANCE = False - - PLUGIN_VERSION = '1.2.3' - -# -# public methods -# - - def __init__(self, sh, *args, standalone='', logger=None, **kwargs): + builtins.SDP_standalone = False - # Call init code of parent class (SmartPlugin) - super().__init__() +from lib.model.sdp.globals import PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_PROTOCOL +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone +from .protocol import SDPProtocolViessmann - # standalone mode: just setup basic info - if standalone: - self._serialport = standalone - self._timeout = 3 - self.logger = logger - self._standalone = True - else: - # Get plugin parameter - self._serialport = self.get_parameter_value('serialport') - self._heating_type = self.get_parameter_value('heating_type') - self._protocol = self.get_parameter_value('protocol') - self._timeout = self.get_parameter_value('timeout') - self._standalone = False - - # Set variables - self._error_count = 0 - self._params = {} # Item dict - self._init_cmds = [] # List of command codes for read at init - self._cyclic_cmds = {} # Dict of command codes with cylce-times for cyclic readings - self._application_timer = {} # Dict of application timer with command codes and values - self._timer_cmds = [] # List of command codes for timer - self._viess_timer_dict = {} - self._last_values = {} - self._balist_item = None # list of last value per command code - self._lock = threading.Lock() - self._initread = False - self._timerread = False - self._connected = False - self._initialized = False - self._lastbyte = b'' - self._lastbytetime = 0 - self._cyclic_update_active = False - self._wochentage = { - 'MO': ['mo', 'montag', 'monday'], - 'TU': ['di', 'dienstag', 'tuesday'], - 'WE': ['mi', 'mittwoch', 'wednesday'], - 'TH': ['do', 'donnerstag', 'thursday'], - 'FR': ['fr', 'freitag', 'friday'], - 'SA': ['sa', 'samstag', 'saturday'], - 'SU': ['so', 'sonntag', 'sunday']} - - # if running standalone, don't initialize command sets - if not sh: - return - - # initialize logger if necessary - if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5': - self.logger = logging.getLogger(__name__) - - self._config_loaded = False - - if not self._load_configuration(): - return None - - # Init web interface - self.init_webinterface() +if not SDP_standalone: + from .webif import WebInterface - def run(self): - ''' - Run method for the plugin - ''' - if not self._config_loaded: - if not self._load_configuration(): - return - self.alive = True - self._connect() - 
self._read_initial_values() - self._read_timers() - def stop(self): - ''' - Stop method for the plugin - ''' - self.alive = False - if self.scheduler_get('cyclic'): - self.scheduler_remove('cyclic') - self._disconnect() - # force reload of configuration on restart - self._config_loaded = False +class sdp_viessmann(SmartDevicePlugin): + """ Device class for Viessmann heating systems. - def parse_item(self, item): - ''' - Method for parsing items. - If the item carries any viess_* field, this item is registered to the plugin. + Standalone mode is automatic device type discovery + """ + PLUGIN_VERSION = '1.3.0' - :param item: The item to process. - :type item: object + def _set_device_defaults(self): - :return: The item update method to be triggered if the item is changed, or None. - :rtype: object - ''' - # Process the update config - if self.has_iattr(item.conf, 'viess_update'): - self.logger.debug(f'Item for requesting update for all items triggered: {item}') - return self.update_item + if not SDP_standalone: + self._webif = WebInterface - # Process the timer config and fill timer dict - if self.has_iattr(item.conf, 'viess_timer'): - timer_app = self.get_iattr_value(item.conf, 'viess_timer') - for commandname in self._commandset: - if commandname.startswith(timer_app): - commandconf = self._commandset[commandname] - self.logger.debug(f'Process the timer config, commandname: {commandname}') - # {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True} - commandcode = (commandconf['addr']).lower() - if timer_app not in self._application_timer: - self._application_timer[timer_app] = {'item': item, 'commandcodes': []} - if commandcode not in self._application_timer[timer_app]['commandcodes']: - self._application_timer[timer_app]['commandcodes'].append(commandcode) - self._application_timer[timer_app]['commandcodes'].sort() - self.logger.info(f'Loaded Application Timer {self._application_timer}') - # self._application_timer: {'Timer_M2': {'item': Item: heizung.heizkreis_m2.schaltzeiten, 'commandcodes': ['3000', '3008', '3010', '3018', '3020', '3028', '3030']}, 'Timer_Warmwasser': {'item': Item: heizung.warmwasser.schaltzeiten, 'commandcodes': ['2100', '2108', '2110', '2118', '2120', '2128', '2130']}} + self._parameters[PLUGIN_ATTR_PROTOCOL] = SDPProtocolViessmann - for subdict in self._application_timer: - for commandcode in self._application_timer[subdict]['commandcodes']: - if commandcode not in self._timer_cmds: - self._timer_cmds.append(commandcode) - self._timer_cmds.sort() - self.logger.debug(f'Loaded Timer commands {self._timer_cmds}') - return self.update_item +# +# methods for standalone mode +# - # Process the read config - if self.has_iattr(item.conf, 'viess_read'): - commandname = self.get_iattr_value(item.conf, 'viess_read') - if commandname is None or commandname not in self._commandset: - self.logger.error(f'Item {item} contains invalid read command {commandname}!') - return None + def run_standalone(self): + """ + try to identify device + """ + print(f'dev_viessmann trying to identify device at {self._parameters.get("serialport", "unknown")}...') + devs = self.get_lookup('devicetypes') + if not devs: + devs = {} - # Remember the read config to later update this item if the configured response comes in - self.logger.info(f'Item {item} reads by using command {commandname}') - commandconf = self._commandset[commandname] - commandcode = (commandconf['addr']).lower() + for proto in ('P300', 'KW'): - # Fill item dict - self._params[commandcode] = {'item': item, 'commandname': commandname} 
- self.logger.debug(f'Loaded params {self._params}') + res = self.get_device_type(proto) - # Allow items to be automatically initiated on startup - if self.has_iattr(item.conf, 'viess_init') and self.get_iattr_value(item.conf, 'viess_init'): - self.logger.info(f'Item {item} is initialized on startup') - if commandcode not in self._init_cmds: - self._init_cmds.append(commandcode) - self.logger.debug(f'CommandCodes should be read at init: {self._init_cmds}') + if res is None: - # Allow items to be cyclically updated - if self.has_iattr(item.conf, 'viess_read_cycle'): - cycle = int(self.get_iattr_value(item.conf, 'viess_read_cycle')) - self.logger.info(f'Item {item} should read cyclic every {cycle} seconds') - nexttime = time.time() + cycle - if commandcode not in self._cyclic_cmds: - self._cyclic_cmds[commandcode] = {'cycle': cycle, 'nexttime': nexttime} - else: - # If another item requested this command already with a longer cycle, use the shorter cycle now - if self._cyclic_cmds[commandcode]['cycle'] > cycle: - self._cyclic_cmds[commandcode]['cycle'] = cycle - self.logger.debug(f'CommandCodes should be read cyclic: {self._cyclic_cmds}') + # None means no connection, no further tries + print(f'Connection could not be established to {self._parameters[PLUGIN_ATTR_SERIAL_PORT]}. Please check connection.') + break - # Process the write config - if self.has_iattr(item.conf, 'viess_send'): - if self.get_iattr_value(item.conf, 'viess_send'): - commandname = self.get_iattr_value(item.conf, 'viess_read') - else: - commandname = self.get_iattr_value(item.conf, 'viess_send') + if res is False: - if commandname is None or commandname not in self._commandset: - self.logger.error(f'Item {item} contains invalid write command {commandname}!') - return None + # False means no comm init (only P300), go on + print(f'Communication could not be established using protocol {proto}.') else: - self.logger.info(f'Item {item} to be written by using command {commandname}') - return self.update_item - - # get operating modes list - if self.has_iattr(item.conf, 'viess_ba_list'): - self._balist_item = item - self.logger.info(f'Item {item} wants list of operating modes') - - def parse_logic(self, logic): - pass - - def update_item(self, item, caller=None, source=None, dest=None): - ''' - Callback method for sending values to the plugin when a registered item has changed - - :param item: item to be updated towards the plugin - :param caller: if given it represents the callers name - :param source: if given it represents the source - :param dest: if given it represents the dest - ''' - if self.alive and caller != self.get_shortname(): - self.logger.info(f'Update item: {item.id()}, item has been changed outside this plugin') - self.logger.debug(f'update_item was called with item {item} from caller {caller}, source {source} and dest {dest}') - - if self.has_iattr(item.conf, 'viess_send'): - # Send write command - if self.get_iattr_value(item.conf, 'viess_send'): - commandname = self.get_iattr_value(item.conf, 'viess_read') - else: - commandname = self.get_iattr_value(item.conf, 'viess_send') - value = item() - self.logger.debug(f'Got item value to be written: {value} on command name {commandname}') - if not self._send_command(commandname, value): - # create_write_command() liefert False, wenn das Schreiben fehlgeschlagen ist - # -> dann auch keine weitere Verarbeitung - self.logger.debug(f'Write for {commandname} with value {value} failed, reverting value, canceling followup actions') - item(item.property.last_value, 
self.get_shortname()) - return None - - # If a read command should be sent after write - if self.has_iattr(item.conf, 'viess_read') and self.has_iattr(item.conf, 'viess_read_afterwrite'): - readcommandname = self.get_iattr_value(item.conf, 'viess_read') - readafterwrite = self.get_iattr_value(item.conf, 'viess_read_afterwrite') - self.logger.debug(f'Attempting read after write for item {item}, command {readcommandname}, delay {readafterwrite}') - if readcommandname is not None and readafterwrite is not None: - aw = float(readafterwrite) - time.sleep(aw) - self._send_command(readcommandname) - - # If commands should be triggered after this write - if self.has_iattr(item.conf, 'viess_trigger'): - trigger = self.get_iattr_value(item.conf, 'viess_trigger') - if trigger is None: - self.logger.error(f'Item {item} contains invalid trigger command list {trigger}!') - else: - tdelay = 5 # default delay - if self.has_iattr(item.conf, 'viess_trigger_afterwrite'): - tdelay = float(self.get_iattr_value(item.conf, 'viess_trigger_afterwrite')) - if type(trigger) != list: - trigger = [trigger] - for triggername in trigger: - triggername = triggername.strip() - if triggername is not None and readafterwrite is not None: - self.logger.debug(f'Triggering command {triggername} after write for item {item}') - time.sleep(tdelay) - self._send_command(triggername) - - elif self.has_iattr(item.conf, 'viess_timer'): - timer_app = self.get_iattr_value(item.conf, 'viess_timer') - uzsu_dict = item() - self.logger.debug(f'Got changed UZSU timer: {uzsu_dict} on timer application {timer_app}') - self._uzsu_dict_to_viess_timer(timer_app, uzsu_dict) - - elif self.has_iattr(item.conf, 'viess_update'): - if item(): - self.logger.debug('Reading of all values/items has been requested') - self.update_all_read_items() - def send_cyclic_cmds(self): - ''' - Recall function for shng scheduler. Reads all values configured to be read cyclically. - ''' - # check if another cyclic cmd run is still active - if self._cyclic_update_active: - self.logger.warning('Triggered cyclic command read, but previous cyclic run is still active. Check device and cyclic configuration (too much/too short?)') - return - else: - self.logger.info('Triggering cyclic command read') - - # set lock - self._cyclic_update_active = True - currenttime = time.time() - read_items = 0 - todo = [] - for commandcode in list(self._cyclic_cmds.keys()): - - entry = self._cyclic_cmds[commandcode] - # Is the command already due? - if entry['nexttime'] <= currenttime: - todo.append(commandcode) - - if self._protocol == 'KW': - # see if we got to do anything - maybe no items are due to be read? 
- if len(todo) > 0: - self._KW_send_multiple_read_commands(todo) - for addr in todo: - self._cyclic_cmds[addr]['nexttime'] = currenttime + self._cyclic_cmds[addr]['cycle'] - read_items = len(todo) - else: - for addr in todo: - # as this loop can take considerable time, repeatedly check if shng wants to stop - if not self.alive: - self.logger.info('shng issued stop command, canceling cyclic read.') - return - - commandname = self._commandname_by_commandcode(addr) - self.logger.debug(f'Triggering cyclic read command: {commandname}') - self._send_command(commandname, ) - self._cyclic_cmds[addr]['nexttime'] = currenttime + self._cyclic_cmds[addr]['cycle'] - read_items += 1 - - self._cyclic_update_active = False - if read_items: - self.logger.debug(f'cyclic command read took {(time.time() - currenttime):.1f} seconds for {read_items} items') - - def update_all_read_items(self): - ''' - Read all values preset in commands.py as readable - ''' - for commandcode in list(self._params.keys()): - commandname = self._commandname_by_commandcode(commandcode) - self.logger.debug(f'Triggering read command: {commandname} for requested value update') - self._send_command(commandname) + # anything else should be the devices answer, try to decode and quit + print(f'Device ID is {res}, device type is {devs.get(res.upper(), "unknown")} supporting protocol {proto}') + # break def read_addr(self, addr): - ''' + """ Tries to read a data point indepently of item config :param addr: data point addr (2 byte hex address) :type addr: str :return: Value if read is successful, None otherwise - ''' + """ addr = addr.lower() - commandname = self._commandname_by_commandcode(addr) + commandname = self._commands.get_command_from_reply(addr) if commandname is None: self.logger.debug(f'Address {addr} not defined in commandset, aborting') return None self.logger.debug(f'Attempting to read address {addr} for command {commandname}') - (packet, responselen) = self._build_command_packet(commandname) - if packet is None: - return None - - response_packet = self._send_command_packet(packet, responselen) - if response_packet is None: - return None + return self.send_command(commandname) - res = self._parse_response(response_packet, commandname) - if res is None: - return None - - (value, commandcode) = res - - return value - - def read_temp_addr(self, addr, length, unit): - ''' + def read_temp_addr(self, addr, length=1, mult=0, signed=False): + """ Tries to read an arbitrary supplied data point indepently of device config :param addr: data point addr (2 byte hex address) :type addr: str :param len: Length (in byte) expected from address read :type len: num - :param unit: Unit code from commands.py - :type unit: str + :param mult: value multiplicator + :type mult: num + :param signed: specifies signed or unsigned value + :type signed: bool :return: Value if read is successful, None otherwise - ''' + """ # as we have no reference whatever concerning the supplied data, we do a few sanity checks... addr = addr.lower() - if len(addr) != 4: # addresses are 2 bytes self.logger.warning(f'temp address: address not 4 digits long: {addr}') return None @@ -440,1683 +145,103 @@ def read_temp_addr(self, addr, length, unit): self.logger.warning(f'temp address: len is not > 0 and < 33: {len}') return None - if unit not in self._unitset: # units need to be predefined - self.logger.warning(f'temp address: unit {unit} not in unitset. Cannot use custom units') - return None - # addr already known? 
- if addr in self._commandset: - cmd = self._commandname_by_commandcode(addr) + cmd = self._commands.get_command_from_reply(addr) + if cmd: self.logger.info(f'temp address {addr} already known for command {cmd}') else: # create temp commandset cmd = 'temp_cmd' - cmdconf = {'addr': addr, 'len': length, 'unit': unit, 'set': False} + cmdconf = {'read': True, 'write': False, 'opcode': addr, 'reply_token': addr, 'item_type': 'str', 'dev_datatype': 'H', 'params': ['value', 'mult', 'signed', 'len'], 'param_values': ['VAL', mult, signed, length]} self.logger.debug(f'Adding temporary command config {cmdconf} for command temp_cmd') - self._commandset[cmd] = cmdconf + self._commands._parse_commands(self.device_id, {cmd: cmdconf}, [cmd]) - res = self.read_addr(addr) + try: + res = self.read_addr(addr) + except Exception as e: + self.logger.error(f'Error on send: {e}') + res = None - if cmd == 'temp_cmd': - del self._commandset['temp_cmd'] + try: + del self._commands._commands['temp_cmd'] + except (KeyError, AttributeError): + pass return res def write_addr(self, addr, value): - ''' + """ Tries to write a data point indepently of item config :param addr: data point addr (2 byte hex address) :type addr: str :param value: value to write :return: Value if read is successful, None otherwise - ''' + """ addr = addr.lower() - commandname = self._commandname_by_commandcode(addr) + commandname = self._commands.get_command_from_reply(addr) if commandname is None: self.logger.debug(f'Address {addr} not defined in commandset, aborting') return None self.logger.debug(f'Attempting to write address {addr} with value {value} for command {commandname}') - (packet, responselen) = self._build_command_packet(commandname, value) - if packet is None: - return None - - response_packet = self._send_command_packet(packet, responselen) - if response_packet is None: - return None - - return self._parse_response(response_packet, commandname) - -# -# initialization methods -# - - def _load_configuration(self): - ''' - Load configuration sets from commands.py - ''' - - # Load protocol dependent sets - if self._protocol in commands.controlset and self._protocol in commands.errorset and self._protocol in commands.unitset and self._protocol in commands.returnstatus and self._protocol in commands.setreturnstatus: - self._controlset = commands.controlset[self._protocol] - self.logger.debug(f'Loaded controlset for protocol {self._controlset}') - self._errorset = commands.errorset[self._protocol] - self.logger.debug(f'Loaded errors for protocol {self._errorset}') - self._unitset = commands.unitset[self._protocol] - self.logger.debug(f'Loaded units for protocol {self._unitset}') - self._devicetypes = commands.devicetypes - self.logger.debug(f'Loaded device types for protocol {self._devicetypes}') - self._returnstatus = commands.returnstatus[self._protocol] - self.logger.debug(f'Loaded return status for protocol {self._returnstatus}') - self._setreturnstatus = commands.setreturnstatus[self._protocol] - self.logger.debug(f'Loaded set return status for protocol {self._setreturnstatus}') - else: - self.logger.error(f'Sets for protocol {self._protocol} could not be found or incomplete!') - return False - - # Load device dependent sets - if self._heating_type in commands.commandset and self._heating_type in commands.operatingmodes and self._heating_type in commands.systemschemes: - self._commandset = commands.commandset[self._heating_type] - self.logger.debug(f'Loaded commands for heating type {self._commandset}') - self._operatingmodes = 
commands.operatingmodes[self._heating_type] - self.logger.debug(f'Loaded operating modes for heating type {self._operatingmodes}') - self._systemschemes = commands.systemschemes[self._heating_type] - self.logger.debug(f'Loaded system schemes for heating type {self._systemschemes}') - else: - sets = [] - if self._heating_type not in commands.commandset: - sets += 'command' - if self._heating_type not in commands.operatingmodes: - sets += 'operating modes' - if self._heating_type not in commands.systemschemes: - sets += 'system schemes' - - self.logger.error(f'Sets {", ".join(sets)} for heating type {self._heating_type} could not be found!') - return False - - self.logger.info(f'Loaded configuration for heating type {self._heating_type} with protocol {self._protocol}') - self._config_loaded = True - return True - - def _connect(self): - ''' - Tries to establish a connection to the serial reading device. To prevent - multiple concurrent connection locking is used. - - :return: Returns True if connection was established, False otherwise - :rtype: bool - ''' - if not self.alive: - return False - - if self._connected and self._serial: - return True - - self._lock.acquire() - try: - self.logger.debug(f'Connecting to {self._serialport}..') - self._serial = serial.Serial() - self._serial.baudrate = self._controlset['Baudrate'] - self._serial.parity = self._controlset['Parity'] - self._serial.bytesize = self._controlset['Bytesize'] - self._serial.stopbits = self._controlset['Stopbits'] - self._serial.port = self._serialport - - # both of the following timeout values are determined by trial and error - if self._protocol == 'KW': - # needed to "capture" the 0x05 sync bytes - self._serial.timeout = 1.0 - else: - # not too long to prevent lags in communication. - self._serial.timeout = 0.5 - self._serial.open() - self._connected = True - self.logger.info(f'Connected to {self._serialport}') - self._connection_attempts = 0 - if not self._standalone and not self.scheduler_get('cyclic'): - self._create_cyclic_scheduler() - return True - except Exception as e: - self.logger.error(f'Could not _connect to {self._serialport}; Error: {e}') - return False - finally: - self._lock.release() - - def _disconnect(self): - ''' - Disconnect any connected devices. - ''' - self._connected = False - self._initialized = False - try: - self._serial.close() - except IOError: - pass - self._serial = None - try: - self._lock.release() - except RuntimeError: - pass - self.logger.info('Disconnected') - - def _init_communication(self): - ''' - After connecting to the device, setup the communication protocol - - :return: Returns True, if communication was established successfully, False otherwise - :rtype: bool - ''' - # just try to connect anyway; if connected, this does nothing and no harm, if not, it connects - if not self._connect(): - - self.logger.error('Init communication not possible as connect failed.') - return False - - # initialization only necessary for P300 protocol... 
- if self._protocol == 'P300': + return self.send_command(commandname, value) - # init procedure is - # interface: 0x04 (reset) - # device: 0x05 (repeated) - # interface: 0x160000 (sync) - # device: 0x06 (sync ok) - # interface: resume communication, periodically send 0x160000 as keepalive if necessary + def get_device_type(self, protocol): - self.logger.debug('Init Communication....') - is_initialized = False - initstringsent = False - self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') - self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) - readbyte = self._read_bytes(1) - self.logger.debug(f'read_bytes: read {readbyte}, last byte is {self._lastbyte}') + serialport = self._parameters.get('serialport', None) - for i in range(0, 10): - if initstringsent and self._lastbyte == self._int2bytes(self._controlset['Acknowledge'], 1): - is_initialized = True - self.logger.debug('Device acknowledged initialization') - break - if self._lastbyte == self._int2bytes(self._controlset['Not_initiated'], 1): - self._send_bytes(self._int2bytes(self._controlset['Sync_Command'], 3)) - self.logger.debug(f'send_bytes: Send sync command {self._int2bytes(self._controlset["Sync_Command"], 3)}') - initstringsent = True - elif self._lastbyte == self._int2bytes(self._controlset['Init_Error'], 1): - self.logger.error(f'The interface has reported an error (\x15), loop increment {i}') - self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) - self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') - initstringsent = False - else: - self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) - self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') - initstringsent = False - readbyte = self._read_bytes(1) - self.logger.debug(f'read_bytes: read {readbyte}, last byte is {self._lastbyte}') + # try to connect and read device type info from 0x00f8 + self.logger.info(f'Trying protocol {protocol} on device {serialport}') - self.logger.debug(f'Communication initialized: {is_initialized}') - self._initialized = is_initialized - - else: # at the moment the only other supported protocol is 'KW' which is not stateful - is_initialized = True - self._initialized = is_initialized - - return is_initialized - - def _create_cyclic_scheduler(self): - ''' - Setup the scheduler to handle cyclic read commands and find the proper time for the cycle. - ''' - if not self.alive: - return - - shortestcycle = -1 - # find shortest cycle - for commandname in list(self._cyclic_cmds.keys()): - entry = self._cyclic_cmds[commandname] - if shortestcycle == -1 or entry['cycle'] < shortestcycle: - shortestcycle = entry['cycle'] - # Start the worker thread - if shortestcycle != -1: - # Balance unnecessary calls and precision - workercycle = int(shortestcycle / 2) - # just in case it already exists... - if self.scheduler_get('cyclic'): - self.scheduler_remove('cyclic') - self.scheduler_add('cyclic', self.send_cyclic_cmds, cycle=workercycle, prio=5, offset=0) - self.logger.info(f'Added cyclic worker thread ({workercycle} sec cycle). 
Shortest item update cycle found: {shortestcycle} sec') - - def _read_initial_values(self): - ''' - Read all values configured to be read at startup / connection - ''' - if self._balist_item is not None: - balist = list(self._operatingmodes.values()) - self._balist_item(balist, self.get_shortname()) - self.logger.info(f'writing list of operating modes ({len(balist)} entries) to item {self._balist_item}') - - if self._init_cmds != []: - self.logger.info('Starting initial read commands.') - if self._protocol == 'KW': - self._KW_send_multiple_read_commands(self._init_cmds) - else: - for commandcode in self._init_cmds: - commandname = self._commandname_by_commandcode(commandcode) - self.logger.debug(f'send_init_commands {commandname}') - self._send_command(commandname) - self._initread = True - self.logger.debug(f'self._initread = {self._initread}') - - # - # send and receive commands - # - - def _read_timers(self): - ''' - Read all configured timer values from device and create uzsu timer dict - ''' - if self._application_timer is not []: - self.logger.debug('Starting timer read commands.') - for timer_app in self._application_timer: - for commandcode in self._application_timer[timer_app]['commandcodes']: - commandname = self._commandname_by_commandcode(commandcode) - self.logger.debug(f'send_timer_commands {commandname}') - self._send_command(commandname) - self._timerread = True - self.logger.debug(f'Timer Readout done = {self._timerread}') - self._viess_dict_to_uzsu_dict() - - def _send_command(self, commandname, value=None): - ''' - Create formatted command sequence from command name and send to device - - Note: The implementation detail results in "write if value present, read if value is None". - I have not found anything wrong with this; if any use case needs a specific read/write - selection, please tell me. - - :param commandname: Command for which to create command sequence as defined in commands.py - :type commandname: str - :param value: Value to write to device, None if command is read command - ''' - if value is not None: - self.logger.debug(f'Got a new write job: Command {commandname} with value {value}') - else: - self.logger.debug(f'Got a new read job: Command {commandname}') - - # Build packet with value bytes for write commands - (packet, responselen) = self._build_command_packet(commandname, value) - - # quit if no packet (error on packet build) - if packet is None: - return False - - if value is not None and self._protocol == 'KW': - read_response = False - else: - read_response = True - - # hand over built packet to send_command_packet - response_packet = self._send_command_packet(packet, responselen) - - # process response - if response_packet is None: - return False - - result = self._process_response(response_packet, commandname, read_response) - return result - - def _KW_send_multiple_read_commands(self, commandcodes): - ''' - Takes list of commandnames, builds all command packets and tries to send them in one go. - This only works for read commands and only with KW protocol. - On error the whole remaining read process is aborted, no retries or continuation is attempted. - - :param commandnames: List of commands for which to create command sequence as defined in commands.py - :type commandname: str - ''' - if self._protocol != 'KW': - self.logger.error(f'Called _KW_send_multiple_read_commands, but protocol is {self._protocol}. 
This shouldn\'t happen..') - return - - self.logger.debug(f'Got a new bulk read job: Commands {commandcodes}') - - bulk = {} - - # Build packets with value bytes for write commands - for addr in commandcodes: - commandname = self._commandname_by_commandcode(addr) - (packet, responselen) = self._build_command_packet(commandname, None, True) - - if packet: - bulk[addr] = {'packet': packet, 'responselen': responselen, 'command': commandname} - - # quit if no packet (error on packet build) - if not bulk: - return - - if not self._connected: - self.logger.error('Not connected, trying to reconnect.') - if not self._connect(): - self.logger.error('Could not connect to serial device') - return + # first, initialize Viessmann object for use + self.alive = True + self._parameters['viess_proto'] = protocol + self._parameters['protocol'] = 'viessmann' + self._get_connection() + self._dispatch_callback = self._cb_standalone - self._lock.acquire() + err = None + res = None try: - self._init_communication() - - replies = {} - - if not self._KW_get_sync(): - return - - first_cmd = True - first_packet = bytearray(self._int2bytes(self._controlset['StartByte'], 1)) - - for addr in bulk.keys(): - - if first_cmd: - # make sure that the first sent packet has the StartByte (0x01) lead byte set - # this way the first packet actually sent has the start byte, regardless of bulk.keys() order - first_packet.extend(bulk[addr]['packet']) - bulk[addr]['packet'] = first_packet - first_cmd = False - - # send query - try: - self._send_bytes(bulk[addr]['packet']) - self.logger.debug(f'Successfully sent packet: {self._bytes2hexstring(bulk[addr]["packet"])}') - except IOError as io: - raise IOError(f'IO Error: {io}') - return - except Exception as e: - raise Exception(f'Exception while sending: {e}') - return - - # receive response - replies[addr] = bytearray() - try: - self.logger.debug(f'Trying to receive {bulk[addr]["responselen"]} bytes of the response') - chunk = self._read_bytes(bulk[addr]['responselen']) - - self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') - if len(chunk) != 0: - replies[addr].extend(chunk) - else: - self.logger.error(f'Received 0 bytes chunk from {addr} - this probably is a communication error, possibly a wrong datapoint address?') - return - except IOError as io: - raise IOError(f'IO Error: {io}') - return - except Exception as e: - raise Exception(f'Error receiving response: {e}') - return - - # sent all read requests, time to parse the replies - # do this inside the _lock-block so this doesn't interfere with - # possible cyclic read data assignments - for addr in bulk.keys(): - if len(replies[addr]) > 0: - self._process_response(replies[addr], bulk[addr]['command'], True) - - except IOError as io: - self.logger.error(f'KW_send_multiple_read_commands failed with IO error: {io}') - self.logger.error('Trying to reconnect (disconnecting, connecting') - self._disconnect() - return + res = self._connection.open() except Exception as e: - self.logger.error(f'KW_send_multiple_read_commands failed with error: {e}') - return - finally: - try: - self._lock.release() - except RuntimeError: - pass - - def _KW_get_sync(self): - ''' - Try to get a sync packet (0x05) from heating system to be able to send commands - - :return: True if sync packet received, False otherwise (after retries) - :rtype: bool - ''' - if not self._connected or self._protocol != 'KW': - return False # don't even try. 
We only want to be called by _send_command_packet, which just before executed connect() - - retries = 5 - - # try to reset communication, especially if previous P300 comms is still open - self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) - - attempt = 0 - while attempt < retries: - self.logger.debug(f'Starting sync loop - attempt {attempt + 1}/{retries}') - - self._serial.reset_input_buffer() - chunk = self._read_bytes(1) - # enable for 'raw' debugging - # self.logger.debug(f'sync loop - got {self._bytes2hexstring(chunk)}') - if chunk == self._int2bytes(self._controlset['Not_initiated'], 1, False): - self.logger.debug('Got sync. Commencing command send') - return True - time.sleep(.8) - attempt = attempt + 1 - self.logger.error(f'Sync not acquired after {attempt} attempts') - self._disconnect() - - return False - - def _send_command_packet(self, packet, packetlen_response): - ''' - Send command sequence to device - - :param packet: Command sequence to send - :type packet: bytearray - :param packetlen_response: number of bytes expected in reply - :type packetlen_response: int - :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) - :type read_response: bool - :return: Response packet (bytearray) if no error occured, None otherwise - ''' - if not self._connected: - self.logger.error('Not connected, trying to reconnect.') - if not self._connect(): - self.logger.error('Could not connect to serial device') - return None + err = e + if not res: + self.logger.info(f'Connection to {serialport} failed. Please check connection. {err if err else ""}') + return None - self._lock.acquire() + res = None try: - if not self._initialized or (time.time() - 500) > self._lastbytetime: - if self._protocol == 'P300': - if self._initialized: - self.logger.debug('Communication timed out, trying to reestablish communication.') - else: - self.logger.info('Communication no longer initialized, trying to reestablish.') - self._init_communication() - - if self._initialized: - # send query - try: - if self._protocol == 'KW': - # try to get sync, exit if it fails - if not self._KW_get_sync(): - return None - - self._send_bytes(packet) - self.logger.debug(f'Successfully sent packet: {self._bytes2hexstring(packet)}') - except IOError as io: - raise IOError(f'IO Error: {io}') - return None - except Exception as e: - raise Exception(f'Exception while sending: {e}') - return None - - # receive response - response_packet = bytearray() - self.logger.debug(f'Trying to receive {packetlen_response} bytes of the response') - chunk = self._read_bytes(packetlen_response) - - if self._protocol == 'P300': - self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') - if len(chunk) != 0: - if chunk[:1] == self._int2bytes(self._controlset['Error'], 1): - self.logger.error(f'Interface returned error! response was: {chunk}') - elif len(chunk) == 1 and chunk[:1] == self._int2bytes(self._controlset['Not_initiated'], 1): - self.logger.error('Received invalid chunk, connection not initialized. Forcing re-initialize...') - self._initialized = False - elif chunk[:1] != self._int2bytes(self._controlset['Acknowledge'], 1): - self.logger.error(f'Received invalid chunk, not starting with ACK! response was: {chunk}') - self._error_count += 1 - if self._error_count >= 5: - self.logger.warning('Encountered 5 invalid chunks in sequence. 
Maybe communication was lost, re-initializing') - self._initialized = False - else: - response_packet.extend(chunk) - self._error_count = 0 - return response_packet - else: - self.logger.error(f'Received 0 bytes chunk - ignoring response_packet! chunk was: {chunk}') - elif self._protocol == 'KW': - self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') - if len(chunk) != 0: - response_packet.extend(chunk) - return response_packet - else: - self.logger.error('Received 0 bytes chunk - this probably is a communication error, possibly a wrong datapoint address?') - else: - raise Exception('Interface not initialized!') - except IOError as io: - self.logger.error(f'send_command_packet failed with IO error: {io}') - self.logger.error('Trying to reconnect (disconnecting, connecting') - self._disconnect() + res = self._connection._send_init_on_send() except Exception as e: - self.logger.error(f'send_command_packet failed with error: {e}') - finally: - try: - self._lock.release() - except RuntimeError: - pass - - # if we didn't return with data earlier, we hit an error. Act accordingly - return None - - def _send_bytes(self, packet): - ''' - Send data to device - - :param packet: Data to be sent - :type packet: bytearray - :return: Returns False, if no connection is established or write failed; True otherwise - :rtype: bool - ''' - if not self._connected: + err = e + if not res: + self.logger.info(f'Could not initialize communication using protocol {protocol}. {err if err else ""}') return False + self._result = None try: - self._serial.write(packet) - except serial.SerialTimeoutException: - return False - - # self.logger.debug(f'send_bytes: Sent {packet}') - return True - - def _read_bytes(self, length): - ''' - Try to read bytes from device - - :param length: Number of bytes to read - :type length: int - :return: Number of bytes actually read - :rtype: int - ''' - if not self._connected: - return 0 - - totalreadbytes = bytes() - # self.logger.debug('read_bytes: Start read') - starttime = time.time() - - # don't wait for input indefinitely, stop after self._timeout seconds - while time.time() <= starttime + self._timeout: - readbyte = self._serial.read() - self._lastbyte = readbyte - # self.logger.debug(f'read_bytes: Read {readbyte}') - if readbyte != b'': - self._lastbytetime = time.time() - else: - return totalreadbytes - totalreadbytes += readbyte - if len(totalreadbytes) >= length: - return totalreadbytes - - # timeout reached, did we read anything? - if not totalreadbytes: - - # just in case, force plugin to reconnect - self._connected = False - self._initialized = False - - # return what we got so far, might be 0 - return totalreadbytes - - def _process_response(self, response, commandname='', read_response=True, update_item=True): - ''' - Process device response data, try to parse type and value and assign value to associated item - - :param response: Data received from device - :type response: bytearray - :param commandname: Commandname used for request (only needed for KW protocol) - :type commandname: str - :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) - :type read_response: bool - :param update_item: True if value should be written to corresponding item - :type update_item: bool - ''' - res = self._parse_response(response, commandname, read_response) - - # None means error on read/parse or write reponse. 
Errors are already logged, so no further action necessary - if res is None: - return - - # write returns True on success - if res is True: - return True - - # assign results - (value, commandcode) = res - - # get command config - commandname = self._commandname_by_commandcode(commandcode) - commandconf = self._commandset[commandname] - commandunit = commandconf['unit'] - - # update items if commandcode is in item-dict - if commandcode in self._params.keys(): - - # Find corresponding item - item = self._params[commandcode]['item'] - self.logger.debug(f'Corresponding item {item} for command {commandname}') - - # Update item - if update_item: - self.logger.debug(f'Updating item {item} with value {value}') - if commandunit == 'CT': - # Split timer list and put it the child items, which were created by struct.timer in iso time format - try: - for child in item.return_children(): - child_item = str(child.id()) - if child_item.endswith('an1'): - child(value[0]['An'], self.get_shortname()) - # child(datetime.strptime(value[0]['An'], '%H:%M').time().isoformat()) - elif child_item.endswith('aus1'): - child(value[0]['Aus'], self.get_shortname()) - elif child_item.endswith('an2'): - child(value[1]['An'], self.get_shortname()) - elif child_item.endswith('aus2'): - child(value[1]['Aus'], self.get_shortname()) - elif child_item.endswith('an3'): - child(value[2]['An'], self.get_shortname()) - elif child_item.endswith('aus3'): - child(value[2]['Aus'], self.get_shortname()) - elif child_item.endswith('an4'): - child(value[3]['An'], self.get_shortname()) - elif child_item.endswith('aus4'): - child(value[3]['Aus'], self.get_shortname()) - except KeyError: - self.logger.debug('No child items for timer found (use timer.structs) or value no valid') - - # save value to item - item(value, self.get_shortname()) - else: - self.logger.debug(f'Not updating item {item} as not requested') - else: - if (commandcode not in self._timer_cmds) and update_item: - self.logger.error(f'Should update item with response to a command not in item config: {commandcode}. This shouldn''t happen..') - - # Process response for timers in timer-dict using the commandcode - if commandcode in self._timer_cmds: - self.logger.debug(f'process_response_timer: {commandcode}') - - # Find timer application - for timer in self._application_timer: - if commandcode in self._application_timer[timer]['commandcodes']: - timer_app = timer - - # Fill timer dict - if timer_app not in self._viess_timer_dict: - self._viess_timer_dict[timer_app] = {} - - self._viess_timer_dict[timer_app][commandname] = value - self.logger.debug(f'Viessmann timer dict: {self._viess_timer_dict}') - -# -# convert data types -# - - def _build_valuebytes_from_value(self, value, commandconf): - ''' - Convert value to formatted bytearray for write commands - :param value: Value to send - :param commandconf: configuration set for requested command - :type commandconf: dict - :return: bytearray with value if successful, None if error - ''' - try: - commandvaluebytes = commandconf['len'] - commandunit = commandconf['unit'] - set_allowed = bool(commandconf['set']) - if 'min_value' in commandconf: - min_allowed_value = commandconf['min_value'] - else: - min_allowed_value = None - if 'max_value' in commandconf: - max_allowed_value = commandconf['max_value'] - else: - max_allowed_value = None - except KeyError: - self.logger.error(f'Error in command configuration {commandconf}, aborting') - return None - - # unit HEX = hex values as string is only for read requests (debugging). 
Don't even try... - if commandunit == 'HEX': - - self.logger.error(f'Error in command configuration {commandconf}: unit HEX is not writable, aborting') - return None - - if commandunit == 'BA': - - # try to convert BA string to byte value, setting str values will fail - # this will not work properly if multiple entries have the same value! - try: - value = int(dict(map(reversed, self._operatingmodes.items()))[value]) - commandunit = 'IUNON' - except KeyError: - # value doesn't exist in operatingmodes. don't know what to do - self.logger.error(f'Value {value} not defined in operating modes for device {self._heating_type}') - return None - - try: - unitconf = self._unitset[commandunit] - except KeyError: - self.logger.error(f'Error: unit {commandunit} not found in unit set {self._unitset}') - return None - - try: - valuetype = unitconf['type'] - valuereadtransform = unitconf['read_value_transform'] - except KeyError: - self.logger.error(f'Error in unit configuration {unitconf} for unit {commandunit}, aborting') - return None - - self.logger.debug(f'Unit defined to {commandunit} with config{unitconf}') - - # check if writing is allowed for this address - if not set_allowed: - self.logger.error(f'Command {self._commandname_by_commandcode(commandconf["addr"])} is not configured for writing') - return None - - # check if value is empty - if value is None or value == '': - self.logger.error(f'Command value for command {self._commandname_by_commandcode(commandconf["addr"])} is empty, not possible to send (check item, command and unit configuration') - return None - - # check if value to be written is in allowed range - if (min_allowed_value is not None and min_allowed_value > value) or (max_allowed_value is not None and max_allowed_value < value): - self.logger.error(f'Invalid range - value {value} not in range [{min_allowed_value}, {max_allowed_value}]') - return None - - try: - # Create valuebytes - if valuetype == 'datetime' or valuetype == 'date': - try: - datestring = dateutil.parser.isoparse(value).strftime('%Y%m%d%w%H%M%S') - # Viessmann erwartet 2 digits für Wochentag, daher wird hier noch eine 0 eingefügt - datestring = datestring[:8] + '0' + datestring[8:] - valuebytes = bytes.fromhex(datestring) - self.logger.debug(f'Created value bytes for type {valuetype} as bytes: {valuebytes}') - except Exception as e: - self.logger.error(f'Incorrect data format, YYYY-MM-DD expected; Error: {e}') - return None - elif valuetype == 'timer': - try: - times = '' - for switching_time in value: - an = self._encode_timer(switching_time['An']) - aus = self._encode_timer(switching_time['Aus']) - times += f'{an:02x}{aus:02x}' - valuebytes = bytes.fromhex(times) - self.logger.debug(f'Created value bytes for type {valuetype} as hexstring: {self._bytes2hexstring(valuebytes)} and as bytes: {valuebytes}') - except Exception as e: - self.logger.error(f'Incorrect data format, (An: hh:mm Aus: hh:mm) expected; Error: {e}') - return None - # valuetype 'list' is transformed to listentry via index on read, but written directly as int, so numerical transform could apply - elif valuetype == 'integer' or valuetype == 'list': - # transform value is numerical -> multiply value with it - if self._isfloat(valuereadtransform): - value = self._value_transform_write(value, valuereadtransform) - self.logger.debug(f'Transformed value using method "* {valuereadtransform}" to {value}') - elif valuereadtransform == 'bool': - value = bool(value) - else: - value = int(value) - valuebytes = self._int2bytes(value, commandvaluebytes, 
byteorder='little') - self.logger.debug(f'Created value bytes for type {valuetype} as hexstring: {self._bytes2hexstring(valuebytes)} and as bytes: {valuebytes}') - else: - self.logger.error(f'Type {valuetype} not definied for creating write command bytes') - return None + self.read_temp_addr('00f8', 2, 0, False) except Exception as e: - self.logger.debug(f'_build_valuebytes_from_value failed with unexpected error: {e}') - return None - - return valuebytes + err = e - def _build_command_packet(self, commandname, value=None, KWFollowUp=False): - ''' - Create formatted command sequence from command name. - If value is None, a read packet will be built, a write packet otherwise + if self._result is None: + raise ValueError(f'Error on communicating with the device, no response received. {err if err else ""}') - :param commandname: Command for which to create command sequence as defined in commands.py - :type commandname: str - :param value: Write value if command is to be written - :param KWFollowUp: create read sequence for KW protocol if multiple read commands will be sent without individual sync - :type KWFollowUp: bool - :return: tuple of (command sequence, expected response len), (None, 0) if error occured - :rtype: tuple (bytearray, int) - ''' + # let it go... + self._connection.close() - # A read_request telegram looks like this: - # P300: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of value bytes expected in answer (1 byte), checksum (1 byte) - # KW: startbyte (1 byte), read/write (1 byte), addr (2 bytes), amount of value bytes expected in answer (1 byte) - # A write_request telegram looks like this: - # P300: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of bytes to be written (1 byte), value (bytes as per last byte), checksum (1 byte) - # KW: startbyte (1 byte), read/write (1 byte), addr (2 bytes), length of value (1 byte), value bytes (1-4 bytes) - - write = value is not None - self.logger.debug(f'Build {"write" if write else "read"} packet for command {commandname}') - - # Get command config - commandconf = self._commandset[commandname] - commandcode = (commandconf['addr']).lower() - commandvaluebytes = commandconf['len'] - - if write: - valuebytes = self._build_valuebytes_from_value(value, commandconf) - # can't write 'no value'... - if not valuebytes: - return (None, 0) - - # Calculate length of payload (only needed for P300) - payloadlength = int(self._controlset.get('Command_bytes_write', 0)) + int(commandvaluebytes) - self.logger.debug(f'Payload length is: {payloadlength} bytes') - - # Build packet for read commands - # - # at the moment this only has to differentiate between protocols P300 and KW - # these are basically similar, only P300 is an evolution of KW adding - # stateful connections, command length and checksum - # - # so for the time being the easy way is one code path for both protocols which - # omits P300 elements from the built byte string. 
- # Later additions of other protocols (like GWG) might have to bring a second - # code path for proper processing - packet = bytearray() - if not KWFollowUp: - packet.extend(self._int2bytes(self._controlset['StartByte'], 1)) - if self._protocol == 'P300': - if write: - packet.extend(self._int2bytes(payloadlength, 1)) - else: - packet.extend(self._int2bytes(self._controlset['Command_bytes_read'], 1)) - packet.extend(self._int2bytes(self._controlset['Request'], 1)) - - if write: - packet.extend(self._int2bytes(self._controlset['Write'], 1)) - else: - packet.extend(self._int2bytes(self._controlset['Read'], 1)) - packet.extend(bytes.fromhex(commandcode)) - packet.extend(self._int2bytes(commandvaluebytes, 1)) - if write: - packet.extend(valuebytes) - if self._protocol == 'P300': - packet.extend(self._int2bytes(self._calc_checksum(packet), 1)) - - if self._protocol == 'P300': - responselen = int(self._controlset['Command_bytes_read']) + 4 + (0 if write else int(commandvaluebytes)) + if self._result is not None: + return self._result else: - responselen = 1 if write else int(commandvaluebytes) - - if write: - self.logger.debug(f'Created command {commandname} to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet} with value {value} (transformed to value byte {self._bytes2hexstring(valuebytes)})') - else: - self.logger.debug(f'Created command {commandname} to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet}') - - return (packet, responselen) - - def _parse_response(self, response, commandname='', read_response=True): - ''' - Process device response data, try to parse type and value - - :param response: Data received from device - :type response: bytearray - :param commandname: Commandname used for request (only needed for KW protocol) - :type commandname: str - :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) - :type read_response: bool - :return: tuple of (parsed response value, commandcode) or None if error - ''' - if self._protocol == 'P300': - - # A read_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of valuebytes (1 byte), value (bytes as per last byte), checksum (1 byte) - # A write_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of bytes written (1 byte), checksum (1 byte) - - # Validate checksum - checksum = self._calc_checksum(response[1:len(response) - 1]) # first, cut first byte (ACK) and last byte (checksum) and then calculate checksum - received_checksum = response[len(response) - 1] - if received_checksum != checksum: - self.logger.error(f'Calculated checksum {checksum} does not match received checksum of {received_checksum}! 
Ignoring reponse') - return None - - # Extract command/address, valuebytes and valuebytecount out of response - commandcode = response[5:7].hex() - responsetypecode = response[3] # 0x00 = query, 0x01 = reply, 0x03 = error - responsedatacode = response[4] # 0x01 = ReadData, 0x02 = WriteData, 0x07 = Function Call - valuebytecount = response[7] - - # Extract databytes out of response - rawdatabytes = bytearray() - rawdatabytes.extend(response[8:8 + (valuebytecount)]) - elif self._protocol == 'KW': - - # imitate P300 response code data for easier combined handling afterwards - # a read_response telegram consists only of the value bytes - # a write_response telegram is 0x00 for OK, 0xXX for error - if commandname == '': - self.logger.error('trying to parse KW protocol response, but commandname not set in _parse_response. This should not happen...') - return None - - responsetypecode = 1 - commandcode = self._commandset[commandname]['addr'].lower() - valuebytecount = len(response) - rawdatabytes = response - - if read_response: - # value response to read request, error detection by empty = no response - responsedatacode = 1 - if len(rawdatabytes) == 0: - # error, no answer means wrong address (?) - responsetypecode = 3 - else: - # status response to write request - responsedatacode = 2 - if (len(rawdatabytes) == 1 and rawdatabytes[0] != 0) or len(rawdatabytes) == 0: - # error if status reply is not 0x00 - responsetypecode = 3 - - self.logger.debug(f'Response decoded to: commandcode: {commandcode}, responsedatacode: {responsedatacode}, valuebytecount: {valuebytecount}, responsetypecode: {responsetypecode}') - self.logger.debug(f'Rawdatabytes formatted: {self._bytes2hexstring(rawdatabytes)} and unformatted: {rawdatabytes}') - - # Process response for items if response and not error - # added: only in P300 or if read_response is set, do not try if KW replies with 0x00 (OK) - if responsedatacode == 1 and responsetypecode != 3 and (self._protocol == 'P300' or read_response): - - # parse response if command config is available - commandname = self._commandname_by_commandcode(commandcode) - if commandname is None: - self.logger.error(f'Received response for unknown address point {commandcode}') - return None - - # Get command and respective unit config - commandconf = self._commandset[commandname] - commandvaluebytes = commandconf['len'] - commandunit = commandconf['unit'] - unitconf = self._unitset.get(commandunit) - if not unitconf: - self.logger.error(f'Unit configuration not found for unit {commandunit} in protocol {self._protocol}. 
This is a configuration error in commands.py, please fix') - return None - commandsigned = unitconf['signed'] - valuetransform = unitconf['read_value_transform'] - - # start value decode - if commandunit == 'CT': - timer = self._decode_timer(rawdatabytes.hex()) - # fill list - timer = [{'An': on_time, 'Aus': off_time} - for on_time, off_time in zip(timer, timer)] - value = timer - self.logger.debug(f'Matched command {commandname} and read transformed timer {value} and byte length {commandvaluebytes}') - elif commandunit == 'TI': - # decode datetime - value = datetime.strptime(rawdatabytes.hex(), '%Y%m%d%W%H%M%S').isoformat() - self.logger.debug(f'Matched command {commandname} and read transformed datetime {value} and byte length {commandvaluebytes}') - elif commandunit == 'DA': - # decode date - value = datetime.strptime(rawdatabytes.hex(), '%Y%m%d%W%H%M%S').date().isoformat() - self.logger.debug(f'Matched command {commandname} and read transformed datetime {value} and byte length {commandvaluebytes}') - elif commandunit == 'ES': - # erstes Byte = Fehlercode; folgenden 8 Byte = Systemzeit - errorcode = (rawdatabytes[:1]).hex() - # errorquerytime = (rawdatabytes[1:8]).hex() - value = self._error_decode(errorcode) - self.logger.debug(f'Matched command {commandname} and read transformed errorcode {value} (raw value was {errorcode}) and byte length {commandvaluebytes}') - elif commandunit == 'SC': - # erstes Byte = Anlagenschema - systemschemescode = (rawdatabytes[:1]).hex() - value = self._systemscheme_decode(systemschemescode) - self.logger.debug(f'Matched command {commandname} and read transformed system scheme {value} (raw value was {systemschemescode}) and byte length {commandvaluebytes}') - elif commandunit == 'BA': - operatingmodecode = (rawdatabytes[:1]).hex() - value = self._operatingmode_decode(operatingmodecode) - self.logger.debug(f'Matched command {commandname} and read transformed operating mode {value} (raw value was {operatingmodecode}) and byte length {commandvaluebytes}') - elif commandunit == 'DT': - # device type has 8 bytes, but first 4 bytes are device type indicator - devicetypebytes = rawdatabytes[:2].hex() - value = self._devicetype_decode(devicetypebytes).upper() - self.logger.debug(f'Matched command {commandname} and read transformed device type {value} (raw value was {devicetypebytes}) and byte length {commandvaluebytes}') - elif commandunit == 'SN': - # serial number has 7 bytes, - serialnumberbytes = rawdatabytes[:7] - value = self._serialnumber_decode(serialnumberbytes) - self.logger.debug(f'Matched command {commandname} and read transformed device type {value} (raw value was {serialnumberbytes}) and byte length {commandvaluebytes}') - elif commandunit == 'HEX': - # hex string for debugging purposes - hexstr = rawdatabytes.hex() - value = ' '.join([hexstr[i:i + 2] for i in range(0, len(hexstr), 2)]) - self.logger.debug(f'Read hex bytes {value}') - else: - rawvalue = self._bytes2int(rawdatabytes, commandsigned) - value = self._value_transform_read(rawvalue, valuetransform) - self.logger.debug(f'Matched command {commandname} and read transformed value {value} (integer raw value was {rawvalue}) and byte length {commandvaluebytes}') - - # assign to dict for use by other functions - self._last_values[commandcode] = value - - return (value, commandcode) - - # Handling of write command response if not error - elif responsedatacode == 2 and responsetypecode != 3: - self.logger.debug(f'Write request of adress {commandcode} successfull writing {valuebytecount} bytes') - 
return True - else: - self.logger.error(f'Write request of adress {commandcode} NOT successfull writing {valuebytecount} bytes') return None - def _viess_dict_to_uzsu_dict(self): - ''' - Convert data read from device to UZSU compatible struct. - Input is taken from self._viess_timer_dict, output is written to - self._uzsu_dict - ''' - dict_timer = {} - empty_time = '00:00' - shitems = Items.get_instance() - - try: - sunset = shitems.return_item('env.location.sunset')().strftime('%H:%M') - sunrise = shitems.return_item('env.location.sunrise')().strftime('%H:%M') - except (AttributeError, ValueError): - sunset = '21:00' - sunrise = '06:00' - - # convert all switching times with corresponding app and days to timer-dict - for application in self._viess_timer_dict: - if application not in dict_timer: - dict_timer[application] = {} - for application_day in self._viess_timer_dict[application]: - timer = self._viess_timer_dict[application][application_day] - day = application_day[(application_day.rfind('_') + 1):len(application_day)].lower() - - # normalize days - for element in self._wochentage: - if day in self._wochentage[element]: - weekday = element - - for entry in timer: - for event, sw_time in entry.items(): - if sw_time != empty_time: - value = 1 if event == 'An' else 0 - if sw_time not in dict_timer[application]: - dict_timer[application][sw_time] = {} - if value not in dict_timer[application][sw_time]: - dict_timer[application][sw_time][value] = [] - dict_timer[application][sw_time][value].append(weekday) - - self.logger.debug(f'Viessmann timer dict for UZSU: {dict_timer}') - - # find items, read UZSU-dict, convert to list of switching times, update item - for application in dict_timer: - item = self._application_timer[application]['item'] - - # read UZSU-dict (or use preset if empty) - uzsu_dict = item() - if not item(): - uzsu_dict = {'lastvalue': '0', 'sunset': sunset, 'list': [], 'active': True, 'interpolation': {'initage': '', 'initialized': True, 'itemtype': 'bool', 'interval': '', 'type': 'none'}, 'sunrise': sunrise} - - # create empty list - uzsu_dict['list'] = [] - - # fill list with switching times - for sw_time in sorted(dict_timer[application].keys()): - for key in dict_timer[application][sw_time]: - rrule = 'FREQ=WEEKLY;BYDAY=' + ','.join(dict_timer[application][sw_time][key]) - uzsu_dict['list'].append({'time': sw_time, 'rrule': rrule, 'value': str(key), 'active': True}) - - # update item - item(uzsu_dict, self.get_shortname()) - - def _uzsu_dict_to_viess_timer(self, timer_app, uzsu_dict): - ''' - Convert UZSU dict from item/visu for selected application into separate - on/off time events and write all timers to the device - - :param timer_app: Application for which the timer should be written, as in commands.py - :type timer_app: str - :param uzsu_dict: UZSU-compatible dict with timer data - :type uzsu_dict: dict - ''' - if self._timerread: - - # set variables - commandnames = set() - timer_dict = {} - an = {} - aus = {} - - # quit if timer_app not defined - if timer_app not in self._application_timer: - return - - commandnames.update([self._commandname_by_commandcode(code) for code in self._application_timer[timer_app]['commandcodes']]) - self.logger.debug(f'Commandnames: {commandnames}') - - # find switching times and create lists for on and off operations - for sw_time in uzsu_dict['list']: - myDays = sw_time['rrule'].split(';')[1].split('=')[1].split(',') - for day in myDays: - if sw_time['value'] == '1' and sw_time['active']: - if day not in an: - an[day] = [] - 
an[day].append(sw_time['time']) - for day in myDays: - if sw_time['value'] == '0' and sw_time['active']: - if day not in aus: - aus[day] = [] - aus[day].append(sw_time['time']) - - # sort daily lists - for day in an: - an[day].sort() - self.logger.debug(f'An: {an}') - for day in aus: - aus[day].sort() - self.logger.debug(f'Aus: {aus}') - - # create timer dict in Viessmann format for all weekdays - for commandname in commandnames: - self.logger.debug(f'Commandname in process: {commandname}') - # create empty dict - timer_dict[commandname] = [{'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}] - # get current day - wday = commandname[(commandname.rfind('_') + 1):len(commandname)].lower() - # normalize day - for element in self._wochentage: - if wday in self._wochentage[element]: - wday = element - # transfer switching times - for idx, val in enumerate(an[wday]): - timer_dict[commandname][idx]['An'] = val - for idx, val in enumerate(aus[wday]): - timer_dict[commandname][idx]['Aus'] = val - self.logger.debug(f'Timer-dict for update of items: {timer_dict}') - - # write all timer dicts to device - for commandname in timer_dict: - value = timer_dict[commandname] - self.logger.debug(f'Got item value to be written: {value} on command name {commandname}') - self._send_command(commandname, value) - - def _calc_checksum(self, packet): - ''' - Calculate checksum for P300 protocol packets - - :parameter packet: Data packet for which to calculate checksum - :type packet: bytearray - :return: Calculated checksum - :rtype: int - ''' - checksum = 0 - if len(packet) > 0: - if packet[:1] == b'\x41': - packet = packet[1:] - checksum = sum(packet) - checksum = checksum - int(checksum / 256) * 256 - else: - self.logger.error('bytes to calculate checksum from not starting with start byte') - else: - self.logger.error('No bytes received to calculate checksum') - return checksum - - def _int2bytes(self, value, length, signed=False, byteorder='big'): - ''' - Convert value to bytearray with respect to defined length and sign format. 
- Value exceeding limit set by length and sign will be truncated - - :parameter value: Value to convert - :type value: int - :parameter length: number of bytes to create - :type length: int - :parameter signed: True if result should be a signed int, False for unsigned - :type signed: bool - :return: Converted value - :rtype: bytearray - ''' - value = value % (2 ** (length * 8)) - return value.to_bytes(length, byteorder=byteorder, signed=signed) - - def _bytes2int(self, rawbytes, signed): - ''' - Convert bytearray to value with respect to sign format - - :parameter rawbytes: Bytes to convert - :type value: bytearray - :parameter signed: True if result should be a signed int, False for unsigned - :type signed: bool - :return: Converted value - :rtype: int - ''' - return int.from_bytes(rawbytes, byteorder='little', signed=signed) - - def _bytes2hexstring(self, bytesvalue): - ''' - Create hex-formatted string from bytearray - :param bytesvalue: Bytes to convert - :type bytesvalue: bytearray - :return: Converted hex string - :rtype: str - ''' - return ''.join(f'{c:02x}' for c in bytesvalue) - - def _decode_rawvalue(self, rawdatabytes, commandsigned): - ''' - Convert little-endian byte sequence to int value - - :param rawdatabytes: Bytes to convert - :type rawdatabytes: bytearray - :param commandsigned: 'signed' if value should be interpreted as signed - :type commandsigned: str - :return: Converted value - :rtype: int - ''' - rawvalue = 0 - for i in range(len(rawdatabytes)): - leftbyte = rawdatabytes[0] - value = int(leftbyte * pow(256, i)) - rawvalue += value - rawdatabytes = rawdatabytes[1:] - # Signed/Unsigned berücksichtigen - if commandsigned == 'signed' and rawvalue > int(pow(256, i) / 2 - 1): - rawvalue = (pow(256, i) - rawvalue) * (-1) - return rawvalue - - def _decode_timer(self, rawdatabytes): - ''' - Generator to convert byte sequence to a number of time strings hh:mm - - :param rawdatabytes: Bytes to convert - :type rawdatabytes: bytearray - ''' - while rawdatabytes: - hours, minutes = divmod(int(rawdatabytes[:2], 16), 8) - if minutes >= 6 or hours >= 24: - # not a valid time - yield '00:00' - else: - yield f'{hours:02d}:{(minutes * 10):02d}' - rawdatabytes = rawdatabytes[2:] - return None - - def _encode_timer(self, switching_time): - ''' - Convert time string to encoded time value for timer application - - :param switching_time: time value in 'hh:mm' format - :type switching_time: str - :return: Encoded time value - :rtype: int - ''' - if switching_time == '00:00': - return 0xff - clocktime = re.compile(r'(\d\d):(\d\d)') - mo = clocktime.search(switching_time) - number = int(mo.group(1)) * 8 + int(mo.group(2)) // 10 - return number - - def _value_transform_read(self, value, transform): - ''' - Transform value according to protocol specification for writing to device - - :param value: Value to transform - :param transform: Specification for transforming - :return: Transformed value - ''' - if transform == 'bool': - return bool(value) - elif self._isfloat(transform): - return round(value / float(transform), 2) - else: - return int(value) - - def _value_transform_write(self, value, transform): - ''' - Transform value according to protocol requirement after reading from device - - :param value: Value to transform - :type value: int - :param transform: Specification for transforming - :type transform: int - :return: Transformed value - :rtype: int - ''' - # as transform and value can be float and by error possibly str, we try to float both - return int(float(value) * float(transform)) 
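# ----------------------------------------------------------------------------------
# Editor's note: a minimal, self-contained sketch (not part of the plugin) of the
# conversions implemented by _encode_timer / _decode_timer and _calc_checksum above.
# The _demo_* names are invented for this illustration, and the example telegram is
# only an assumed P300 read request (0x41 start byte, payload length, request 0x00,
# read 0x01, two address bytes, expected byte count, checksum) with an address chosen
# purely for demonstration.

def _demo_encode_timer(hhmm):
    # switching time 'hh:mm' -> hours * 8 + minutes // 10; 0xff marks an empty slot
    if hhmm == '00:00':
        return 0xff
    hours, minutes = (int(part) for part in hhmm.split(':'))
    return hours * 8 + minutes // 10

def _demo_decode_timer(timer_byte):
    # inverse of the encoding above; minutes are stored in 10-minute steps
    hours, minutes = divmod(timer_byte, 8)
    return f'{hours:02d}:{minutes * 10:02d}'

def _demo_p300_checksum(packet):
    # byte sum of the telegram after the 0x41 start byte, truncated to one byte
    return sum(packet[1:]) % 256

assert _demo_encode_timer('06:30') == 0x33
assert _demo_decode_timer(0x33) == '06:30'
assert _demo_p300_checksum(bytes.fromhex('41 05 00 01 55 25 02')) == 0x82
# ----------------------------------------------------------------------------------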
- - def _error_decode(self, value): - ''' - Decode error value from device if defined, else return error as string - ''' - value = str(value).upper() - if value in self._errorset: - errorstring = str(self._errorset[value]) - else: - errorstring = str(value) - return errorstring - - def _systemscheme_decode(self, value): - ''' - Decode schema value from device if possible, else return schema as string - ''' - if value in self._systemschemes: - systemscheme = str(self._systemschemes[value]) - else: - systemscheme = str(value) - return systemscheme - - def _operatingmode_decode(self, value): - ''' - Decode operating mode value from device if possible, else return mode as string - ''' - if value in self._operatingmodes: - operatingmode = str(self._operatingmodes[value]) - else: - operatingmode = str(value) - return operatingmode - - def _devicetype_decode(self, value): - ''' - Decode device type value if possible, else return device type as string - ''' - if value in self._devicetypes: - devicetypes = str(self._devicetypes[value]) - else: - devicetypes = str(value) - return devicetypes - - def _serialnumber_decode(self, serialnumberbytes): - ''' - Decode serial number from device response - ''' - serialnumber = 0 - serialnumberbytes.reverse() - for byte in range(0, len(serialnumberbytes)): - serialnumber += (serialnumberbytes[byte] - 48) * 10 ** byte - return hex(serialnumber).upper() - - def _commandname_by_commandcode(self, commandcode): - ''' - Find matching command name from commands.py for given command address - - :param commandcode: address of command - :type commandcode: str - :return: name of matching command or None if not found - ''' - for commandname in self._commandset.keys(): - if self._commandset[commandname]['addr'].lower() == commandcode.lower(): - return commandname - return None - - def _isfloat(self, value): - ''' - Test if string is decimal number - - :param value: expression to test - :type value: str - :return: True if value can be converted to a float, False otherwise - ''' - try: - float(value) - return True - except ValueError: - return False - -# -# webinterface -# - - def init_webinterface(self): - ''' - Initialize the web interface for this plugin - - This method is only needed if the plugin is implementing a web interface - ''' - try: - self.mod_http = Modules.get_instance().get_module('http') # try/except to handle running in a core version that does not support modules - except NameError: - self.mod_http = None - if self.mod_http is None: - self.logger.warning('Not initializing the web interface') - return False - - if 'SmartPluginWebIf' not in list(sys.modules['lib.model.smartplugin'].__dict__): - self.logger.warning('Web interface needs SmartHomeNG v1.5 or later. 
Not initializing the web interface') - return False - - # set application configuration for cherrypy - webif_dir = self.path_join(self.get_plugin_dir(), 'webif') - config = { - '/': { - 'tools.staticdir.root': webif_dir, - }, - '/static': { - 'tools.staticdir.on': True, - 'tools.staticdir.dir': 'static' - } - } - - # Register the web interface as a cherrypy app - self.mod_http.register_webif(WebInterface(webif_dir, self, self._commandset), - self.get_shortname(), - config, - self.get_classname(), self.get_instance_name(), - description='') - - return True - - -# ------------------------------------------ -# Webinterface of the plugin -# ------------------------------------------ - -class WebInterface(SmartPluginWebIf): - - def __init__(self, webif_dir, plugin, cmdset): - ''' - Initialization of instance of class WebInterface - - :param webif_dir: directory where the webinterface of the plugin resides - :param plugin: instance of the plugin - :type webif_dir: str - :type plugin: object - ''' - self.logger = logging.getLogger(__name__) - self.webif_dir = webif_dir - self.plugin = plugin - self.tplenv = self.init_template_environment() - - self.items = Items.get_instance() - - self.cmdset = cmdset - - self._last_read = {} - self._last_read['last'] = {'addr': None, 'val': '', 'cmd': ''} - - self._read_addr = None - self._read_cmd = '' - self._read_val = '' - - @cherrypy.expose - def index(self, reload=None): - ''' - Build index.html for cherrypy - - Render the template and return the html file to be delivered to the browser - - :return: contents of the template after beeing rendered - ''' - tmpl = self.tplenv.get_template('index.html') - # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) - - return tmpl.render(p=self.plugin, - items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), - cmds=self.cmdset, - units=sorted(list(self.plugin._unitset.keys())), - last_read_addr=self._last_read['last']['addr'], - last_read_value=self._last_read['last']['val'], - last_read_cmd=self._last_read['last']['cmd'] - ) - - @cherrypy.expose - def submit(self, button=None, addr=None, length=0, unit=None, clear=False): - ''' - Submit handler for Ajax - ''' - if button is not None: - - read_val = self.plugin.read_addr(button) - if read_val is None: - self.logger.debug(f'Error trying to read addr {button} submitted by WebIf') - read_val = 'Fehler beim Lesen' - else: - read_cmd = self.plugin._commandname_by_commandcode(button) - if read_cmd is not None: - self._last_read[button] = {'addr': button, 'cmd': read_cmd, 'val': read_val} - self._last_read['last'] = self._last_read[button] - - elif addr is not None and unit is not None and length.isnumeric(): - - read_val = self.plugin.read_temp_addr(addr, int(length), unit) - if read_val is None: - self.logger.debug(f'Error trying to read custom addr {button} submitted by WebIf') - read_val = 'Fehler beim Lesen' - else: - self._last_read[addr] = {'addr': addr, 'cmd': f'custom ({addr})', 'val': read_val} - self._last_read['last'] = self._last_read[addr] - - elif clear: - for addr in self._last_read: - self._last_read[addr]['val'] = '' - self._last_read['last'] = {'addr': None, 'val': '', 'cmd': ''} - - cherrypy.response.headers['Content-Type'] = 'application/json' - return json.dumps(self._last_read).encode('utf-8') - - -# ------------------------------------------ -# The following code is for standalone use of the plugin to identify the device -# ------------------------------------------ - -def 
get_device_type(v, protocol): - - # try to connect and read device type info from 0x00f8 - print(f'Trying protocol {protocol} on device {serialport}') - - # first, initialize Viessmann object for use - v.alive = True - v._protocol = protocol - - # setup protocol controlset - v._controlset = commands.controlset[protocol] - res = v._connect() - if not res: - logger.info(f'Connection to {serialport} failed. Please check connection.') - return None - - res = v._init_communication() - if not res: - logger.info(f'Could not initialize communication using protocol {protocol}.') - return False - - # we are connected to the IR head - - # set needed unit - v._unitset = { - 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'} - } - - # set needed command. DeviceType command is (hopefully) the same in all devices... - v._commandset = { - 'DT': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, - } - - # we leave this empty so we get the DT code back - v._devicetypes = {} - - # this is protocol dependent, so easier to let the Class work this out... - (packet, responselen) = v._build_command_packet('DT') - if packet is None: - raise ValueError('No command packet received for address 00f8. This shouldn\'t happen...') - - # send it - response_packet = v._send_command_packet(packet, responselen) - if response_packet is None: - raise ValueError('Error on communicating with the device, no response received. Unknown error.') - - # let it go... - v._disconnect() - - (val, code) = v._parse_response(response_packet, 'DT') - - if val is not None: - return val - else: - return None + def _cb_standalone(self, command, value, by): + self._result = value if __name__ == '__main__': - - usage = ''' - Usage: - ---------------------------------------------------------------------------------- - - This plugin is meant to be used inside SmartHomeNG. - - For diagnostic purposes, you can run it as a standalone Python program from the - command line. It will try to communicate with a connected Viessmann heating system - and return the device type and the necessary protocol for setting up your plugin - in SmartHomeNG. - - You need to call this plugin with the serial interface as the first parameter, e.g. - - ./__init__.py /dev/ttyUSB0 - - If you call it with -v as a second parameter, you get additional debug information: - - ./__init__.py /dev/ttyUSB0 -v - - ''' - - logger = logging.getLogger(__name__) - logger.setLevel(logging.CRITICAL) - ch = logging.StreamHandler() - ch.setLevel(logging.DEBUG) - - # create formatter and add it to the handlers - formatter = logging.Formatter('%(asctime)s - %(message)s @ %(lineno)d') - ch.setFormatter(formatter) - - # add the handlers to the logger - logger.addHandler(ch) - - serialport = "" - - if len(sys.argv) == 2: - serialport = sys.argv[1] - elif len(sys.argv) == 3 and sys.argv[2] == '-v': - serialport = sys.argv[1] - logger.setLevel(logging.DEBUG) - else: - print(usage) - exit() - - print("This is Viessmann plugin running in standalone mode") - print("===================================================") - - v = Viessmann(None, standalone=serialport, logger=logger) - - for proto in ('P300', 'KW'): - - res = get_device_type(v, proto) - if res is None: - - # None means no connection, no further tries - print(f'Connection could not be established to {serialport}. 
Please check connection.') - break - - if res is False: - - # False means no comm init (only P300), go on - print(f'Communication could not be established using protocol {proto}.') - else: - - # anything else should be the devices answer, try to decode and quit - print(f'Device ID is {res}, device type is {commands.devicetypes.get(res, "unknown")} using protocol {proto}') - # break - - print('Done.') + s = Standalone(sdp_viessmann, sys.argv[0]) diff --git a/viessmann/README.md b/viessmann/_pv_1_2_3/README.md similarity index 100% rename from viessmann/README.md rename to viessmann/_pv_1_2_3/README.md diff --git a/viessmann/_pv_1_2_3/__init__.py b/viessmann/_pv_1_2_3/__init__.py new file mode 100755 index 000000000..11bd79325 --- /dev/null +++ b/viessmann/_pv_1_2_3/__init__.py @@ -0,0 +1,2122 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab + +######################################################################### +# Copyright 2020 Michael Wenzel +# Copyright 2020 Sebastian Helms +######################################################################### +# Viessmann-Plugin for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +######################################################################### + +import logging +import sys +import time +import re +import json +import serial +import threading +from datetime import datetime +import dateutil.parser +import cherrypy + +if __name__ == '__main__': + # just needed for standalone mode + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + import os + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) + import commands + +else: + from . import commands + + from lib.item import Items + from lib.model.smartplugin import SmartPlugin, SmartPluginWebIf, Modules + + from bin.smarthome import VERSION + + +class Viessmann(SmartPlugin): + ''' + Main class of the plugin. Provides communication with Viessmann heating systems + via serial / USB-to-serial connections to read values and set operating parameters. + + Supported device types must be defined in ./commands.py. 
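+
+    Illustrative item configuration (the command names 'Raumtemperatur_A1M1'
+    and 'Betriebsart_A1M1' are placeholders; valid names are defined in
+    ./commands.py for the configured heating type):
+
+        raumtemperatur:
+            type: num
+            viess_read: Raumtemperatur_A1M1
+            viess_read_cycle: 120
+            viess_init: true
+
+        betriebsart:
+            type: num
+            viess_read: Betriebsart_A1M1
+            viess_send: true
+            viess_read_afterwrite: 5
+
+    If viess_send is set to true, writes reuse the command name given in
+    viess_read (see parse_item / update_item below).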
+ ''' + ALLOW_MULTIINSTANCE = False + + PLUGIN_VERSION = '1.2.3' + +# +# public methods +# + + def __init__(self, sh, *args, standalone='', logger=None, **kwargs): + + # Call init code of parent class (SmartPlugin) + super().__init__() + + # standalone mode: just setup basic info + if standalone: + self._serialport = standalone + self._timeout = 3 + self.logger = logger + self._standalone = True + + else: + # Get plugin parameter + self._serialport = self.get_parameter_value('serialport') + self._heating_type = self.get_parameter_value('heating_type') + self._protocol = self.get_parameter_value('protocol') + self._timeout = self.get_parameter_value('timeout') + self._standalone = False + + # Set variables + self._error_count = 0 + self._params = {} # Item dict + self._init_cmds = [] # List of command codes for read at init + self._cyclic_cmds = {} # Dict of command codes with cylce-times for cyclic readings + self._application_timer = {} # Dict of application timer with command codes and values + self._timer_cmds = [] # List of command codes for timer + self._viess_timer_dict = {} + self._last_values = {} + self._balist_item = None # list of last value per command code + self._lock = threading.Lock() + self._initread = False + self._timerread = False + self._connected = False + self._initialized = False + self._lastbyte = b'' + self._lastbytetime = 0 + self._cyclic_update_active = False + self._wochentage = { + 'MO': ['mo', 'montag', 'monday'], + 'TU': ['di', 'dienstag', 'tuesday'], + 'WE': ['mi', 'mittwoch', 'wednesday'], + 'TH': ['do', 'donnerstag', 'thursday'], + 'FR': ['fr', 'freitag', 'friday'], + 'SA': ['sa', 'samstag', 'saturday'], + 'SU': ['so', 'sonntag', 'sunday']} + + # if running standalone, don't initialize command sets + if not sh: + return + + # initialize logger if necessary + if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5': + self.logger = logging.getLogger(__name__) + + self._config_loaded = False + + if not self._load_configuration(): + return None + + # Init web interface + self.init_webinterface() + + def run(self): + ''' + Run method for the plugin + ''' + if not self._config_loaded: + if not self._load_configuration(): + return + self.alive = True + self._connect() + self._read_initial_values() + self._read_timers() + + def stop(self): + ''' + Stop method for the plugin + ''' + self.alive = False + if self.scheduler_get('cyclic'): + self.scheduler_remove('cyclic') + self._disconnect() + # force reload of configuration on restart + self._config_loaded = False + + def parse_item(self, item): + ''' + Method for parsing items. + If the item carries any viess_* field, this item is registered to the plugin. + + :param item: The item to process. + :type item: object + + :return: The item update method to be triggered if the item is changed, or None. 
+ :rtype: object + ''' + # Process the update config + if self.has_iattr(item.conf, 'viess_update'): + self.logger.debug(f'Item for requesting update for all items triggered: {item}') + return self.update_item + + # Process the timer config and fill timer dict + if self.has_iattr(item.conf, 'viess_timer'): + timer_app = self.get_iattr_value(item.conf, 'viess_timer') + for commandname in self._commandset: + if commandname.startswith(timer_app): + commandconf = self._commandset[commandname] + self.logger.debug(f'Process the timer config, commandname: {commandname}') + # {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True} + commandcode = (commandconf['addr']).lower() + if timer_app not in self._application_timer: + self._application_timer[timer_app] = {'item': item, 'commandcodes': []} + if commandcode not in self._application_timer[timer_app]['commandcodes']: + self._application_timer[timer_app]['commandcodes'].append(commandcode) + self._application_timer[timer_app]['commandcodes'].sort() + self.logger.info(f'Loaded Application Timer {self._application_timer}') + # self._application_timer: {'Timer_M2': {'item': Item: heizung.heizkreis_m2.schaltzeiten, 'commandcodes': ['3000', '3008', '3010', '3018', '3020', '3028', '3030']}, 'Timer_Warmwasser': {'item': Item: heizung.warmwasser.schaltzeiten, 'commandcodes': ['2100', '2108', '2110', '2118', '2120', '2128', '2130']}} + + for subdict in self._application_timer: + for commandcode in self._application_timer[subdict]['commandcodes']: + if commandcode not in self._timer_cmds: + self._timer_cmds.append(commandcode) + self._timer_cmds.sort() + self.logger.debug(f'Loaded Timer commands {self._timer_cmds}') + return self.update_item + + # Process the read config + if self.has_iattr(item.conf, 'viess_read'): + commandname = self.get_iattr_value(item.conf, 'viess_read') + if commandname is None or commandname not in self._commandset: + self.logger.error(f'Item {item} contains invalid read command {commandname}!') + return None + + # Remember the read config to later update this item if the configured response comes in + self.logger.info(f'Item {item} reads by using command {commandname}') + commandconf = self._commandset[commandname] + commandcode = (commandconf['addr']).lower() + + # Fill item dict + self._params[commandcode] = {'item': item, 'commandname': commandname} + self.logger.debug(f'Loaded params {self._params}') + + # Allow items to be automatically initiated on startup + if self.has_iattr(item.conf, 'viess_init') and self.get_iattr_value(item.conf, 'viess_init'): + self.logger.info(f'Item {item} is initialized on startup') + if commandcode not in self._init_cmds: + self._init_cmds.append(commandcode) + self.logger.debug(f'CommandCodes should be read at init: {self._init_cmds}') + + # Allow items to be cyclically updated + if self.has_iattr(item.conf, 'viess_read_cycle'): + cycle = int(self.get_iattr_value(item.conf, 'viess_read_cycle')) + self.logger.info(f'Item {item} should read cyclic every {cycle} seconds') + nexttime = time.time() + cycle + if commandcode not in self._cyclic_cmds: + self._cyclic_cmds[commandcode] = {'cycle': cycle, 'nexttime': nexttime} + else: + # If another item requested this command already with a longer cycle, use the shorter cycle now + if self._cyclic_cmds[commandcode]['cycle'] > cycle: + self._cyclic_cmds[commandcode]['cycle'] = cycle + self.logger.debug(f'CommandCodes should be read cyclic: {self._cyclic_cmds}') + + # Process the write config + if self.has_iattr(item.conf, 'viess_send'): + if 
self.get_iattr_value(item.conf, 'viess_send'): + commandname = self.get_iattr_value(item.conf, 'viess_read') + else: + commandname = self.get_iattr_value(item.conf, 'viess_send') + + if commandname is None or commandname not in self._commandset: + self.logger.error(f'Item {item} contains invalid write command {commandname}!') + return None + else: + self.logger.info(f'Item {item} to be written by using command {commandname}') + return self.update_item + + # get operating modes list + if self.has_iattr(item.conf, 'viess_ba_list'): + self._balist_item = item + self.logger.info(f'Item {item} wants list of operating modes') + + def parse_logic(self, logic): + pass + + def update_item(self, item, caller=None, source=None, dest=None): + ''' + Callback method for sending values to the plugin when a registered item has changed + + :param item: item to be updated towards the plugin + :param caller: if given it represents the callers name + :param source: if given it represents the source + :param dest: if given it represents the dest + ''' + if self.alive and caller != self.get_shortname(): + self.logger.info(f'Update item: {item.id()}, item has been changed outside this plugin') + self.logger.debug(f'update_item was called with item {item} from caller {caller}, source {source} and dest {dest}') + + if self.has_iattr(item.conf, 'viess_send'): + # Send write command + if self.get_iattr_value(item.conf, 'viess_send'): + commandname = self.get_iattr_value(item.conf, 'viess_read') + else: + commandname = self.get_iattr_value(item.conf, 'viess_send') + value = item() + self.logger.debug(f'Got item value to be written: {value} on command name {commandname}') + if not self._send_command(commandname, value): + # create_write_command() liefert False, wenn das Schreiben fehlgeschlagen ist + # -> dann auch keine weitere Verarbeitung + self.logger.debug(f'Write for {commandname} with value {value} failed, reverting value, canceling followup actions') + item(item.property.last_value, self.get_shortname()) + return None + + # If a read command should be sent after write + if self.has_iattr(item.conf, 'viess_read') and self.has_iattr(item.conf, 'viess_read_afterwrite'): + readcommandname = self.get_iattr_value(item.conf, 'viess_read') + readafterwrite = self.get_iattr_value(item.conf, 'viess_read_afterwrite') + self.logger.debug(f'Attempting read after write for item {item}, command {readcommandname}, delay {readafterwrite}') + if readcommandname is not None and readafterwrite is not None: + aw = float(readafterwrite) + time.sleep(aw) + self._send_command(readcommandname) + + # If commands should be triggered after this write + if self.has_iattr(item.conf, 'viess_trigger'): + trigger = self.get_iattr_value(item.conf, 'viess_trigger') + if trigger is None: + self.logger.error(f'Item {item} contains invalid trigger command list {trigger}!') + else: + tdelay = 5 # default delay + if self.has_iattr(item.conf, 'viess_trigger_afterwrite'): + tdelay = float(self.get_iattr_value(item.conf, 'viess_trigger_afterwrite')) + if type(trigger) != list: + trigger = [trigger] + for triggername in trigger: + triggername = triggername.strip() + if triggername is not None and readafterwrite is not None: + self.logger.debug(f'Triggering command {triggername} after write for item {item}') + time.sleep(tdelay) + self._send_command(triggername) + + elif self.has_iattr(item.conf, 'viess_timer'): + timer_app = self.get_iattr_value(item.conf, 'viess_timer') + uzsu_dict = item() + self.logger.debug(f'Got changed UZSU timer: {uzsu_dict} on 
timer application {timer_app}') + self._uzsu_dict_to_viess_timer(timer_app, uzsu_dict) + + elif self.has_iattr(item.conf, 'viess_update'): + if item(): + self.logger.debug('Reading of all values/items has been requested') + self.update_all_read_items() + + def send_cyclic_cmds(self): + ''' + Recall function for shng scheduler. Reads all values configured to be read cyclically. + ''' + # check if another cyclic cmd run is still active + if self._cyclic_update_active: + self.logger.warning('Triggered cyclic command read, but previous cyclic run is still active. Check device and cyclic configuration (too much/too short?)') + return + else: + self.logger.info('Triggering cyclic command read') + + # set lock + self._cyclic_update_active = True + currenttime = time.time() + read_items = 0 + todo = [] + for commandcode in list(self._cyclic_cmds.keys()): + + entry = self._cyclic_cmds[commandcode] + # Is the command already due? + if entry['nexttime'] <= currenttime: + todo.append(commandcode) + + if self._protocol == 'KW': + # see if we got to do anything - maybe no items are due to be read? + if len(todo) > 0: + self._KW_send_multiple_read_commands(todo) + for addr in todo: + self._cyclic_cmds[addr]['nexttime'] = currenttime + self._cyclic_cmds[addr]['cycle'] + read_items = len(todo) + else: + for addr in todo: + # as this loop can take considerable time, repeatedly check if shng wants to stop + if not self.alive: + self.logger.info('shng issued stop command, canceling cyclic read.') + return + + commandname = self._commandname_by_commandcode(addr) + self.logger.debug(f'Triggering cyclic read command: {commandname}') + self._send_command(commandname, ) + self._cyclic_cmds[addr]['nexttime'] = currenttime + self._cyclic_cmds[addr]['cycle'] + read_items += 1 + + self._cyclic_update_active = False + if read_items: + self.logger.debug(f'cyclic command read took {(time.time() - currenttime):.1f} seconds for {read_items} items') + + def update_all_read_items(self): + ''' + Read all values preset in commands.py as readable + ''' + for commandcode in list(self._params.keys()): + commandname = self._commandname_by_commandcode(commandcode) + self.logger.debug(f'Triggering read command: {commandname} for requested value update') + self._send_command(commandname) + + def read_addr(self, addr): + ''' + Tries to read a data point indepently of item config + + :param addr: data point addr (2 byte hex address) + :type addr: str + :return: Value if read is successful, None otherwise + ''' + addr = addr.lower() + + commandname = self._commandname_by_commandcode(addr) + if commandname is None: + self.logger.debug(f'Address {addr} not defined in commandset, aborting') + return None + + self.logger.debug(f'Attempting to read address {addr} for command {commandname}') + + (packet, responselen) = self._build_command_packet(commandname) + if packet is None: + return None + + response_packet = self._send_command_packet(packet, responselen) + if response_packet is None: + return None + + res = self._parse_response(response_packet, commandname) + if res is None: + return None + + (value, commandcode) = res + + return value + + def read_temp_addr(self, addr, length, unit): + ''' + Tries to read an arbitrary supplied data point indepently of device config + + :param addr: data point addr (2 byte hex address) + :type addr: str + :param len: Length (in byte) expected from address read + :type len: num + :param unit: Unit code from commands.py + :type unit: str + :return: Value if read is successful, None otherwise + ''' + # as 
we have no reference whatever concerning the supplied data, we do a few sanity checks... + + addr = addr.lower() + + if len(addr) != 4: # addresses are 2 bytes + self.logger.warning(f'temp address: address not 4 digits long: {addr}') + return None + + for c in addr: # addresses are hex strings + if c not in '0123456789abcdef': + self.logger.warning(f'temp address: address digit "{c}" is not hex char') + return None + + if length < 1 or length > 32: # empiritistical choice + self.logger.warning(f'temp address: len is not > 0 and < 33: {len}') + return None + + if unit not in self._unitset: # units need to be predefined + self.logger.warning(f'temp address: unit {unit} not in unitset. Cannot use custom units') + return None + + # addr already known? + if addr in self._commandset: + cmd = self._commandname_by_commandcode(addr) + self.logger.info(f'temp address {addr} already known for command {cmd}') + else: + # create temp commandset + cmd = 'temp_cmd' + cmdconf = {'addr': addr, 'len': length, 'unit': unit, 'set': False} + self.logger.debug(f'Adding temporary command config {cmdconf} for command temp_cmd') + self._commandset[cmd] = cmdconf + + res = self.read_addr(addr) + + if cmd == 'temp_cmd': + del self._commandset['temp_cmd'] + + return res + + def write_addr(self, addr, value): + ''' + Tries to write a data point indepently of item config + + :param addr: data point addr (2 byte hex address) + :type addr: str + :param value: value to write + :return: Value if read is successful, None otherwise + ''' + addr = addr.lower() + + commandname = self._commandname_by_commandcode(addr) + if commandname is None: + self.logger.debug(f'Address {addr} not defined in commandset, aborting') + return None + + self.logger.debug(f'Attempting to write address {addr} with value {value} for command {commandname}') + + (packet, responselen) = self._build_command_packet(commandname, value) + if packet is None: + return None + + response_packet = self._send_command_packet(packet, responselen) + if response_packet is None: + return None + + return self._parse_response(response_packet, commandname) + +# +# initialization methods +# + + def _load_configuration(self): + ''' + Load configuration sets from commands.py + ''' + + # Load protocol dependent sets + if self._protocol in commands.controlset and self._protocol in commands.errorset and self._protocol in commands.unitset and self._protocol in commands.returnstatus and self._protocol in commands.setreturnstatus: + self._controlset = commands.controlset[self._protocol] + self.logger.debug(f'Loaded controlset for protocol {self._controlset}') + self._errorset = commands.errorset[self._protocol] + self.logger.debug(f'Loaded errors for protocol {self._errorset}') + self._unitset = commands.unitset[self._protocol] + self.logger.debug(f'Loaded units for protocol {self._unitset}') + self._devicetypes = commands.devicetypes + self.logger.debug(f'Loaded device types for protocol {self._devicetypes}') + self._returnstatus = commands.returnstatus[self._protocol] + self.logger.debug(f'Loaded return status for protocol {self._returnstatus}') + self._setreturnstatus = commands.setreturnstatus[self._protocol] + self.logger.debug(f'Loaded set return status for protocol {self._setreturnstatus}') + else: + self.logger.error(f'Sets for protocol {self._protocol} could not be found or incomplete!') + return False + + # Load device dependent sets + if self._heating_type in commands.commandset and self._heating_type in commands.operatingmodes and self._heating_type in 
commands.systemschemes: + self._commandset = commands.commandset[self._heating_type] + self.logger.debug(f'Loaded commands for heating type {self._commandset}') + self._operatingmodes = commands.operatingmodes[self._heating_type] + self.logger.debug(f'Loaded operating modes for heating type {self._operatingmodes}') + self._systemschemes = commands.systemschemes[self._heating_type] + self.logger.debug(f'Loaded system schemes for heating type {self._systemschemes}') + else: + sets = [] + if self._heating_type not in commands.commandset: + sets += 'command' + if self._heating_type not in commands.operatingmodes: + sets += 'operating modes' + if self._heating_type not in commands.systemschemes: + sets += 'system schemes' + + self.logger.error(f'Sets {", ".join(sets)} for heating type {self._heating_type} could not be found!') + return False + + self.logger.info(f'Loaded configuration for heating type {self._heating_type} with protocol {self._protocol}') + self._config_loaded = True + return True + + def _connect(self): + ''' + Tries to establish a connection to the serial reading device. To prevent + multiple concurrent connection locking is used. + + :return: Returns True if connection was established, False otherwise + :rtype: bool + ''' + if not self.alive: + return False + + if self._connected and self._serial: + return True + + self._lock.acquire() + try: + self.logger.debug(f'Connecting to {self._serialport}..') + self._serial = serial.Serial() + self._serial.baudrate = self._controlset['Baudrate'] + self._serial.parity = self._controlset['Parity'] + self._serial.bytesize = self._controlset['Bytesize'] + self._serial.stopbits = self._controlset['Stopbits'] + self._serial.port = self._serialport + + # both of the following timeout values are determined by trial and error + if self._protocol == 'KW': + # needed to "capture" the 0x05 sync bytes + self._serial.timeout = 1.0 + else: + # not too long to prevent lags in communication. + self._serial.timeout = 0.5 + self._serial.open() + self._connected = True + self.logger.info(f'Connected to {self._serialport}') + self._connection_attempts = 0 + if not self._standalone and not self.scheduler_get('cyclic'): + self._create_cyclic_scheduler() + return True + except Exception as e: + self.logger.error(f'Could not _connect to {self._serialport}; Error: {e}') + return False + finally: + self._lock.release() + + def _disconnect(self): + ''' + Disconnect any connected devices. + ''' + self._connected = False + self._initialized = False + try: + self._serial.close() + except IOError: + pass + self._serial = None + try: + self._lock.release() + except RuntimeError: + pass + self.logger.info('Disconnected') + + def _init_communication(self): + ''' + After connecting to the device, setup the communication protocol + + :return: Returns True, if communication was established successfully, False otherwise + :rtype: bool + ''' + # just try to connect anyway; if connected, this does nothing and no harm, if not, it connects + if not self._connect(): + + self.logger.error('Init communication not possible as connect failed.') + return False + + # initialization only necessary for P300 protocol... 
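+        # Minimal sketch of the P300 handshake performed below, assuming a raw
+        # pyserial connection and the usual Optolink byte values (0x04 reset,
+        # 0x05 'not initiated', 0x16 0x00 0x00 sync, 0x06 acknowledge) with the
+        # serial settings 4800 baud / 8E2 commonly used by Optolink adapters:
+        #
+        #     import serial
+        #     ser = serial.Serial('/dev/ttyUSB0', 4800, parity=serial.PARITY_EVEN,
+        #                         stopbits=serial.STOPBITS_TWO, timeout=1.0)
+        #     ser.write(b'\x04')                # reset
+        #     while ser.read(1) != b'\x05':     # wait for 'not initiated'
+        #         ser.write(b'\x04')
+        #     ser.write(b'\x16\x00\x00')        # sync request
+        #     print('initialized:', ser.read(1) == b'\x06')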
+ if self._protocol == 'P300': + + # init procedure is + # interface: 0x04 (reset) + # device: 0x05 (repeated) + # interface: 0x160000 (sync) + # device: 0x06 (sync ok) + # interface: resume communication, periodically send 0x160000 as keepalive if necessary + + self.logger.debug('Init Communication....') + is_initialized = False + initstringsent = False + self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') + self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) + readbyte = self._read_bytes(1) + self.logger.debug(f'read_bytes: read {readbyte}, last byte is {self._lastbyte}') + + for i in range(0, 10): + if initstringsent and self._lastbyte == self._int2bytes(self._controlset['Acknowledge'], 1): + is_initialized = True + self.logger.debug('Device acknowledged initialization') + break + if self._lastbyte == self._int2bytes(self._controlset['Not_initiated'], 1): + self._send_bytes(self._int2bytes(self._controlset['Sync_Command'], 3)) + self.logger.debug(f'send_bytes: Send sync command {self._int2bytes(self._controlset["Sync_Command"], 3)}') + initstringsent = True + elif self._lastbyte == self._int2bytes(self._controlset['Init_Error'], 1): + self.logger.error(f'The interface has reported an error (\x15), loop increment {i}') + self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) + self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') + initstringsent = False + else: + self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) + self.logger.debug(f'send_bytes: Send reset command {self._int2bytes(self._controlset["Reset_Command"], 1)}') + initstringsent = False + readbyte = self._read_bytes(1) + self.logger.debug(f'read_bytes: read {readbyte}, last byte is {self._lastbyte}') + + self.logger.debug(f'Communication initialized: {is_initialized}') + self._initialized = is_initialized + + else: # at the moment the only other supported protocol is 'KW' which is not stateful + is_initialized = True + self._initialized = is_initialized + + return is_initialized + + def _create_cyclic_scheduler(self): + ''' + Setup the scheduler to handle cyclic read commands and find the proper time for the cycle. + ''' + if not self.alive: + return + + shortestcycle = -1 + # find shortest cycle + for commandname in list(self._cyclic_cmds.keys()): + entry = self._cyclic_cmds[commandname] + if shortestcycle == -1 or entry['cycle'] < shortestcycle: + shortestcycle = entry['cycle'] + # Start the worker thread + if shortestcycle != -1: + # Balance unnecessary calls and precision + workercycle = int(shortestcycle / 2) + # just in case it already exists... + if self.scheduler_get('cyclic'): + self.scheduler_remove('cyclic') + self.scheduler_add('cyclic', self.send_cyclic_cmds, cycle=workercycle, prio=5, offset=0) + self.logger.info(f'Added cyclic worker thread ({workercycle} sec cycle). 
Shortest item update cycle found: {shortestcycle} sec') + + def _read_initial_values(self): + ''' + Read all values configured to be read at startup / connection + ''' + if self._balist_item is not None: + balist = list(self._operatingmodes.values()) + self._balist_item(balist, self.get_shortname()) + self.logger.info(f'writing list of operating modes ({len(balist)} entries) to item {self._balist_item}') + + if self._init_cmds != []: + self.logger.info('Starting initial read commands.') + if self._protocol == 'KW': + self._KW_send_multiple_read_commands(self._init_cmds) + else: + for commandcode in self._init_cmds: + commandname = self._commandname_by_commandcode(commandcode) + self.logger.debug(f'send_init_commands {commandname}') + self._send_command(commandname) + self._initread = True + self.logger.debug(f'self._initread = {self._initread}') + + # + # send and receive commands + # + + def _read_timers(self): + ''' + Read all configured timer values from device and create uzsu timer dict + ''' + if self._application_timer is not []: + self.logger.debug('Starting timer read commands.') + for timer_app in self._application_timer: + for commandcode in self._application_timer[timer_app]['commandcodes']: + commandname = self._commandname_by_commandcode(commandcode) + self.logger.debug(f'send_timer_commands {commandname}') + self._send_command(commandname) + self._timerread = True + self.logger.debug(f'Timer Readout done = {self._timerread}') + self._viess_dict_to_uzsu_dict() + + def _send_command(self, commandname, value=None): + ''' + Create formatted command sequence from command name and send to device + + Note: The implementation detail results in "write if value present, read if value is None". + I have not found anything wrong with this; if any use case needs a specific read/write + selection, please tell me. + + :param commandname: Command for which to create command sequence as defined in commands.py + :type commandname: str + :param value: Value to write to device, None if command is read command + ''' + if value is not None: + self.logger.debug(f'Got a new write job: Command {commandname} with value {value}') + else: + self.logger.debug(f'Got a new read job: Command {commandname}') + + # Build packet with value bytes for write commands + (packet, responselen) = self._build_command_packet(commandname, value) + + # quit if no packet (error on packet build) + if packet is None: + return False + + if value is not None and self._protocol == 'KW': + read_response = False + else: + read_response = True + + # hand over built packet to send_command_packet + response_packet = self._send_command_packet(packet, responselen) + + # process response + if response_packet is None: + return False + + result = self._process_response(response_packet, commandname, read_response) + return result + + def _KW_send_multiple_read_commands(self, commandcodes): + ''' + Takes list of commandnames, builds all command packets and tries to send them in one go. + This only works for read commands and only with KW protocol. + On error the whole remaining read process is aborted, no retries or continuation is attempted. + + :param commandnames: List of commands for which to create command sequence as defined in commands.py + :type commandname: str + ''' + if self._protocol != 'KW': + self.logger.error(f'Called _KW_send_multiple_read_commands, but protocol is {self._protocol}. 
This shouldn\'t happen..') + return + + self.logger.debug(f'Got a new bulk read job: Commands {commandcodes}') + + bulk = {} + + # Build packets with value bytes for write commands + for addr in commandcodes: + commandname = self._commandname_by_commandcode(addr) + (packet, responselen) = self._build_command_packet(commandname, None, True) + + if packet: + bulk[addr] = {'packet': packet, 'responselen': responselen, 'command': commandname} + + # quit if no packet (error on packet build) + if not bulk: + return + + if not self._connected: + self.logger.error('Not connected, trying to reconnect.') + if not self._connect(): + self.logger.error('Could not connect to serial device') + return + + self._lock.acquire() + try: + self._init_communication() + + replies = {} + + if not self._KW_get_sync(): + return + + first_cmd = True + first_packet = bytearray(self._int2bytes(self._controlset['StartByte'], 1)) + + for addr in bulk.keys(): + + if first_cmd: + # make sure that the first sent packet has the StartByte (0x01) lead byte set + # this way the first packet actually sent has the start byte, regardless of bulk.keys() order + first_packet.extend(bulk[addr]['packet']) + bulk[addr]['packet'] = first_packet + first_cmd = False + + # send query + try: + self._send_bytes(bulk[addr]['packet']) + self.logger.debug(f'Successfully sent packet: {self._bytes2hexstring(bulk[addr]["packet"])}') + except IOError as io: + raise IOError(f'IO Error: {io}') + return + except Exception as e: + raise Exception(f'Exception while sending: {e}') + return + + # receive response + replies[addr] = bytearray() + try: + self.logger.debug(f'Trying to receive {bulk[addr]["responselen"]} bytes of the response') + chunk = self._read_bytes(bulk[addr]['responselen']) + + self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') + if len(chunk) != 0: + replies[addr].extend(chunk) + else: + self.logger.error(f'Received 0 bytes chunk from {addr} - this probably is a communication error, possibly a wrong datapoint address?') + return + except IOError as io: + raise IOError(f'IO Error: {io}') + return + except Exception as e: + raise Exception(f'Error receiving response: {e}') + return + + # sent all read requests, time to parse the replies + # do this inside the _lock-block so this doesn't interfere with + # possible cyclic read data assignments + for addr in bulk.keys(): + if len(replies[addr]) > 0: + self._process_response(replies[addr], bulk[addr]['command'], True) + + except IOError as io: + self.logger.error(f'KW_send_multiple_read_commands failed with IO error: {io}') + self.logger.error('Trying to reconnect (disconnecting, connecting') + self._disconnect() + return + except Exception as e: + self.logger.error(f'KW_send_multiple_read_commands failed with error: {e}') + return + finally: + try: + self._lock.release() + except RuntimeError: + pass + + def _KW_get_sync(self): + ''' + Try to get a sync packet (0x05) from heating system to be able to send commands + + :return: True if sync packet received, False otherwise (after retries) + :rtype: bool + ''' + if not self._connected or self._protocol != 'KW': + return False # don't even try. 
We only want to be called by _send_command_packet, which just before executed connect() + + retries = 5 + + # try to reset communication, especially if previous P300 comms is still open + self._send_bytes(self._int2bytes(self._controlset['Reset_Command'], 1)) + + attempt = 0 + while attempt < retries: + self.logger.debug(f'Starting sync loop - attempt {attempt + 1}/{retries}') + + self._serial.reset_input_buffer() + chunk = self._read_bytes(1) + # enable for 'raw' debugging + # self.logger.debug(f'sync loop - got {self._bytes2hexstring(chunk)}') + if chunk == self._int2bytes(self._controlset['Not_initiated'], 1, False): + self.logger.debug('Got sync. Commencing command send') + return True + time.sleep(.8) + attempt = attempt + 1 + self.logger.error(f'Sync not acquired after {attempt} attempts') + self._disconnect() + + return False + + def _send_command_packet(self, packet, packetlen_response): + ''' + Send command sequence to device + + :param packet: Command sequence to send + :type packet: bytearray + :param packetlen_response: number of bytes expected in reply + :type packetlen_response: int + :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) + :type read_response: bool + :return: Response packet (bytearray) if no error occured, None otherwise + ''' + if not self._connected: + self.logger.error('Not connected, trying to reconnect.') + if not self._connect(): + self.logger.error('Could not connect to serial device') + return None + + self._lock.acquire() + try: + if not self._initialized or (time.time() - 500) > self._lastbytetime: + if self._protocol == 'P300': + if self._initialized: + self.logger.debug('Communication timed out, trying to reestablish communication.') + else: + self.logger.info('Communication no longer initialized, trying to reestablish.') + self._init_communication() + + if self._initialized: + # send query + try: + if self._protocol == 'KW': + # try to get sync, exit if it fails + if not self._KW_get_sync(): + return None + + self._send_bytes(packet) + self.logger.debug(f'Successfully sent packet: {self._bytes2hexstring(packet)}') + except IOError as io: + raise IOError(f'IO Error: {io}') + return None + except Exception as e: + raise Exception(f'Exception while sending: {e}') + return None + + # receive response + response_packet = bytearray() + self.logger.debug(f'Trying to receive {packetlen_response} bytes of the response') + chunk = self._read_bytes(packetlen_response) + + if self._protocol == 'P300': + self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') + if len(chunk) != 0: + if chunk[:1] == self._int2bytes(self._controlset['Error'], 1): + self.logger.error(f'Interface returned error! response was: {chunk}') + elif len(chunk) == 1 and chunk[:1] == self._int2bytes(self._controlset['Not_initiated'], 1): + self.logger.error('Received invalid chunk, connection not initialized. Forcing re-initialize...') + self._initialized = False + elif chunk[:1] != self._int2bytes(self._controlset['Acknowledge'], 1): + self.logger.error(f'Received invalid chunk, not starting with ACK! response was: {chunk}') + self._error_count += 1 + if self._error_count >= 5: + self.logger.warning('Encountered 5 invalid chunks in sequence. 
Maybe communication was lost, re-initializing') + self._initialized = False + else: + response_packet.extend(chunk) + self._error_count = 0 + return response_packet + else: + self.logger.error(f'Received 0 bytes chunk - ignoring response_packet! chunk was: {chunk}') + elif self._protocol == 'KW': + self.logger.debug(f'Received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') + if len(chunk) != 0: + response_packet.extend(chunk) + return response_packet + else: + self.logger.error('Received 0 bytes chunk - this probably is a communication error, possibly a wrong datapoint address?') + else: + raise Exception('Interface not initialized!') + except IOError as io: + self.logger.error(f'send_command_packet failed with IO error: {io}') + self.logger.error('Trying to reconnect (disconnecting, connecting') + self._disconnect() + except Exception as e: + self.logger.error(f'send_command_packet failed with error: {e}') + finally: + try: + self._lock.release() + except RuntimeError: + pass + + # if we didn't return with data earlier, we hit an error. Act accordingly + return None + + def _send_bytes(self, packet): + ''' + Send data to device + + :param packet: Data to be sent + :type packet: bytearray + :return: Returns False, if no connection is established or write failed; True otherwise + :rtype: bool + ''' + if not self._connected: + return False + + try: + self._serial.write(packet) + except serial.SerialTimeoutException: + return False + + # self.logger.debug(f'send_bytes: Sent {packet}') + return True + + def _read_bytes(self, length): + ''' + Try to read bytes from device + + :param length: Number of bytes to read + :type length: int + :return: Number of bytes actually read + :rtype: int + ''' + if not self._connected: + return 0 + + totalreadbytes = bytes() + # self.logger.debug('read_bytes: Start read') + starttime = time.time() + + # don't wait for input indefinitely, stop after self._timeout seconds + while time.time() <= starttime + self._timeout: + readbyte = self._serial.read() + self._lastbyte = readbyte + # self.logger.debug(f'read_bytes: Read {readbyte}') + if readbyte != b'': + self._lastbytetime = time.time() + else: + return totalreadbytes + totalreadbytes += readbyte + if len(totalreadbytes) >= length: + return totalreadbytes + + # timeout reached, did we read anything? + if not totalreadbytes: + + # just in case, force plugin to reconnect + self._connected = False + self._initialized = False + + # return what we got so far, might be 0 + return totalreadbytes + + def _process_response(self, response, commandname='', read_response=True, update_item=True): + ''' + Process device response data, try to parse type and value and assign value to associated item + + :param response: Data received from device + :type response: bytearray + :param commandname: Commandname used for request (only needed for KW protocol) + :type commandname: str + :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) + :type read_response: bool + :param update_item: True if value should be written to corresponding item + :type update_item: bool + ''' + res = self._parse_response(response, commandname, read_response) + + # None means error on read/parse or write reponse. 
Errors are already logged, so no further action necessary + if res is None: + return + + # write returns True on success + if res is True: + return True + + # assign results + (value, commandcode) = res + + # get command config + commandname = self._commandname_by_commandcode(commandcode) + commandconf = self._commandset[commandname] + commandunit = commandconf['unit'] + + # update items if commandcode is in item-dict + if commandcode in self._params.keys(): + + # Find corresponding item + item = self._params[commandcode]['item'] + self.logger.debug(f'Corresponding item {item} for command {commandname}') + + # Update item + if update_item: + self.logger.debug(f'Updating item {item} with value {value}') + if commandunit == 'CT': + # Split timer list and put it the child items, which were created by struct.timer in iso time format + try: + for child in item.return_children(): + child_item = str(child.id()) + if child_item.endswith('an1'): + child(value[0]['An'], self.get_shortname()) + # child(datetime.strptime(value[0]['An'], '%H:%M').time().isoformat()) + elif child_item.endswith('aus1'): + child(value[0]['Aus'], self.get_shortname()) + elif child_item.endswith('an2'): + child(value[1]['An'], self.get_shortname()) + elif child_item.endswith('aus2'): + child(value[1]['Aus'], self.get_shortname()) + elif child_item.endswith('an3'): + child(value[2]['An'], self.get_shortname()) + elif child_item.endswith('aus3'): + child(value[2]['Aus'], self.get_shortname()) + elif child_item.endswith('an4'): + child(value[3]['An'], self.get_shortname()) + elif child_item.endswith('aus4'): + child(value[3]['Aus'], self.get_shortname()) + except KeyError: + self.logger.debug('No child items for timer found (use timer.structs) or value no valid') + + # save value to item + item(value, self.get_shortname()) + else: + self.logger.debug(f'Not updating item {item} as not requested') + else: + if (commandcode not in self._timer_cmds) and update_item: + self.logger.error(f'Should update item with response to a command not in item config: {commandcode}. This shouldn''t happen..') + + # Process response for timers in timer-dict using the commandcode + if commandcode in self._timer_cmds: + self.logger.debug(f'process_response_timer: {commandcode}') + + # Find timer application + for timer in self._application_timer: + if commandcode in self._application_timer[timer]['commandcodes']: + timer_app = timer + + # Fill timer dict + if timer_app not in self._viess_timer_dict: + self._viess_timer_dict[timer_app] = {} + + self._viess_timer_dict[timer_app][commandname] = value + self.logger.debug(f'Viessmann timer dict: {self._viess_timer_dict}') + +# +# convert data types +# + + def _build_valuebytes_from_value(self, value, commandconf): + ''' + Convert value to formatted bytearray for write commands + :param value: Value to send + :param commandconf: configuration set for requested command + :type commandconf: dict + :return: bytearray with value if successful, None if error + ''' + try: + commandvaluebytes = commandconf['len'] + commandunit = commandconf['unit'] + set_allowed = bool(commandconf['set']) + if 'min_value' in commandconf: + min_allowed_value = commandconf['min_value'] + else: + min_allowed_value = None + if 'max_value' in commandconf: + max_allowed_value = commandconf['max_value'] + else: + max_allowed_value = None + except KeyError: + self.logger.error(f'Error in command configuration {commandconf}, aborting') + return None + + # unit HEX = hex values as string is only for read requests (debugging). 
Don't even try... + if commandunit == 'HEX': + + self.logger.error(f'Error in command configuration {commandconf}: unit HEX is not writable, aborting') + return None + + if commandunit == 'BA': + + # try to convert BA string to byte value, setting str values will fail + # this will not work properly if multiple entries have the same value! + try: + value = int(dict(map(reversed, self._operatingmodes.items()))[value]) + commandunit = 'IUNON' + except KeyError: + # value doesn't exist in operatingmodes. don't know what to do + self.logger.error(f'Value {value} not defined in operating modes for device {self._heating_type}') + return None + + try: + unitconf = self._unitset[commandunit] + except KeyError: + self.logger.error(f'Error: unit {commandunit} not found in unit set {self._unitset}') + return None + + try: + valuetype = unitconf['type'] + valuereadtransform = unitconf['read_value_transform'] + except KeyError: + self.logger.error(f'Error in unit configuration {unitconf} for unit {commandunit}, aborting') + return None + + self.logger.debug(f'Unit defined to {commandunit} with config{unitconf}') + + # check if writing is allowed for this address + if not set_allowed: + self.logger.error(f'Command {self._commandname_by_commandcode(commandconf["addr"])} is not configured for writing') + return None + + # check if value is empty + if value is None or value == '': + self.logger.error(f'Command value for command {self._commandname_by_commandcode(commandconf["addr"])} is empty, not possible to send (check item, command and unit configuration') + return None + + # check if value to be written is in allowed range + if (min_allowed_value is not None and min_allowed_value > value) or (max_allowed_value is not None and max_allowed_value < value): + self.logger.error(f'Invalid range - value {value} not in range [{min_allowed_value}, {max_allowed_value}]') + return None + + try: + # Create valuebytes + if valuetype == 'datetime' or valuetype == 'date': + try: + datestring = dateutil.parser.isoparse(value).strftime('%Y%m%d%w%H%M%S') + # Viessmann erwartet 2 digits für Wochentag, daher wird hier noch eine 0 eingefügt + datestring = datestring[:8] + '0' + datestring[8:] + valuebytes = bytes.fromhex(datestring) + self.logger.debug(f'Created value bytes for type {valuetype} as bytes: {valuebytes}') + except Exception as e: + self.logger.error(f'Incorrect data format, YYYY-MM-DD expected; Error: {e}') + return None + elif valuetype == 'timer': + try: + times = '' + for switching_time in value: + an = self._encode_timer(switching_time['An']) + aus = self._encode_timer(switching_time['Aus']) + times += f'{an:02x}{aus:02x}' + valuebytes = bytes.fromhex(times) + self.logger.debug(f'Created value bytes for type {valuetype} as hexstring: {self._bytes2hexstring(valuebytes)} and as bytes: {valuebytes}') + except Exception as e: + self.logger.error(f'Incorrect data format, (An: hh:mm Aus: hh:mm) expected; Error: {e}') + return None + # valuetype 'list' is transformed to listentry via index on read, but written directly as int, so numerical transform could apply + elif valuetype == 'integer' or valuetype == 'list': + # transform value is numerical -> multiply value with it + if self._isfloat(valuereadtransform): + value = self._value_transform_write(value, valuereadtransform) + self.logger.debug(f'Transformed value using method "* {valuereadtransform}" to {value}') + elif valuereadtransform == 'bool': + value = bool(value) + else: + value = int(value) + valuebytes = self._int2bytes(value, commandvaluebytes, 
byteorder='little') + self.logger.debug(f'Created value bytes for type {valuetype} as hexstring: {self._bytes2hexstring(valuebytes)} and as bytes: {valuebytes}') + else: + self.logger.error(f'Type {valuetype} not definied for creating write command bytes') + return None + except Exception as e: + self.logger.debug(f'_build_valuebytes_from_value failed with unexpected error: {e}') + return None + + return valuebytes + + def _build_command_packet(self, commandname, value=None, KWFollowUp=False): + ''' + Create formatted command sequence from command name. + If value is None, a read packet will be built, a write packet otherwise + + :param commandname: Command for which to create command sequence as defined in commands.py + :type commandname: str + :param value: Write value if command is to be written + :param KWFollowUp: create read sequence for KW protocol if multiple read commands will be sent without individual sync + :type KWFollowUp: bool + :return: tuple of (command sequence, expected response len), (None, 0) if error occured + :rtype: tuple (bytearray, int) + ''' + + # A read_request telegram looks like this: + # P300: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of value bytes expected in answer (1 byte), checksum (1 byte) + # KW: startbyte (1 byte), read/write (1 byte), addr (2 bytes), amount of value bytes expected in answer (1 byte) + # A write_request telegram looks like this: + # P300: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of bytes to be written (1 byte), value (bytes as per last byte), checksum (1 byte) + # KW: startbyte (1 byte), read/write (1 byte), addr (2 bytes), length of value (1 byte), value bytes (1-4 bytes) + + write = value is not None + self.logger.debug(f'Build {"write" if write else "read"} packet for command {commandname}') + + # Get command config + commandconf = self._commandset[commandname] + commandcode = (commandconf['addr']).lower() + commandvaluebytes = commandconf['len'] + + if write: + valuebytes = self._build_valuebytes_from_value(value, commandconf) + # can't write 'no value'... + if not valuebytes: + return (None, 0) + + # Calculate length of payload (only needed for P300) + payloadlength = int(self._controlset.get('Command_bytes_write', 0)) + int(commandvaluebytes) + self.logger.debug(f'Payload length is: {payloadlength} bytes') + + # Build packet for read commands + # + # at the moment this only has to differentiate between protocols P300 and KW + # these are basically similar, only P300 is an evolution of KW adding + # stateful connections, command length and checksum + # + # so for the time being the easy way is one code path for both protocols which + # omits P300 elements from the built byte string. 
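+        # Worked example (purely illustrative, assuming the common start/read
+        # bytes 0x41/0x00/0x01 for P300 and 0x01/0xF7 for KW) for a read of
+        # datapoint 0x00f8 expecting 2 answer bytes:
+        #   P300: 41 05 00 01 00 F8 02 00
+        #         start, payload length, request, read, address, expected bytes,
+        #         checksum = (0x05+0x00+0x01+0x00+0xF8+0x02) & 0xFF = 0x00
+        #   KW:   01 F7 00 F8 02
+        #         start, read, address, expected bytes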
+ # Later additions of other protocols (like GWG) might have to bring a second + # code path for proper processing + packet = bytearray() + if not KWFollowUp: + packet.extend(self._int2bytes(self._controlset['StartByte'], 1)) + if self._protocol == 'P300': + if write: + packet.extend(self._int2bytes(payloadlength, 1)) + else: + packet.extend(self._int2bytes(self._controlset['Command_bytes_read'], 1)) + packet.extend(self._int2bytes(self._controlset['Request'], 1)) + + if write: + packet.extend(self._int2bytes(self._controlset['Write'], 1)) + else: + packet.extend(self._int2bytes(self._controlset['Read'], 1)) + packet.extend(bytes.fromhex(commandcode)) + packet.extend(self._int2bytes(commandvaluebytes, 1)) + if write: + packet.extend(valuebytes) + if self._protocol == 'P300': + packet.extend(self._int2bytes(self._calc_checksum(packet), 1)) + + if self._protocol == 'P300': + responselen = int(self._controlset['Command_bytes_read']) + 4 + (0 if write else int(commandvaluebytes)) + else: + responselen = 1 if write else int(commandvaluebytes) + + if write: + self.logger.debug(f'Created command {commandname} to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet} with value {value} (transformed to value byte {self._bytes2hexstring(valuebytes)})') + else: + self.logger.debug(f'Created command {commandname} to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet}') + + return (packet, responselen) + + def _parse_response(self, response, commandname='', read_response=True): + ''' + Process device response data, try to parse type and value + + :param response: Data received from device + :type response: bytearray + :param commandname: Commandname used for request (only needed for KW protocol) + :type commandname: str + :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) + :type read_response: bool + :return: tuple of (parsed response value, commandcode) or None if error + ''' + if self._protocol == 'P300': + + # A read_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of valuebytes (1 byte), value (bytes as per last byte), checksum (1 byte) + # A write_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of bytes written (1 byte), checksum (1 byte) + + # Validate checksum + checksum = self._calc_checksum(response[1:len(response) - 1]) # first, cut first byte (ACK) and last byte (checksum) and then calculate checksum + received_checksum = response[len(response) - 1] + if received_checksum != checksum: + self.logger.error(f'Calculated checksum {checksum} does not match received checksum of {received_checksum}! 
Ignoring reponse') + return None + + # Extract command/address, valuebytes and valuebytecount out of response + commandcode = response[5:7].hex() + responsetypecode = response[3] # 0x00 = query, 0x01 = reply, 0x03 = error + responsedatacode = response[4] # 0x01 = ReadData, 0x02 = WriteData, 0x07 = Function Call + valuebytecount = response[7] + + # Extract databytes out of response + rawdatabytes = bytearray() + rawdatabytes.extend(response[8:8 + (valuebytecount)]) + elif self._protocol == 'KW': + + # imitate P300 response code data for easier combined handling afterwards + # a read_response telegram consists only of the value bytes + # a write_response telegram is 0x00 for OK, 0xXX for error + if commandname == '': + self.logger.error('trying to parse KW protocol response, but commandname not set in _parse_response. This should not happen...') + return None + + responsetypecode = 1 + commandcode = self._commandset[commandname]['addr'].lower() + valuebytecount = len(response) + rawdatabytes = response + + if read_response: + # value response to read request, error detection by empty = no response + responsedatacode = 1 + if len(rawdatabytes) == 0: + # error, no answer means wrong address (?) + responsetypecode = 3 + else: + # status response to write request + responsedatacode = 2 + if (len(rawdatabytes) == 1 and rawdatabytes[0] != 0) or len(rawdatabytes) == 0: + # error if status reply is not 0x00 + responsetypecode = 3 + + self.logger.debug(f'Response decoded to: commandcode: {commandcode}, responsedatacode: {responsedatacode}, valuebytecount: {valuebytecount}, responsetypecode: {responsetypecode}') + self.logger.debug(f'Rawdatabytes formatted: {self._bytes2hexstring(rawdatabytes)} and unformatted: {rawdatabytes}') + + # Process response for items if response and not error + # added: only in P300 or if read_response is set, do not try if KW replies with 0x00 (OK) + if responsedatacode == 1 and responsetypecode != 3 and (self._protocol == 'P300' or read_response): + + # parse response if command config is available + commandname = self._commandname_by_commandcode(commandcode) + if commandname is None: + self.logger.error(f'Received response for unknown address point {commandcode}') + return None + + # Get command and respective unit config + commandconf = self._commandset[commandname] + commandvaluebytes = commandconf['len'] + commandunit = commandconf['unit'] + unitconf = self._unitset.get(commandunit) + if not unitconf: + self.logger.error(f'Unit configuration not found for unit {commandunit} in protocol {self._protocol}. 
This is a configuration error in commands.py, please fix') + return None + commandsigned = unitconf['signed'] + valuetransform = unitconf['read_value_transform'] + + # start value decode + if commandunit == 'CT': + timer = self._decode_timer(rawdatabytes.hex()) + # fill list + timer = [{'An': on_time, 'Aus': off_time} + for on_time, off_time in zip(timer, timer)] + value = timer + self.logger.debug(f'Matched command {commandname} and read transformed timer {value} and byte length {commandvaluebytes}') + elif commandunit == 'TI': + # decode datetime + value = datetime.strptime(rawdatabytes.hex(), '%Y%m%d%W%H%M%S').isoformat() + self.logger.debug(f'Matched command {commandname} and read transformed datetime {value} and byte length {commandvaluebytes}') + elif commandunit == 'DA': + # decode date + value = datetime.strptime(rawdatabytes.hex(), '%Y%m%d%W%H%M%S').date().isoformat() + self.logger.debug(f'Matched command {commandname} and read transformed datetime {value} and byte length {commandvaluebytes}') + elif commandunit == 'ES': + # erstes Byte = Fehlercode; folgenden 8 Byte = Systemzeit + errorcode = (rawdatabytes[:1]).hex() + # errorquerytime = (rawdatabytes[1:8]).hex() + value = self._error_decode(errorcode) + self.logger.debug(f'Matched command {commandname} and read transformed errorcode {value} (raw value was {errorcode}) and byte length {commandvaluebytes}') + elif commandunit == 'SC': + # erstes Byte = Anlagenschema + systemschemescode = (rawdatabytes[:1]).hex() + value = self._systemscheme_decode(systemschemescode) + self.logger.debug(f'Matched command {commandname} and read transformed system scheme {value} (raw value was {systemschemescode}) and byte length {commandvaluebytes}') + elif commandunit == 'BA': + operatingmodecode = (rawdatabytes[:1]).hex() + value = self._operatingmode_decode(operatingmodecode) + self.logger.debug(f'Matched command {commandname} and read transformed operating mode {value} (raw value was {operatingmodecode}) and byte length {commandvaluebytes}') + elif commandunit == 'DT': + # device type has 8 bytes, but first 4 bytes are device type indicator + devicetypebytes = rawdatabytes[:2].hex() + value = self._devicetype_decode(devicetypebytes).upper() + self.logger.debug(f'Matched command {commandname} and read transformed device type {value} (raw value was {devicetypebytes}) and byte length {commandvaluebytes}') + elif commandunit == 'SN': + # serial number has 7 bytes, + serialnumberbytes = rawdatabytes[:7] + value = self._serialnumber_decode(serialnumberbytes) + self.logger.debug(f'Matched command {commandname} and read transformed device type {value} (raw value was {serialnumberbytes}) and byte length {commandvaluebytes}') + elif commandunit == 'HEX': + # hex string for debugging purposes + hexstr = rawdatabytes.hex() + value = ' '.join([hexstr[i:i + 2] for i in range(0, len(hexstr), 2)]) + self.logger.debug(f'Read hex bytes {value}') + else: + rawvalue = self._bytes2int(rawdatabytes, commandsigned) + value = self._value_transform_read(rawvalue, valuetransform) + self.logger.debug(f'Matched command {commandname} and read transformed value {value} (integer raw value was {rawvalue}) and byte length {commandvaluebytes}') + + # assign to dict for use by other functions + self._last_values[commandcode] = value + + return (value, commandcode) + + # Handling of write command response if not error + elif responsedatacode == 2 and responsetypecode != 3: + self.logger.debug(f'Write request of adress {commandcode} successfull writing {valuebytecount} bytes') + 
return True + else: + self.logger.error(f'Write request for address {commandcode} NOT successful, tried to write {valuebytecount} bytes') + return None + + def _viess_dict_to_uzsu_dict(self): + ''' + Convert data read from device to UZSU compatible struct. + Input is taken from self._viess_timer_dict, output is written to + self._uzsu_dict + ''' + dict_timer = {} + empty_time = '00:00' + shitems = Items.get_instance() + + try: + sunset = shitems.return_item('env.location.sunset')().strftime('%H:%M') + sunrise = shitems.return_item('env.location.sunrise')().strftime('%H:%M') + except (AttributeError, ValueError): + sunset = '21:00' + sunrise = '06:00' + + # convert all switching times with corresponding app and days to timer-dict + for application in self._viess_timer_dict: + if application not in dict_timer: + dict_timer[application] = {} + for application_day in self._viess_timer_dict[application]: + timer = self._viess_timer_dict[application][application_day] + day = application_day[(application_day.rfind('_') + 1):len(application_day)].lower() + + # normalize days + for element in self._wochentage: + if day in self._wochentage[element]: + weekday = element + + for entry in timer: + for event, sw_time in entry.items(): + if sw_time != empty_time: + value = 1 if event == 'An' else 0 + if sw_time not in dict_timer[application]: + dict_timer[application][sw_time] = {} + if value not in dict_timer[application][sw_time]: + dict_timer[application][sw_time][value] = [] + dict_timer[application][sw_time][value].append(weekday) + + self.logger.debug(f'Viessmann timer dict for UZSU: {dict_timer}') + + # find items, read UZSU-dict, convert to list of switching times, update item + for application in dict_timer: + item = self._application_timer[application]['item'] + + # read UZSU-dict (or use preset if empty) + uzsu_dict = item() + if not item(): + uzsu_dict = {'lastvalue': '0', 'sunset': sunset, 'list': [], 'active': True, 'interpolation': {'initage': '', 'initialized': True, 'itemtype': 'bool', 'interval': '', 'type': 'none'}, 'sunrise': sunrise} + + # create empty list + uzsu_dict['list'] = [] + + # fill list with switching times + for sw_time in sorted(dict_timer[application].keys()): + for key in dict_timer[application][sw_time]: + rrule = 'FREQ=WEEKLY;BYDAY=' + ','.join(dict_timer[application][sw_time][key]) + uzsu_dict['list'].append({'time': sw_time, 'rrule': rrule, 'value': str(key), 'active': True}) + + # update item + item(uzsu_dict, self.get_shortname()) + + def _uzsu_dict_to_viess_timer(self, timer_app, uzsu_dict): + ''' + Convert UZSU dict from item/visu for selected application into separate + on/off time events and write all timers to the device + + :param timer_app: Application for which the timer should be written, as in commands.py + :type timer_app: str + :param uzsu_dict: UZSU-compatible dict with timer data + :type uzsu_dict: dict + ''' + if self._timerread: + + # set variables + commandnames = set() + timer_dict = {} + an = {} + aus = {} + + # quit if timer_app not defined + if timer_app not in self._application_timer: + return + + commandnames.update([self._commandname_by_commandcode(code) for code in self._application_timer[timer_app]['commandcodes']]) + self.logger.debug(f'Commandnames: {commandnames}') + + # find switching times and create lists for on and off operations + for sw_time in uzsu_dict['list']: + myDays = sw_time['rrule'].split(';')[1].split('=')[1].split(',') + for day in myDays: + if sw_time['value'] == '1' and sw_time['active']: + if day not in an: + an[day] = [] +
an[day].append(sw_time['time']) + for day in myDays: + if sw_time['value'] == '0' and sw_time['active']: + if day not in aus: + aus[day] = [] + aus[day].append(sw_time['time']) + + # sort daily lists + for day in an: + an[day].sort() + self.logger.debug(f'An: {an}') + for day in aus: + aus[day].sort() + self.logger.debug(f'Aus: {aus}') + + # create timer dict in Viessmann format for all weekdays + for commandname in commandnames: + self.logger.debug(f'Commandname in process: {commandname}') + # create empty dict + timer_dict[commandname] = [{'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}, {'An': '00:00', 'Aus': '00:00'}] + # get current day + wday = commandname[(commandname.rfind('_') + 1):len(commandname)].lower() + # normalize day + for element in self._wochentage: + if wday in self._wochentage[element]: + wday = element + # transfer switching times + for idx, val in enumerate(an[wday]): + timer_dict[commandname][idx]['An'] = val + for idx, val in enumerate(aus[wday]): + timer_dict[commandname][idx]['Aus'] = val + self.logger.debug(f'Timer-dict for update of items: {timer_dict}') + + # write all timer dicts to device + for commandname in timer_dict: + value = timer_dict[commandname] + self.logger.debug(f'Got item value to be written: {value} on command name {commandname}') + self._send_command(commandname, value) + + def _calc_checksum(self, packet): + ''' + Calculate checksum for P300 protocol packets + + :parameter packet: Data packet for which to calculate checksum + :type packet: bytearray + :return: Calculated checksum + :rtype: int + ''' + checksum = 0 + if len(packet) > 0: + if packet[:1] == b'\x41': + packet = packet[1:] + checksum = sum(packet) + checksum = checksum - int(checksum / 256) * 256 + else: + self.logger.error('bytes to calculate checksum from not starting with start byte') + else: + self.logger.error('No bytes received to calculate checksum') + return checksum + + def _int2bytes(self, value, length, signed=False, byteorder='big'): + ''' + Convert value to bytearray with respect to defined length and sign format. 
+ + Value exceeding limit set by length and sign will be truncated + + :parameter value: Value to convert + :type value: int + :parameter length: number of bytes to create + :type length: int + :parameter signed: True if result should be a signed int, False for unsigned + :type signed: bool + :return: Converted value + :rtype: bytearray + ''' + value = value % (2 ** (length * 8)) + return value.to_bytes(length, byteorder=byteorder, signed=signed) + + def _bytes2int(self, rawbytes, signed): + ''' + Convert bytearray to value with respect to sign format + + :parameter rawbytes: Bytes to convert + :type rawbytes: bytearray + :parameter signed: True if result should be a signed int, False for unsigned + :type signed: bool + :return: Converted value + :rtype: int + ''' + return int.from_bytes(rawbytes, byteorder='little', signed=signed) + + def _bytes2hexstring(self, bytesvalue): + ''' + Create hex-formatted string from bytearray + :param bytesvalue: Bytes to convert + :type bytesvalue: bytearray + :return: Converted hex string + :rtype: str + ''' + return ''.join(f'{c:02x}' for c in bytesvalue) + + def _decode_rawvalue(self, rawdatabytes, commandsigned): + ''' + Convert little-endian byte sequence to int value + + :param rawdatabytes: Bytes to convert + :type rawdatabytes: bytearray + :param commandsigned: 'signed' if value should be interpreted as signed + :type commandsigned: str + :return: Converted value + :rtype: int + ''' + rawvalue = 0 + for i in range(len(rawdatabytes)): + leftbyte = rawdatabytes[0] + value = int(leftbyte * pow(256, i)) + rawvalue += value + rawdatabytes = rawdatabytes[1:] + # take signed/unsigned representation into account + if commandsigned == 'signed' and rawvalue > int(pow(256, i) / 2 - 1): + rawvalue = (pow(256, i) - rawvalue) * (-1) + return rawvalue + + def _decode_timer(self, rawdatabytes): + ''' + Generator to convert byte sequence to a number of time strings hh:mm + + :param rawdatabytes: Bytes to convert + :type rawdatabytes: bytearray + ''' + while rawdatabytes: + hours, minutes = divmod(int(rawdatabytes[:2], 16), 8) + if minutes >= 6 or hours >= 24: + # not a valid time + yield '00:00' + else: + yield f'{hours:02d}:{(minutes * 10):02d}' + rawdatabytes = rawdatabytes[2:] + return None + + def _encode_timer(self, switching_time): + ''' + Convert time string to encoded time value for timer application + + :param switching_time: time value in 'hh:mm' format + :type switching_time: str + :return: Encoded time value + :rtype: int + ''' + if switching_time == '00:00': + return 0xff + clocktime = re.compile(r'(\d\d):(\d\d)') + mo = clocktime.search(switching_time) + number = int(mo.group(1)) * 8 + int(mo.group(2)) // 10 + return number + + def _value_transform_read(self, value, transform): + ''' + Transform value according to protocol specification after reading from device + + :param value: Value to transform + :param transform: Specification for transforming + :return: Transformed value + ''' + if transform == 'bool': + return bool(value) + elif self._isfloat(transform): + return round(value / float(transform), 2) + else: + return int(value) + + def _value_transform_write(self, value, transform): + ''' + Transform value according to protocol requirement for writing to device + + :param value: Value to transform + :type value: int + :param transform: Specification for transforming + :type transform: int + :return: Transformed value + :rtype: int + ''' + # as transform and value can be float and by error possibly str, we try to float both + return int(float(value) * float(transform))
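As a quick reference for the two byte-level conversions implemented above, here is a minimal standalone sketch (illustration only, not part of the patch): the P300 checksum is the sum of all telegram bytes after the 0x41 start byte, modulo 256, and a timer switching time hh:mm is encoded as hours * 8 + minutes / 10, with 0xff marking an unused slot. The sample telegram is the device type read request for address 00f8 that get_device_type() below also builds.

def p300_checksum(packet: bytes) -> int:
    # strip the 0x41 start byte if present, then sum the remaining bytes modulo 256
    if packet[:1] == b'\x41':
        packet = packet[1:]
    return sum(packet) % 256

def encode_switching_time(hhmm: str) -> int:
    # '00:00' marks an unused timer slot (0xff); otherwise hours * 8 + minutes // 10
    if hhmm == '00:00':
        return 0xff
    hours, minutes = hhmm.split(':')
    return int(hours) * 8 + int(minutes) // 10

# read request for address 00f8 (device type), expecting 2 bytes:
# start byte, payload length, request, read, address high/low, expected byte count
telegram = bytes([0x41, 0x05, 0x00, 0x01, 0x00, 0xf8, 0x02])
assert p300_checksum(telegram) == 0x00       # checksum byte appended as last byte of the telegram
assert encode_switching_time('06:30') == 0x33  # 6 * 8 + 30 // 10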
+ + def _error_decode(self, value): + ''' + Decode error value from device if defined, else return error as string + ''' + value = str(value).upper() + if value in self._errorset: + errorstring = str(self._errorset[value]) + else: + errorstring = str(value) + return errorstring + + def _systemscheme_decode(self, value): + ''' + Decode schema value from device if possible, else return schema as string + ''' + if value in self._systemschemes: + systemscheme = str(self._systemschemes[value]) + else: + systemscheme = str(value) + return systemscheme + + def _operatingmode_decode(self, value): + ''' + Decode operating mode value from device if possible, else return mode as string + ''' + if value in self._operatingmodes: + operatingmode = str(self._operatingmodes[value]) + else: + operatingmode = str(value) + return operatingmode + + def _devicetype_decode(self, value): + ''' + Decode device type value if possible, else return device type as string + ''' + if value in self._devicetypes: + devicetypes = str(self._devicetypes[value]) + else: + devicetypes = str(value) + return devicetypes + + def _serialnumber_decode(self, serialnumberbytes): + ''' + Decode serial number from device response + ''' + serialnumber = 0 + serialnumberbytes.reverse() + for byte in range(0, len(serialnumberbytes)): + serialnumber += (serialnumberbytes[byte] - 48) * 10 ** byte + return hex(serialnumber).upper() + + def _commandname_by_commandcode(self, commandcode): + ''' + Find matching command name from commands.py for given command address + + :param commandcode: address of command + :type commandcode: str + :return: name of matching command or None if not found + ''' + for commandname in self._commandset.keys(): + if self._commandset[commandname]['addr'].lower() == commandcode.lower(): + return commandname + return None + + def _isfloat(self, value): + ''' + Test if string is decimal number + + :param value: expression to test + :type value: str + :return: True if value can be converted to a float, False otherwise + ''' + try: + float(value) + return True + except ValueError: + return False + +# +# webinterface +# + + def init_webinterface(self): + ''' + Initialize the web interface for this plugin + + This method is only needed if the plugin is implementing a web interface + ''' + try: + self.mod_http = Modules.get_instance().get_module('http') # try/except to handle running in a core version that does not support modules + except NameError: + self.mod_http = None + if self.mod_http is None: + self.logger.warning('Not initializing the web interface') + return False + + if 'SmartPluginWebIf' not in list(sys.modules['lib.model.smartplugin'].__dict__): + self.logger.warning('Web interface needs SmartHomeNG v1.5 or later. 
Not initializing the web interface') + return False + + # set application configuration for cherrypy + webif_dir = self.path_join(self.get_plugin_dir(), 'webif') + config = { + '/': { + 'tools.staticdir.root': webif_dir, + }, + '/static': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static' + } + } + + # Register the web interface as a cherrypy app + self.mod_http.register_webif(WebInterface(webif_dir, self, self._commandset), + self.get_shortname(), + config, + self.get_classname(), self.get_instance_name(), + description='') + + return True + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin, cmdset): + ''' + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + ''' + self.logger = logging.getLogger(__name__) + self.webif_dir = webif_dir + self.plugin = plugin + self.tplenv = self.init_template_environment() + + self.items = Items.get_instance() + + self.cmdset = cmdset + + self._last_read = {} + self._last_read['last'] = {'addr': None, 'val': '', 'cmd': ''} + + self._read_addr = None + self._read_cmd = '' + self._read_val = '' + + @cherrypy.expose + def index(self, reload=None): + ''' + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after being rendered + ''' + tmpl = self.tplenv.get_template('index.html') + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) + + return tmpl.render(p=self.plugin, + items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), + cmds=self.cmdset, + units=sorted(list(self.plugin._unitset.keys())), + last_read_addr=self._last_read['last']['addr'], + last_read_value=self._last_read['last']['val'], + last_read_cmd=self._last_read['last']['cmd'] + ) + + @cherrypy.expose + def submit(self, button=None, addr=None, length=0, unit=None, clear=False): + ''' + Submit handler for Ajax + ''' + if button is not None: + + read_val = self.plugin.read_addr(button) + if read_val is None: + self.logger.debug(f'Error trying to read addr {button} submitted by WebIf') + read_val = 'Fehler beim Lesen' + else: + read_cmd = self.plugin._commandname_by_commandcode(button) + if read_cmd is not None: + self._last_read[button] = {'addr': button, 'cmd': read_cmd, 'val': read_val} + self._last_read['last'] = self._last_read[button] + + elif addr is not None and unit is not None and length.isnumeric(): + + read_val = self.plugin.read_temp_addr(addr, int(length), unit) + if read_val is None: + self.logger.debug(f'Error trying to read custom addr {addr} submitted by WebIf') + read_val = 'Fehler beim Lesen' + else: + self._last_read[addr] = {'addr': addr, 'cmd': f'custom ({addr})', 'val': read_val} + self._last_read['last'] = self._last_read[addr] + + elif clear: + for addr in self._last_read: + self._last_read[addr]['val'] = '' + self._last_read['last'] = {'addr': None, 'val': '', 'cmd': ''} + + cherrypy.response.headers['Content-Type'] = 'application/json' + return json.dumps(self._last_read).encode('utf-8') + + +# ------------------------------------------ +# The following code is for standalone use of the plugin to identify the device +# ------------------------------------------ + +def
get_device_type(v, protocol): + + # try to connect and read device type info from 0x00f8 + print(f'Trying protocol {protocol} on device {serialport}') + + # first, initialize Viessmann object for use + v.alive = True + v._protocol = protocol + + # setup protocol controlset + v._controlset = commands.controlset[protocol] + res = v._connect() + if not res: + logger.info(f'Connection to {serialport} failed. Please check connection.') + return None + + res = v._init_communication() + if not res: + logger.info(f'Could not initialize communication using protocol {protocol}.') + return False + + # we are connected to the IR head + + # set needed unit + v._unitset = { + 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'} + } + + # set needed command. DeviceType command is (hopefully) the same in all devices... + v._commandset = { + 'DT': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, + } + + # we leave this empty so we get the DT code back + v._devicetypes = {} + + # this is protocol dependent, so easier to let the Class work this out... + (packet, responselen) = v._build_command_packet('DT') + if packet is None: + raise ValueError('No command packet received for address 00f8. This shouldn\'t happen...') + + # send it + response_packet = v._send_command_packet(packet, responselen) + if response_packet is None: + raise ValueError('Error on communicating with the device, no response received. Unknown error.') + + # let it go... + v._disconnect() + + (val, code) = v._parse_response(response_packet, 'DT') + + if val is not None: + return val + else: + return None + + +if __name__ == '__main__': + + usage = ''' + Usage: + ---------------------------------------------------------------------------------- + + This plugin is meant to be used inside SmartHomeNG. + + For diagnostic purposes, you can run it as a standalone Python program from the + command line. It will try to communicate with a connected Viessmann heating system + and return the device type and the necessary protocol for setting up your plugin + in SmartHomeNG. + + You need to call this plugin with the serial interface as the first parameter, e.g. + + ./__init__.py /dev/ttyUSB0 + + If you call it with -v as a second parameter, you get additional debug information: + + ./__init__.py /dev/ttyUSB0 -v + + ''' + + logger = logging.getLogger(__name__) + logger.setLevel(logging.CRITICAL) + ch = logging.StreamHandler() + ch.setLevel(logging.DEBUG) + + # create formatter and add it to the handlers + formatter = logging.Formatter('%(asctime)s - %(message)s @ %(lineno)d') + ch.setFormatter(formatter) + + # add the handlers to the logger + logger.addHandler(ch) + + serialport = "" + + if len(sys.argv) == 2: + serialport = sys.argv[1] + elif len(sys.argv) == 3 and sys.argv[2] == '-v': + serialport = sys.argv[1] + logger.setLevel(logging.DEBUG) + else: + print(usage) + exit() + + print("This is Viessmann plugin running in standalone mode") + print("===================================================") + + v = Viessmann(None, standalone=serialport, logger=logger) + + for proto in ('P300', 'KW'): + + res = get_device_type(v, proto) + if res is None: + + # None means no connection, no further tries + print(f'Connection could not be established to {serialport}. 
Please check connection.') + break + + if res is False: + + # False means no comm init (only P300), go on + print(f'Communication could not be established using protocol {proto}.') + else: + + # anything else should be the devices answer, try to decode and quit + print(f'Device ID is {res}, device type is {commands.devicetypes.get(res, "unknown")} using protocol {proto}') + # break + + print('Done.') diff --git a/viessmann/_pv_1_2_3/commands.py b/viessmann/_pv_1_2_3/commands.py new file mode 100755 index 000000000..34d97e67e --- /dev/null +++ b/viessmann/_pv_1_2_3/commands.py @@ -0,0 +1,900 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2020 Michael Wenzel +# Copyright 2020 Sebastian Helms +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Viessmann-Plugin for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +controlset = { + 'P300': { + 'Baudrate': 4800, + 'Bytesize': 8, # 'EIGHTBITS' + 'Parity': 'E', # 'PARITY_EVEN', + 'Stopbits': 2, # 'STOPBITS_TWO', + 'StartByte': 0x41, + 'Request': 0x00, + 'Response': 0x01, + 'Error': 0x03, + 'Read': 0x01, + 'Write': 0x02, + 'Function_Call': 0x7, + 'Acknowledge': 0x06, + 'Not_initiated': 0x05, + 'Init_Error': 0x15, + 'Reset_Command': 0x04, + 'Reset_Command_Response': 0x05, + 'Sync_Command': 0x160000, + 'Sync_Command_Response': 0x06, + 'Command_bytes_read': 5, + 'Command_bytes_write': 5, + # init: send'Reset_Command' receive'Reset_Command_Response' send'Sync_Command' + # request: send('StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Request' 'Read' 'addr' 'checksum') + # request_response: receive('Acknowledge' 'StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Response' 'Read' 'addr' 'Anzahl der Bytes des Wertes' 'Wert' 'checksum') + }, + 'KW': { + 'Baudrate': 4800, + 'Bytesize': 8, # 'EIGHTBITS' + 'Parity': 'E', # 'PARITY_EVEN', + 'Stopbits': 2, # 'STOPBITS_TWO', + 'StartByte': 0x01, + 'Read': 0xF7, + 'Write': 0xF4, + 'Acknowledge': 0x01, + 'Reset_Command': 0x04, + 'Not_initiated': 0x05, + 'Write_Ack': 0x00, + }, +} + +commandset = { + 'V200KO1B': { + # Kessel + 'Aussentemperatur': {'addr': '0800', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur + 'Aussentemperatur_TP': {'addr': '5525', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass + 'Aussentemperatur_Dp': {'addr': '5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) + 'Kesseltemperatur': 
{'addr': '0802', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur + 'Kesseltemperatur_TP': {'addr': '0810', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur_tiefpass + 'Kesselsolltemperatur': {'addr': '555a', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesselsolltemperatur + 'Temp_Speicher_Ladesensor': {'addr': '0812', 'len': 2, 'unit': 'IU10', 'set': False}, # Temperatur Speicher Ladesensor Komfortsensor + 'Auslauftemperatur': {'addr': '0814', 'len': 2, 'unit': 'IU10', 'set': False}, # Auslauftemperatur + 'Abgastemperatur': {'addr': '0816', 'len': 2, 'unit': 'IU10', 'set': False}, # Abgastemperatur + 'Gem_Vorlauftemperatur': {'addr': '081a', 'len': 2, 'unit': 'IU10', 'set': False}, # Gem. Vorlauftemperatur + 'Relais_K12': {'addr': '0842', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Relais K12 Interne Anschlußerweiterung + 'Eingang_0-10_V': {'addr': '0a86', 'len': 1, 'unit': 'IUINT', 'set': False}, # Eingang 0-10 V + 'EA1_Kontakt_0': {'addr': '0a90', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 0 + 'EA1_Kontakt_1': {'addr': '0a91', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 1 + 'EA1_Kontakt_2': {'addr': '0a92', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 2 + 'EA1_Externer_Soll_0-10V': {'addr': '0a93', 'len': 1, 'unit': 'IUINT', 'set': False}, # EA1: Externer Sollwert 0-10V + 'EA1_Relais_0': {'addr': '0a95', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Relais 0 + 'AM1_Ausgang_1': {'addr': '0aa0', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # AM1 Ausgang 1 + 'AM1_Ausgang_2': {'addr': '0aa1', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # AM1 Ausgang 2 + 'TempKOffset': {'addr': '6760', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Kesseloffset KT ueber WWsoll in Grad C + 'Systemtime': {'addr': '088e', 'len': 8, 'unit': 'TI', 'set': True}, # Systemzeit + 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema + 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Heizungstyp + 'Inventory': {'addr': '08e0', 'len': 7, 'unit': 'SN', 'set': False}, # Sachnummer + 'CtrlId': {'addr': '08e0', 'len': 7, 'unit': 'DT', 'set': False}, # Reglerkennung + # Fehler + 'Sammelstoerung': {'addr': '0a82', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung + 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 + 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 + 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 + 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 + 'Error4': {'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 + 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 + 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 + 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 + 'Error8': {'addr': '754f', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 + 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 + # Pumpen + 'Speicherladepumpe': {'addr': '6513', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe + 'Zirkulationspumpe': {'addr': '6515', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Zirkulationspumpe + 'Interne_Pumpe': {'addr': '7660', 'len': 1, 'unit': 'IUBOOL', 
'set': False}, # Interne Pumpe + 'Heizkreispumpe_A1M1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1 + 'Heizkreispumpe_A1M1_RPM': {'addr': '7663', 'len': 1, 'unit': 'IUNON', 'set': False}, # Heizkreispumpe A1M1 Drehzahl + 'Heizkreispumpe_M2': {'addr': '3906', 'len': 1, 'unit': 'IUINT', 'set': False}, # Heizkreispumpe M2 + 'Heizkreispumpe_M2_RPM': {'addr': '7665', 'len': 1, 'unit': 'IUNON', 'set': False}, # Heizkreispumpe M2 Drehzahl + 'Relais_Status_Pumpe_A1M1': {'addr': 'a152', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Relais-Status Heizkreispumpe 1 + # Brenner + 'Brennerstarts': {'addr': '088a', 'len': 4, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brennerstarts + 'Brenner_Betriebsstunden': {'addr': '08a7', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden + 'Brennerstatus_1': {'addr': '0842', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe1 + 'Brennerstatus_2': {'addr': '0849', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe2 + 'Oeldurchsatz': {'addr': '5726', 'len': 4, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Oeldurchsatz Brenner in Dezi-Liter pro Stunde + 'Oelverbrauch': {'addr': '7574', 'len': 4, 'unit': 'IS1000', 'set': True}, # Oelverbrauch kumuliert + # Solar + 'Nachladeunterdrueckung': {'addr': '6551', 'len': 1, 'unit': 'IUBOOL', 'set': False}, + 'SolarPumpe': {'addr': '6552', 'len': 1, 'unit': 'IUBOOL', 'set': False}, + 'Kollektortemperatur': {'addr': '6564', 'len': 2, 'unit': 'IS10', 'set': False}, + 'Speichertemperatur': {'addr': '6566', 'len': 2, 'unit': 'IU10', 'set': False}, + 'Solar_Betriebsstunden': {'addr': '6568', 'len': 4, 'unit': 'IU100', 'set': False}, + 'Solarsteuerung': {'addr': '7754', 'len': 2, 'unit': 'IUINT', 'set': False}, + # Heizkreis A1M1 + 'Raumtemperatur_A1M1': {'addr': '0896', 'len': 1, 'unit': 'ISNON', 'set': False}, # Raumtemperatur A1M1 + 'Raumtemperatur_Soll_Normalbetrieb_A1M1': {'addr': '2306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb A1M1 + 'Raumtemperatur_Soll_Red_Betrieb_A1M1': {'addr': '2307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 + 'Raumtemperatur_Soll_Party_Betrieb_A1M1': {'addr': '2308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb A1M1 + 'Aktuelle_Betriebsart_A1M1': {'addr': '2301', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart A1M1 + 'Betriebsart_A1M1': {'addr': '2323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Betriebsart A1M1 + 'Sparbetrieb_A1M1': {'addr': '2302', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Sparbetrieb A1M1 + 'Zustand_Sparbetrieb_A1M1': {'addr': '2331', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Sparbetrieb A1M1 + 'Partybetrieb_A1M1': {'addr': '2303', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Partybetrieb A1M1 + 'Zustand_Partybetrieb_A1M1': {'addr': '2330', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Partybetrieb A1M1 + 'Vorlauftemperatur_A1M1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur A1M1 + 'Vorlauftemperatur_Soll_A1M1': {'addr': '2544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll A1M1 + 'StatusFrost_A1M1': {'addr': '2500', 'len': 1, 
'unit': 'IUBOOL', 'set': False}, # Status Frostwarnung A1M1 + 'Externe_Raumsolltemperatur_Normal_A1M1': {'addr': '2321', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 37}, # Externe Raumsolltemperatur Normal A1M1 + 'Externe_Betriebsartenumschaltung_A1M1': {'addr': '2549', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Externe Betriebsartenumschaltung A1M1 + 'Speichervorrang_A1M1': {'addr': '27a2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Speichervorrang auf Heizkreispumpe und Mischer + 'Frostschutzgrenze_A1M1': {'addr': '27a3', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -9, 'max_value': 15}, # Frostschutzgrenze + 'Frostschutz_A1M1': {'addr': '27a4', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Frostschutzgrenze + 'Heizkreispumpenlogik_A1M1': {'addr': '27a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # HeizkreispumpenlogikFunktion + 'Sparschaltung_A1M1': {'addr': '27a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 35}, # AbsolutSommersparschaltung + 'Mischersparfunktion_A1M1': {'addr': '27a7', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Mischersparfunktion + 'Pumpenstillstandzeit_A1M1': {'addr': '27a9', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Pumpenstillstandzeit + 'Vorlauftemperatur_min_A1M1': {'addr': '27c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur + 'Vorlauftemperatur_max_A1M1': {'addr': '27c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur + 'Neigung_Heizkennlinie_A1M1': {'addr': '27d3', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie A1M1 + 'Niveau_Heizkennlinie_A1M1': {'addr': '27d4', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie A1M1 + 'Partybetrieb_Zeitbegrenzung_A1M1': {'addr': '27f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster + 'Temperaturgrenze_red_Betrieb_A1M1': {'addr': '27f8', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -61, 'max_value': 10}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC + 'Temperaturgrenze_red_Raumtemp_A1M1': {'addr': '27f9', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -60, 'max_value': 10}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes + 'Vorlauftemperatur_Erhoehung_Soll_A1M1': {'addr': '27fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 50}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % + 'Vorlauftemperatur_Erhoehung_Zeit_A1M1': {'addr': '27fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 150}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. 
+ # Heizkreis M2 + 'Raumtemperatur_M2': {'addr': '0898', 'len': 1, 'unit': 'ISNON', 'set': False}, # Raumtemperatur + 'Raumtemperatur_Soll_Normalbetrieb_M2': {'addr': '3306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb + 'Raumtemperatur_Soll_Red_Betrieb_M2': {'addr': '3307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb + 'Raumtemperatur_Soll_Party_Betrieb_M2': {'addr': '3308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb + 'Aktuelle_Betriebsart_M2': {'addr': '3301', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart + 'Betriebsart_M2': {'addr': '3323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Betriebsart + 'Sparbetrieb_M2': {'addr': '3302', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Sparbetrieb + 'Zustand_Sparbetrieb_M2': {'addr': '3331', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Sparbetrieb + 'Partybetrieb_M2': {'addr': '3303', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Partybetrieb + 'Zustand_Partybetrieb_M2': {'addr': '3330', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Partybetrieb + 'Vorlauftemperatur_M2': {'addr': '3900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur + 'Vorlauftemperatur_Soll_M2': {'addr': '3544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll + 'StatusFrost_M2': {'addr': '3500', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Status Frostwarnung + 'Externe_Raumsolltemperatur_Normal_M2': {'addr': '3321', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 37}, # Externe Raumsolltemperatur Normal + 'Externe_Betriebsartenumschaltung_M2': {'addr': '3549', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Externe Betriebsartenumschaltung + 'Speichervorrang_M2': {'addr': '37a2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Speichervorrang auf Heizkreispumpe und Mischer + 'Frostschutzgrenze_M2': {'addr': '37a3', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -9, 'max_value': 15}, # Frostschutzgrenze + 'Frostschutz_M2': {'addr': '37a4', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Frostschutzgrenze + 'Heizkreispumpenlogik_M2': {'addr': '37a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # HeizkreispumpenlogikFunktion + 'Sparschaltung_M2': {'addr': '37a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 35}, # AbsolutSommersparschaltung + 'Mischersparfunktion_M2': {'addr': '37a7', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Mischersparfunktion + 'Pumpenstillstandzeit_M2': {'addr': '37a9', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Pumpenstillstandzeit + 'Vorlauftemperatur_min_M2': {'addr': '37c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur + 'Vorlauftemperatur_max_M2': {'addr': '37c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur + 'Neigung_Heizkennlinie_M2': {'addr': '37d3', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie + 'Niveau_Heizkennlinie_M2': {'addr': '37d4', 'len': 1, 'unit': 'ISNON', 'set': 
True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie + 'Partybetrieb_Zeitbegrenzung_M2': {'addr': '37f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster + 'Temperaturgrenze_red_Betrieb_M2': {'addr': '37f8', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -61, 'max_value': 10}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC + 'Temperaturgrenze_red_Raumtemp_M2': {'addr': '37f9', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -60, 'max_value': 10}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes + 'Vorlauftemperatur_Erhoehung_Soll_M2': {'addr': '37fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 50}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % + 'Vorlauftemperatur_Erhoehung_Zeit_M2': {'addr': '37fb', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 150}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. + # Warmwasser + 'Warmwasser_Temperatur': {'addr': '0804', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C + 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 95}, # Warmwasser-Solltemperatur + 'Status_Warmwasserbereitung': {'addr': '650a', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Satus Warmwasserbereitung + 'WarmwasserPumpenNachlauf': {'addr': '6762', 'len': 2, 'unit': 'ISNON' , 'set': True, 'min_value': 0, 'max_value': 1}, # Warmwasserpumpennachlauf + # Ferienprogramm HK_A1M1 + 'Ferienprogramm_A1M1': {'addr': '2535', 'len': 1, 'unit': 'IUINT', 'set': False}, # Ferienprogramm A1M1 + 'Ferien_Abreisetag_A1M1': {'addr': '2309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag A1M1 + 'Ferien_Rückreisetag_A1M1': {'addr': '2311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag A1M1 + # Ferienprogramm HK_M2 + 'Ferienprogramm_M2': {'addr': '3535', 'len': 1, 'unit': 'IUINT', 'set': False}, # Ferienprogramm M2 + 'Ferien_Abreisetag_M2': {'addr': '3309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag M2 + 'Ferien_Rückreisetag_M2': {'addr': '3311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag M2 + # Schaltzeiten Warmwasser + 'Timer_Warmwasser_Mo': {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Montag + 'Timer_Warmwasser_Di': {'addr': '2108', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Dienstag + 'Timer_Warmwasser_Mi': {'addr': '2110', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Mittwoch + 'Timer_Warmwasser_Do': {'addr': '2118', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Donnerstag + 'Timer_Warmwasser_Fr': {'addr': '2120', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Freitag + 'Timer_Warmwasser_Sa': {'addr': '2128', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Samstag + 'Timer_Warmwasser_So': {'addr': '2130', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Sonntag + # Schaltzeiten HK_A1M1 + 'Timer_A1M1_Mo': {'addr': '2000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag + 'Timer_A1M1_Di': {'addr': '2008', 'len': 8, 'unit': 'CT', 'set': True}, # 
Timer Heizkreis_A1M1 Dienstag + 'Timer_A1M1_Mi': {'addr': '2010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch + 'Timer_A1M1_Do': {'addr': '2018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag + 'Timer_A1M1_Fr': {'addr': '2020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag + 'Timer_A1M1_Sa': {'addr': '2028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag + 'Timer_A1M1_So': {'addr': '2030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag + # Schaltzeiten HK_M2 + 'Timer_M2_Mo': {'addr': '3000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag + 'Timer_M2_Di': {'addr': '3008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag + 'Timer_M2_Mi': {'addr': '3010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch + 'Timer_M2_Do': {'addr': '3018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag + 'Timer_M2_Fr': {'addr': '3020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag + 'Timer_M2_Sa': {'addr': '3028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag + 'Timer_M2_So': {'addr': '3030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag + # Schaltzeiten Zirkulation + 'Timer_Zirku_Mo': {'addr': '2200', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Montag + 'Timer_Zirku_Di': {'addr': '2208', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Dienstag + 'Timer_Zirku_Mi': {'addr': '2210', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Mittwoch + 'Timer_Zirku_Do': {'addr': '2218', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Donnerstag + 'Timer_Zirku_Fr': {'addr': '2220', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Freitag + 'Timer_Zirku_Sa': {'addr': '2228', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Samstag + 'Timer_Zirku_So': {'addr': '2230', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Sonntag + }, + 'V200HO1C': { + # Allgemein + 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Heizungstyp + 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema + 'Frostgefahr': {'addr': '2510', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Frostgefahr + 'Aussentemperatur_TP': {'addr': '5525', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass + 'Aussentemperatur_Dp': {'addr': '5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) + 'Anlagenleistung': {'addr': 'a38f', 'len': 2, 'unit': 'IS10', 'set': False}, # Anlagenleistung + # Kessel + 'Kesseltemperatur_TP': {'addr': '0810', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur_tiefpass + 'Kesselsolltemperatur': {'addr': '555a', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesselsolltemperatur + 'Abgastemperatur': {'addr': '0816', 'len': 2, 'unit': 'IU10', 'set': False}, # Abgastemperatur + # Fehler + 'Sammelstoerung': {'addr': '0a82', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung + 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 + 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 + 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 + 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 + 'Error4': 
{'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 + 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 + 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 + 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 + 'Error8': {'addr': '754f', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 + 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 + # Pumpen + 'Speicherladepumpe': {'addr': '6513', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe für Warmwasser + 'Zirkulationspumpe': {'addr': '6515', 'len': 1, 'unit': 'IUBOOL', 'set': True}, # Zirkulationspumpe + 'Interne_Pumpe': {'addr': '7660', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Interne Pumpe + 'Heizkreispumpe_HK1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1 + 'Heizkreispumpe_HK2': {'addr': '3906', 'len': 1, 'unit': 'IUINT', 'set': False}, # Heizkreispumpe M2 + # Brenner + 'Brennerstarts': {'addr': '088a', 'len': 4, 'unit': 'ISNON', 'set': False}, # Brennerstarts + 'Brennerleistung': {'addr': 'a305', 'len': 2, 'unit': 'IS10', 'set': False}, # Brennerleistung + 'Brenner_Betriebsstunden': {'addr': '08a7', 'len': 4, 'unit': 'IU3600', 'set': False}, # Brenner-Betriebsstunden + # Solar + 'SolarPumpe': {'addr': '6552', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Solarpumpe + 'Kollektortemperatur': {'addr': '6564', 'len': 2, 'unit': 'IS10', 'set': False}, # Kollektortemperatur + 'Speichertemperatur': {'addr': '6566', 'len': 2, 'unit': 'IU10', 'set': False}, # Spichertemperatur + 'Solar_Betriebsstunden': {'addr': '6568', 'len': 4, 'unit': 'IU100', 'set': False}, # Solar Betriebsstunden + 'Solar_Waermemenge': {'addr': '6560', 'len': 2, 'unit': 'IUINT', 'set': False}, # Solar Waermemenge + 'Solar_Ausbeute': {'addr': 'cf30', 'len': 4, 'unit': 'IUINT', 'set': False}, # Solar Ausbeute + # Heizkreis 1 + 'Betriebsart_HK1': {'addr': '2500', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 3}, # Betriebsart (0=Abschaltbetrieb, 1=Red. Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) + 'Heizart_HK1': {'addr': '2323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) + 'Vorlauftemperatur_Soll_HK1': {'addr': '2544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll + 'Vorlauftemperatur_HK1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist + # Heizkreis 2 + 'Betriebsart_HK2': {'addr': '3500', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 3}, # Betriebsart (0=Abschaltbetrieb, 1=Red. 
Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) + 'Heizart_HK2': {'addr': '3323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) + 'Vorlauftemperatur_Soll_HK2': {'addr': '3544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll + 'Vorlauftemperatur_HK2': {'addr': '3900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist + # Warmwasser + 'Warmwasser_Temperatur': {'addr': '0812', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C + 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 80}, # Warmwasser-Solltemperatur + 'Warmwasser_Austrittstemperatur': {'addr': '0814', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwasseraustrittstemperatur in Grad C + }, + 'V200KW2': { + # Allgemein + 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Ermittle Device Typ der Anlage + 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema + 'AnlagenSoftwareIndex': {'addr': '7330', 'len': 1, 'unit': 'IUNON', 'set': False}, # Bedienteil SoftwareIndex + 'Aussentemperatur': {'addr': '0800', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass + 'Aussentemperatur_Dp': {'addr': '5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) + 'Systemtime': {'addr': '088e', 'len': 8, 'unit': 'TI', 'set': True}, # Systemzeit + # Kessel + 'TempKOffset': {'addr': '6760', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 10, 'max_value': 50}, # Kesseloffset KT ueber WWsoll in Grad C + 'Kesseltemperatur': {'addr': '0802', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur + 'Kesselsolltemperatur': {'addr': '5502', 'len': 2, 'unit': 'IU10', 'set': True}, # Kesselsolltemperatur + # Fehler + 'Sammelstoerung': {'addr': '0847', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung + 'Brennerstoerung': {'addr': '0883', 'len': 1, 'unit': 'RT', 'set': False}, + 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 + 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 + 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 + 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 + 'Error4': {'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 + 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 + 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 + 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 + 'Error8': {'addr': '754f', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 + 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 + # Pumpen + 'Speicherladepumpe': {'addr': '0845', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe für Warmwasser + 'Zirkulationspumpe': {'addr': '0846', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Zirkulationspumpe + 'Heizkreispumpe_A1M1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1M1 + 'Heizkreispumpe_M2': {'addr': '3906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe M2 + # Brenner + 
'Brennertyp': {'addr': 'a30b', 'len': 1, 'unit': 'IUNON', 'set': False}, # Brennertyp 0=einstufig 1=zweistufig 2=modulierend + 'Brennerstufe': {'addr': '551e', 'len': 1, 'unit': 'RT', 'set': False}, # Ermittle die aktuelle Brennerstufe + 'Brennerstarts': {'addr': '088a', 'len': 2, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brennerstarts + 'Brennerstatus_1': {'addr': '55d3', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe1 + 'Brennerstatus_2': {'addr': '0849', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe2 + 'Brenner_BetriebsstundenStufe1': {'addr': '0886', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden Stufe 1 + 'Brenner_BetriebsstundenStufe2': {'addr': '08a3', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden Stufe 2 + # Heizkreis A1M1 + 'Betriebsart_A1M1': {'addr': '2301', 'len': 1, 'unit': 'BA', 'set': True}, # Betriebsart A1M1 + 'Aktuelle_Betriebsart_A1M1': {'addr': '2500', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart A1M1 + 'Sparbetrieb_A1M1': {'addr': '2302', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Sparbetrieb A1M1 + 'Partybetrieb_A1M1_Zeit': {'addr': '27f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Partyzeit M2 + 'Partybetrieb_A1M1': {'addr': '2303', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Partybetrieb A1M1 + 'Vorlauftemperatur_A1M1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur A1M1 + 'Vorlauftemperatur_Soll_A1M1': {'addr': '2544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll A1M1 + 'Raumtemperatur_Soll_Normalbetrieb_A1M1': {'addr': '2306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb A1M1 + 'Raumtemperatur_Soll_Red_Betrieb_A1M1': {'addr': '2307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 + 'Raumtemperatur_Soll_Party_Betrieb_A1M1': {'addr': '2308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb A1M1 + 'Neigung_Heizkennlinie_A1M1': {'addr': '2305', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie A1M1 + 'Niveau_Heizkennlinie_A1M1': {'addr': '2304', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie A1M1 + 'MischerM1': {'addr': '254c', 'len': 1, 'unit': 'IUPR', 'set': False}, # Ermittle Mischerposition M1 + 'Heizkreispumpenlogik_A1M1': {'addr': '27a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K + 'Sparschaltung_A1M1': {'addr': '27a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 36}, # AbsolutSommersparschaltung + # Heizkreis M2 + 'Betriebsart_M2': {'addr': '3301', 'len': 1, 'unit': 'BA', 'set': True}, # Betriebsart M2 + 'Aktuelle_Betriebsart_M2': {'addr': '3500', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart M2 + 'Sparbetrieb_M2': 
{'addr': '3302', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Sparbetrieb + 'Partybetrieb_M2': {'addr': '3303', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Partybetrieb A1M1 + 'Partybetrieb_M2_Zeit': {'addr': '37f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Partyzeit M2 + 'Raumtemperatur_Soll_Normalbetrieb_M2': {'addr': '3306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb + 'Raumtemperatur_Soll_Red_Betrieb_M2': {'addr': '3307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb + 'Raumtemperatur_Soll_Party_Betrieb_M2': {'addr': '3308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb + 'Neigung_Heizkennlinie_M2': {'addr': '3305', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie M2 + 'Niveau_Heizkennlinie_M2': {'addr': '3304', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie M2 + 'MischerM2': {'addr': '354c', 'len': 1, 'unit': 'IUPR', 'set': False}, # Ermittle Mischerposition M2 + 'MischerM2Auf': {'addr': '084d', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # MischerM2 Auf 0=AUS;1=EIN + 'MischerM2Zu': {'addr': '084c', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # MischerM2 Zu 0=AUS;1=EIN + 'Vorlauftemperatur_Soll_M2': {'addr': '37c6', 'len': 2, 'unit': 'IU10', 'set': True, 'min_value': 10, 'max_value': 80}, # Vorlauftemperatur Soll + 'Vorlauftemperatur_M2': {'addr': '080c', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist + 'Vorlauftemperatur_min_M2': {'addr': '37c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur + 'Vorlauftemperatur_max_M2': {'addr': '37c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur + 'Heizkreispumpenlogik_M2': {'addr': '37a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K + 'Sparschaltung_M2': {'addr': '37a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 36}, # AbsolutSommersparschaltung + 'StatusKlemme2': {'addr': '3904', 'len': 1, 'unit': 'IUINT', 'set': False}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden + 'StatusKlemme17': {'addr': '3905', 'len': 1, 'unit': 'IUINT', 'set': False}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden + # Warmwasser + 'Warmwasser_Status': {'addr': '650A', 'len': 1, 'unit': 'IUNON', 'set': False}, # 0=Ladung inaktiv, 1=in Ladung, 2=im Nachlauf + 'Warmwasser_KesselOffset': {'addr': '6760', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 10, 'max_value': 50}, # Warmwasser Kessel Offset in K + 'Warmwasser_BeiPartyDNormal': {'addr': '6764', 'len': 1, 'unit': 'IUNON', 'set': True, 'min_value': 0, 'max_value': 2}, # WW Heizen bei Party 0=AUS, 1=nach Schaltuhr, 2=EIN + 'Warmwasser_Temperatur': 
{'addr': '0804', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C + 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 80}, # Warmwasser-Solltemperatur + 'Warmwasser_SolltemperaturAktuell': {'addr': '6500', 'len': 1, 'unit': 'IU10' , 'set': False}, # Warmwasser-Solltemperatur aktuell + 'Warmwasser_SollwertMax': {'addr': '675a', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # 0=inaktiv, 1=aktiv + # Ferienprogramm HK_A1M1 + 'Ferienprogramm_A1M1': {'addr': '2535', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Ferienprogramm A1M1 0=inaktiv 1=aktiv + 'Ferien_Abreisetag_A1M1': {'addr': '2309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag A1M1 + 'Ferien_Rückreisetag_A1M1': {'addr': '2311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag A1M1 + # Ferienprogramm HK_M2 + 'Ferienprogramm_M2': {'addr': '3535', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Ferienprogramm M2 0=inaktiv 1=aktiv + 'Ferien_Abreisetag_M2': {'addr': '3309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag M2 + 'Ferien_Rückreisetag_M2': {'addr': '3311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag M2 + # Schaltzeiten Warmwasser + 'Timer_Warmwasser_Mo': {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Montag + 'Timer_Warmwasser_Di': {'addr': '2108', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Dienstag + 'Timer_Warmwasser_Mi': {'addr': '2110', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Mittwoch + 'Timer_Warmwasser_Do': {'addr': '2118', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Donnerstag + 'Timer_Warmwasser_Fr': {'addr': '2120', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Freitag + 'Timer_Warmwasser_Sa': {'addr': '2128', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Samstag + 'Timer_Warmwasser_So': {'addr': '2130', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Sonntag + # Schaltzeiten HK_A1M1 + 'Timer_A1M1_Mo': {'addr': '2000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag + 'Timer_A1M1_Di': {'addr': '2008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag + 'Timer_A1M1_Mi': {'addr': '2010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch + 'Timer_A1M1_Do': {'addr': '2018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag + 'Timer_A1M1_Fr': {'addr': '2020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag + 'Timer_A1M1_Sa': {'addr': '2028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag + 'Timer_A1M1_So': {'addr': '2030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag + # Schaltzeiten HK_M2 + 'Timer_M2_Mo': {'addr': '3000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag + 'Timer_M2_Di': {'addr': '3008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag + 'Timer_M2_Mi': {'addr': '3010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch + 'Timer_M2_Do': {'addr': '3018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag + 'Timer_M2_Fr': {'addr': '3020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag + 'Timer_M2_Sa': {'addr': '3028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag + 'Timer_M2_So': {'addr': '3030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer 
Heizkreis_A1M1 Sonntag + }, + 'V200WO1C': { + # generelle Infos + 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # getAnlTyp -- Information - Allgemein: Anlagentyp (204D) + 'Aussentemperatur': {'addr': '0101', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempA -- Information - Allgemein: Aussentemperatur (-40..70) + # Anlagenstatus + 'Betriebsart': {'addr': 'b000', 'len': 1, 'unit': 'BA', 'set': True}, # getBetriebsart -- Bedienung HK1 - Heizkreis 1: Betriebsart (Textstring) + 'Manuell': {'addr': 'b020', 'len': 1, 'unit': 'IUNON', 'set': True, 'min_value': 0, 'max_value': 2}, # getManuell / setManuell -- 0 = normal, 1 = manueller Heizbetrieb, 2 = 1x Warmwasser auf Temp2 + 'Sekundaerpumpe': {'addr': '0484', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusSekP -- Diagnose - Anlagenuebersicht: Sekundaerpumpe 1 (0..1) + 'Heizkreispumpe': {'addr': '048d', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusPumpe -- Information - Heizkreis HK1: Heizkreispumpe (0..1) + 'Zirkulationspumpe': {'addr': '0490', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusPumpeZirk -- Information - Warmwasser: Zirkulationspumpe (0..1) + 'VentilHeizenWW': {'addr': '0494', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusVentilWW -- Diagnose - Waermepumpe: 3-W-Ventil Heizen WW1 (0 (Heizen)..1 (WW)) + 'Vorlaufsolltemp': {'addr': '1800', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempVLSoll -- Diagnose - Heizkreis HK1: Vorlaufsolltemperatur HK1 (0..95) + 'Outdoor_Fanspeed': {'addr': '1a52', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdFanOut -- Outdoor Fanspeed + 'Status_Fanspeed': {'addr': '1a53', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdFan -- Geschwindigkeit Luefter + 'Kompressor_Freq': {'addr': '1a54', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdKomp -- Compressor Frequency + # Temperaturen + 'SolltempWarmwasser': {'addr': '6000', 'len': 2, 'unit': 'IS10', 'set': True, 'min_value': 10, 'max_value': 60}, # getTempWWSoll -- Bedienung WW - Betriebsdaten WW: Warmwassersolltemperatur (10..60 (95)) + 'VorlauftempSek': {'addr': '0105', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekVL -- Information - Heizkreis HK1: Vorlauftemperatur Sekundaer 1 (0..95) + 'RuecklauftempSek': {'addr': '0106', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekRL -- Diagnose - Anlagenuebersicht: Ruecklauftemperatur Sekundaer 1 (0..95) + 'Warmwassertemperatur': {'addr': '010d', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempWWIstOben -- Information - Warmwasser: Warmwassertemperatur oben (0..95) + # Stellwerte + 'Raumsolltemp': {'addr': '2000', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollNormal -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur normal (10..30) + 'RaumsolltempReduziert': {'addr': '2001', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollRed -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur reduzierter Betrieb (10..30) + 'HeizkennlinieNiveau': {'addr': '2006', 'len': 2, 'unit': 'IS10', 'set': False}, # getHKLNiveau -- Bedienung HK1 - Heizkreis 1: Niveau der Heizkennlinie (-15..40) + 'HeizkennlinieNeigung': {'addr': '2007', 'len': 2, 'unit': 'IS10', 'set': False}, # getHKLNeigung -- Bedienung HK1 - Heizkreis 1: Neigung der Heizkennlinie (0..35) + 'RaumsolltempParty': {'addr': '2022', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollParty -- Bedienung HK1 - Heizkreis 1: Party Solltemperatur (10..30) + # Statistiken / Laufzeiten + 'EinschaltungenSekundaer': {'addr': '0504', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzQuelleSek -- 
Statistik - Schaltzyklen Anlage: Einschaltungen Sekundaerquelle (?) + 'EinschaltungenHeizstab1': {'addr': '0508', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHeizstabSt1 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 1 (?) + 'EinschaltungenHeizstab2': {'addr': '0509', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHeizstabSt2 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 2 (?) + 'EinschaltungenHK': {'addr': '050d', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHK -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizkreis (?) + 'LZSekundaerpumpe': {'addr': '0584', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZPumpeSek -- Statistik - Betriebsstunden Anlage: Betriebsstunden Sekundaerpumpe (?) + 'LZHeizstab1': {'addr': '0588', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZHeizstabSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 1 (?) + 'LZHeizstab2': {'addr': '0589', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZHeizstabSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 2 (?) + 'LZPumpeHK': {'addr': '058d', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZPumpe -- Statistik - Betriebsstunden Anlage: Betriebsstunden Pumpe HK1 (0..1150000) + 'LZWWVentil': {'addr': '0594', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZVentilWW -- Statistik - Betriebsstunden Anlage: Betriebsstunden Warmwasserventil (?) + 'LZVerdichterStufe1': {'addr': '1620', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 1 (?) + 'LZVerdichterStufe2': {'addr': '1622', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 2 (?) + 'LZVerdichterStufe3': {'addr': '1624', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt3 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 3 (?) + 'LZVerdichterStufe4': {'addr': '1626', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt4 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 4 (?) + 'LZVerdichterStufe5': {'addr': '1628', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt5 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 5 (?) + 'VorlauftempSekMittel': {'addr': '16b2', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekVLMittel -- Statistik - Energiebilanz: mittlere sek. 
Vorlauftemperatur (0..95) + 'RuecklauftempSekMittel': {'addr': '16b3', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekRLMittel -- Statistik - Energiebilanz: mittlere sek.Temperatur RL1 (0..95) + 'OAT_Temperature': {'addr': '1a5c', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempOAT -- OAT Temperature + 'ICT_Temperature': {'addr': '1a5d', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempICT -- OCT Temperature + 'CCT_Temperature': {'addr': '1a5e', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempCCT -- CCT Temperature + 'HST_Temperature': {'addr': '1a5f', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempHST -- HST Temperature + 'OMT_Temperature': {'addr': '1a60', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempOMT -- OMT Temperature + 'LZVerdichterWP': {'addr': '5005', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZWP -- Statistik - Betriebsstunden Anlage: Betriebsstunden Waermepumpe (0..1150000) + 'SollLeistungVerdichter': {'addr': '5030', 'len': 1, 'unit': 'IUNON', 'set': False}, # getPwrSollVerdichter -- Diagnose - Anlagenuebersicht: Soll-Leistung Verdichter 1 (0..100) + 'WaermeWW12M': {'addr': '1660', 'len': 4, 'unit': 'IU10', 'set': False}, # Wärmeenergie für WW-Bereitung der letzten 12 Monate (kWh) + 'ElektroWW12M': {'addr': '1670', 'len': 4, 'unit': 'IU10', 'set': False}, # elektr. Energie für WW-Bereitung der letzten 12 Monate (kWh) + }, +} + +unitset = { + 'P300': { + 'BA': {'unit_de': 'Betriebsart', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: BA + 'CT': {'unit_de': 'CycleTime', 'type': 'timer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: CT + 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: DT + 'ES': {'unit_de': 'ErrorState', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ES + 'IU2': {'unit_de': 'INT unsigned 2', 'type': 'integer', 'signed': False, 'read_value_transform': '2'}, # vito unit: UT1U, PR1 + 'IU10': {'unit_de': 'INT unsigned 10', 'type': 'integer', 'signed': False, 'read_value_transform': '10'}, # vito unit: + 'IU100': {'unit_de': 'INT unsigned 100', 'type': 'integer', 'signed': False, 'read_value_transform': '100'}, # vito unit: + 'IU3600': {'unit_de': 'INT unsigned 3600', 'type': 'integer', 'signed': False, 'read_value_transform': '3600'}, # vito unit: CS + 'IUBOOL': {'unit_de': 'INT unsigned bool', 'type': 'integer', 'signed': False, 'read_value_transform': 'bool'}, # vito unit: + 'IUINT': {'unit_de': 'INT unsigned int', 'type': 'integer', 'signed': False, 'read_value_transform': '1'}, # vito unit: + 'IUNON': {'unit_de': 'INT unsigned non', 'type': 'integer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: UTI, CO + 'IS2': {'unit_de': 'INT signed 2', 'type': 'integer', 'signed': True, 'read_value_transform': '2'}, # vito unit: UT1, PR + 'IS10': {'unit_de': 'INT signed 10', 'type': 'integer', 'signed': True, 'read_value_transform': '10'}, # vito unit: UT, UN + 'IS100': {'unit_de': 'INT signed 100', 'type': 'integer', 'signed': True, 'read_value_transform': '100'}, # vito unit: + 'IS1000': {'unit_de': 'INT signed 1000', 'type': 'integer', 'signed': True, 'read_value_transform': '1000'}, # vito unit: + 'ISNON': {'unit_de': 'INT signed non', 'type': 'integer', 'signed': True, 'read_value_transform': 'non'}, # vito unit: + 'RT': {'unit_de': 'ReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ST, RT + 'SC': {'unit_de': 'SystemScheme', 'type': 'list', 
'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'SN': {'unit_de': 'Sachnummer', 'type': 'serial', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'SR': {'unit_de': 'SetReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'TI': {'unit_de': 'SystemTime', 'type': 'datetime', 'signed': False, 'read_value_transform': 'non'}, # vito unit: TI + 'DA': {'unit_de': 'Date', 'type': 'date', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'HEX': {'unit_de': 'HexString', 'type': 'string', 'signed': False, 'read_value_transform': 'hex'}, # vito unit: + }, + 'KW': { + 'BA': {'unit_de': 'Betriebsart', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: BA + 'CT': {'unit_de': 'CycleTime', 'type': 'timer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: CT + 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: DT + 'ES': {'unit_de': 'ErrorState', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ES + 'IU2': {'unit_de': 'INT unsigned 2', 'type': 'integer', 'signed': False, 'read_value_transform': '2'}, # vito unit: UT1U, PR1 + 'IU10': {'unit_de': 'INT unsigned 10', 'type': 'integer', 'signed': False, 'read_value_transform': '10'}, # vito unit: + 'IU100': {'unit_de': 'INT unsigned 100', 'type': 'integer', 'signed': False, 'read_value_transform': '100'}, # vito unit: + 'IU1000': {'unit_de': 'INT unsigned 1000', 'type': 'integer', 'signed': False, 'read_value_transform': '1000'}, # vito unit: + 'IU3600': {'unit_de': 'INT unsigned 3600', 'type': 'integer', 'signed': False, 'read_value_transform': '3600'}, # vito unit: CS + 'IUPR': {'unit_de': 'INT unsigned 2.55', 'type': 'integer', 'signed': False, 'read_value_transform': '2.55'}, # vito unit: PP + 'IUBOOL': {'unit_de': 'INT unsigned bool', 'type': 'integer', 'signed': False, 'read_value_transform': 'bool'}, # vito unit: + 'IUINT': {'unit_de': 'INT unsigned int', 'type': 'integer', 'signed': False, 'read_value_transform': '1'}, # vito unit: + 'IUNON': {'unit_de': 'INT unsigned non', 'type': 'integer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: UTI, CO + 'IS2': {'unit_de': 'INT signed 2', 'type': 'integer', 'signed': True, 'read_value_transform': '2'}, # vito unit: UT1, PR + 'IS10': {'unit_de': 'INT signed 10', 'type': 'integer', 'signed': True, 'read_value_transform': '10'}, # vito unit: UT, UN + 'IS100': {'unit_de': 'INT signed 100', 'type': 'integer', 'signed': True, 'read_value_transform': '100'}, # vito unit: + 'IS1000': {'unit_de': 'INT signed 1000', 'type': 'integer', 'signed': True, 'read_value_transform': '1000'}, # vito unit: + 'ISNON': {'unit_de': 'INT signed non', 'type': 'integer', 'signed': True, 'read_value_transform': 'non'}, # vito unit: + 'RT': {'unit_de': 'ReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ST, RT + 'BT': {'unit_de': 'Brennertyp', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'SC': {'unit_de': 'SystemScheme', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'SN': {'unit_de': 'Sachnummer', 'type': 'serial', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'SR': {'unit_de': 'SetReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'TI': {'unit_de': 'SystemTime', 'type': 'datetime', 'signed': False, 'read_value_transform': 'non'}, # vito unit: 
TI + 'DA': {'unit_de': 'Date', 'type': 'date', 'signed': False, 'read_value_transform': 'non'}, # vito unit: + 'HEX': {'unit_de': 'HexString', 'type': 'string', 'signed': False, 'read_value_transform': 'hex'}, # vito unit: + } +} + +errorset = { + 'P300': { + '00': 'Regelbetrieb (kein Fehler)', + '0F': 'Wartung (fuer Reset Codieradresse 24 auf 0 stellen)', + '10': 'Kurzschluss Aussentemperatursensor', + '18': 'Unterbrechung Aussentemperatursensor', + '19': 'Unterbrechung Kommunikation Außentemperatursensor RF', + '1D': 'Keine Kommunikation mit Sensor', + '1E': 'Strömungssensor defekt', + '1F': 'Strömungssensor defekt', + '20': 'Kurzschluss Vorlauftemperatursensor', + '21': 'Kurzschluss Ruecklauftemperatursensor', + '28': 'Unterbrechung Aussentemperatursensor / Vorlauftemperatursensor Anlage', + '29': 'Unterbrechung Ruecklauftemperatursensor', + '30': 'Kurzschluss Kesseltemperatursensor', + '38': 'Unterbrechung Kesseltemperatursensor', + '40': 'Kurzschluss Vorlauftemperatursensor M2', + '42': 'Unterbrechung Vorlauftemperatursensor M2', + '44': 'Kurzschluss Vorlauftemperatursensor Heizkreis 3', + '48': 'Unterbrechung Vorlauftemperatursensor Heizkreis 3', + '50': 'Kurzschluss Speichertemperatursensor', + '51': 'Kurzschluss Auslauftemperatursensor', + '58': 'Unterbrechung Speichertemperatursensor', + '59': 'Unterbrechung Auslauftemperatursensor', + '92': 'Solar: Kurzschluss Kollektortemperatursensor', + '93': 'Solar: Kurzschluss Sensor S3', + '94': 'Solar: Kurzschluss Speichertemperatursensor', + '9A': 'Solar: Unterbrechung Kollektortemperatursensor', + '9B': 'Solar: Unterbrechung Sensor S3', + '9C': 'Solar: Unterbrechung Speichertemperatursensor', + '9E': 'Solar: Zu geringer bzw. kein Volumenstrom oder Temperaturwächter ausgeloest', + '9F': 'Solar: Fehlermeldung Solarteil (siehe Solarregler)', + 'A4': 'Amx. 
Anlagendruck überschritten', + 'A7': 'Bedienteil defekt', + 'A8': 'Luft in der internen Umwaelzpumpe oder Mindest-Volumenstrom nicht erreicht', + 'B0': 'Kurzschluss Abgastemperatursensor', + 'B1': 'Kommunikationsfehler Bedieneinheit', + 'B4': 'Interner Fehler (Elektronik)', + 'B5': 'Interner Fehler (Elektronik)', + 'B6': 'Ungueltige Hardwarekennung (Elektronik)', + 'B7': 'Interner Fehler (Kesselkodierstecker)', + 'B8': 'Unterbrechung Abgastemperatursensor', + 'B9': 'Interner Fehler (Dateneingabe wiederholen)', + 'BA': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis M2', + 'BB': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis 3', + 'BC': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M1', + 'BD': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M2', + 'BE': 'Falsche Codierung Fernbedienung Vitorol', + 'BF': 'Falsches Kommunikationsmodul LON', + 'C1': 'Externe Sicherheitseinrichtung (Kessel kuehlt aus)', + 'C2': 'Kommunikationsfehler Solarregelung', + 'C3': 'Kommunikationsfehler Erweiterung AM1', + 'C4': 'Kommunikationsfehler Erweiterumg Open Therm', + 'C5': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M1', + 'C6': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M2', + 'C7': 'Falsche Codierung der Heizkreispumpe', + 'C8': 'Kommunikationsfehler drehzahlgeregelte, externe Heizkreispumpe 3', + 'C9': 'Stoermeldeeingang am Schaltmodul-V aktiv', + 'CD': 'Kommunikationsfehler Vitocom 100 (KM-BUS)', + 'CE': 'Kommunikationsfehler Schaltmodul-V', + 'CF': 'Kommunikationsfehler LON Modul', + 'D1': 'Brennerstoerung', + 'D4': 'Sicherheitstemperaturbegrenzer hat ausgeloest oder Stoermeldemodul nicht richtig gesteckt', + 'D6': 'Eingang DE1 an Erweiterung EA1 meldet eine Stoerung', + 'D7': 'Eingang DE2 an Erweiterung EA1 meldet eine Stoerung', + 'D8': 'Eingang DE3 an Erweiterung EA1 meldet eine Stoerung', + 'DA': 'Kurzschluss Raumtemperatursensor, Heizkreis M1', + 'DB': 'Kurzschluss Raumtemperatursensor, Heizkreis M2', + 'DC': 'Kurzschluss Raumtemperatursensor, Heizkreis 3', + 'DD': 'Unterbrechung Raumtemperatursensor, Heizkreis M1', + 'DE': 'Unterbrechung Raumtemperatursensor, Heizkreis M2', + 'DF': 'Unterbrechung Raumtemperatursensor, Heizkreis 3', + 'E0': 'Fehler externer LON Teilnehmer', + 'E1': 'Isolationsstrom waehrend des Kalibrierens zu hoch', + 'E3': 'Zu geringe Wärmeabnahme während des Kalibrierens, Temperaturwächter hat ausgeschaltet', + 'E4': 'Fehler Versorgungsspannung', + 'E5': 'Interner Fehler, Flammenverstärker(Ionisationselektrode)', + 'E6': 'Abgas- / Zuluftsystem verstopft, Anlagendruck zu niedrig', + 'E7': 'Ionisationsstrom waehrend des Kalibrierens zu gering', + 'E8': 'Ionisationsstrom nicht im gültigen Bereich', + 'EA': 'Ionisationsstrom waehrend des Kalibrierens nicht im gueltigen Bereich', + 'EB': 'Wiederholter Flammenverlust waehrend des Kalibrierens', + 'EC': 'Parameterfehler waehrend des Kalibrierens', + 'ED': 'Interner Fehler', + 'EE': 'Flammensignal ist bei Brennerstart nicht vorhanden oder zu gering', + 'EF': 'Flammenverlust direkt nach Flammenbildung (waehrend der Sicherheitszeit)', + 'F0': 'Interner Fehler (Regelung tauschen)', + 'F1': 'Abgastemperaturbegrenzer ausgeloest', + 'F2': 'Temperaturbegrenzer ausgeloest', + 'F3': 'Flammensigal beim Brennerstart bereits vorhanden', + 'F4': 'Flammensigal nicht vorhanden', + 'F7': 'Differenzdrucksensor defekt, Kurzschluss ider Wasserdrucksensor', + 'F8': 'Brennstoffventil schliesst zu spaet', + 'F9': 'Geblaesedrehzahl beim Brennerstart zu niedrig', + 'FA': 
'Geblaesestillstand nicht erreicht', + 'FC': 'Gaskombiregler defekt oder fehlerhafte Ansteuerung Modulationsventil oder Abgasweg versperrt', + 'FD': 'Fehler Gasfeuerungsautomat, Kesselkodierstecker fehlt(in Verbindung mit B7)', + 'FE': 'Starkes Stoerfeld (EMV) in der Naehe oder Elektronik defekt', + 'FF': 'Starkes Stoerfeld (EMV) in der Naehe oder interner Fehler' + }, + 'KW': { + '00': 'Regelbetrieb (kein Fehler)', + '0F': 'Wartung (fuer Reset Codieradresse 24 auf 0 stellen)', + '10': 'Kurzschluss Aussentemperatursensor', + '18': 'Unterbrechung Aussentemperatursensor', + '19': 'Unterbrechung Kommunikation Außentemperatursensor RF', + '1D': 'Keine Kommunikation mit Sensor', + '1E': 'Strömungssensor defekt', + '1F': 'Strömungssensor defekt', + '20': 'Kurzschluss Vorlauftemperatursensor', + '21': 'Kurzschluss Ruecklauftemperatursensor', + '28': 'Unterbrechung Aussentemperatursensor / Vorlauftemperatursensor Anlage', + '29': 'Unterbrechung Ruecklauftemperatursensor', + '30': 'Kurzschluss Kesseltemperatursensor', + '38': 'Unterbrechung Kesseltemperatursensor', + '40': 'Kurzschluss Vorlauftemperatursensor M2', + '42': 'Unterbrechung Vorlauftemperatursensor M2', + '44': 'Kurzschluss Vorlauftemperatursensor Heizkreis 3', + '48': 'Unterbrechung Vorlauftemperatursensor Heizkreis 3', + '50': 'Kurzschluss Speichertemperatursensor', + '51': 'Kurzschluss Auslauftemperatursensor', + '58': 'Unterbrechung Speichertemperatursensor', + '59': 'Unterbrechung Auslauftemperatursensor', + '92': 'Solar: Kurzschluss Kollektortemperatursensor', + '93': 'Solar: Kurzschluss Sensor S3', + '94': 'Solar: Kurzschluss Speichertemperatursensor', + '9A': 'Solar: Unterbrechung Kollektortemperatursensor', + '9B': 'Solar: Unterbrechung Sensor S3', + '9C': 'Solar: Unterbrechung Speichertemperatursensor', + '9E': 'Solar: Zu geringer bzw. kein Volumenstrom oder Temperaturwächter ausgeloest', + '9F': 'Solar: Fehlermeldung Solarteil (siehe Solarregler)', + 'A4': 'Amx. 
Anlagendruck überschritten', + 'A7': 'Bedienteil defekt', + 'A8': 'Luft in der internen Umwaelzpumpe oder Mindest-Volumenstrom nicht erreicht', + 'B0': 'Kurzschluss Abgastemperatursensor', + 'B1': 'Kommunikationsfehler Bedieneinheit', + 'B4': 'Interner Fehler (Elektronik)', + 'B5': 'Interner Fehler (Elektronik)', + 'B6': 'Ungueltige Hardwarekennung (Elektronik)', + 'B7': 'Interner Fehler (Kesselkodierstecker)', + 'B8': 'Unterbrechung Abgastemperatursensor', + 'B9': 'Interner Fehler (Dateneingabe wiederholen)', + 'BA': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis M2', + 'BB': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis 3', + 'BC': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M1', + 'BD': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M2', + 'BE': 'Falsche Codierung Fernbedienung Vitorol', + 'BF': 'Falsches Kommunikationsmodul LON', + 'C1': 'Externe Sicherheitseinrichtung (Kessel kuehlt aus)', + 'C2': 'Kommunikationsfehler Solarregelung', + 'C3': 'Kommunikationsfehler Erweiterung AM1', + 'C4': 'Kommunikationsfehler Erweiterumg Open Therm', + 'C5': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M1', + 'C6': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M2', + 'C7': 'Falsche Codierung der Heizkreispumpe', + 'C8': 'Kommunikationsfehler drehzahlgeregelte, externe Heizkreispumpe 3', + 'C9': 'Stoermeldeeingang am Schaltmodul-V aktiv', + 'CD': 'Kommunikationsfehler Vitocom 100 (KM-BUS)', + 'CE': 'Kommunikationsfehler Schaltmodul-V', + 'CF': 'Kommunikationsfehler LON Modul', + 'D1': 'Brennerstoerung', + 'D4': 'Sicherheitstemperaturbegrenzer hat ausgeloest oder Stoermeldemodul nicht richtig gesteckt', + 'D6': 'Eingang DE1 an Erweiterung EA1 meldet eine Stoerung', + 'D7': 'Eingang DE2 an Erweiterung EA1 meldet eine Stoerung', + 'D8': 'Eingang DE3 an Erweiterung EA1 meldet eine Stoerung', + 'DA': 'Kurzschluss Raumtemperatursensor, Heizkreis M1', + 'DB': 'Kurzschluss Raumtemperatursensor, Heizkreis M2', + 'DC': 'Kurzschluss Raumtemperatursensor, Heizkreis 3', + 'DD': 'Unterbrechung Raumtemperatursensor, Heizkreis M1', + 'DE': 'Unterbrechung Raumtemperatursensor, Heizkreis M2', + 'DF': 'Unterbrechung Raumtemperatursensor, Heizkreis 3', + 'E0': 'Fehler externer LON Teilnehmer', + 'E1': 'Isolationsstrom waehrend des Kalibrierens zu hoch', + 'E3': 'Zu geringe Wärmeabnahme während des Kalibrierens, Temperaturwächter hat ausgeschaltet', + 'E4': 'Fehler Versorgungsspannung', + 'E5': 'Interner Fehler, Flammenverstärker(Ionisationselektrode)', + 'E6': 'Abgas- / Zuluftsystem verstopft, Anlagendruck zu niedrig', + 'E7': 'Ionisationsstrom waehrend des Kalibrierens zu gering', + 'E8': 'Ionisationsstrom nicht im gültigen Bereich', + 'EA': 'Ionisationsstrom waehrend des Kalibrierens nicht im gueltigen Bereich', + 'EB': 'Wiederholter Flammenverlust waehrend des Kalibrierens', + 'EC': 'Parameterfehler waehrend des Kalibrierens', + 'ED': 'Interner Fehler', + 'EE': 'Flammensignal ist bei Brennerstart nicht vorhanden oder zu gering', + 'EF': 'Flammenverlust direkt nach Flammenbildung (waehrend der Sicherheitszeit)', + 'F0': 'Interner Fehler (Regelung tauschen)', + 'F1': 'Abgastemperaturbegrenzer ausgeloest', + 'F2': 'Temperaturbegrenzer ausgeloest', + 'F3': 'Flammensigal beim Brennerstart bereits vorhanden', + 'F4': 'Flammensigal nicht vorhanden', + 'F7': 'Differenzdrucksensor defekt, Kurzschluss ider Wasserdrucksensor', + 'F8': 'Brennstoffventil schliesst zu spaet', + 'F9': 'Geblaesedrehzahl beim Brennerstart zu niedrig', + 'FA': 
'Geblaesestillstand nicht erreicht', + 'FC': 'Gaskombiregler defekt oder fehlerhafte Ansteuerung Modulationsventil oder Abgasweg versperrt', + 'FD': 'Fehler Gasfeuerungsautomat, Kesselkodierstecker fehlt(in Verbindung mit B7)', + 'FE': 'Starkes Stoerfeld (EMV) in der Naehe oder Elektronik defekt', + 'FF': 'Starkes Stoerfeld (EMV) in der Naehe oder interner Fehler' + }, +} + +operatingmodes = { + 'V200KW2': { + '00': 'Warmwasser (Schaltzeiten)', + '01': 'reduziert Heizen (dauernd)', + '02': 'normal Heizen (dauernd)', + '04': 'Heizen und Warmwasser (FS)', + '03': 'Heizen und Warmwasser (Schaltzeiten)', + '05': 'Standby', + }, + 'V200KO1B': { + '00': 'Warmwasser (Schaltzeiten)', + '01': 'reduziert Heizen (dauernd)', + '02': 'normal Heizen (dauernd)', + '04': 'Heizen und Warmwasser (FS)', + '03': 'Heizen und Warmwasser (Schaltzeiten)', + '05': 'Standby', + }, + 'aktuelle_Betriebsart': { + '00': 'Abschaltbetrieb', + '01': 'Reduzierter Betrieb', + '02': 'Normalbetrieb', + '03': 'Dauernd Normalbetrieb', + }, + 'V200WO1C': { + '00': 'Abschaltbetrieb', + '01': 'Warmwasser', + '02': 'Heizen und Warmwasser', + '03': 'undefiniert', + '04': 'dauernd reduziert', + '05': 'dauernd normal', + '06': 'normal Abschalt', + '07': 'nur kühlen', + }, + 'V200HO1C': { + '00': 'Abschaltbetrieb', + '01': 'Warmwasser', + '02': 'Heizen und Warmwasser', + '03': 'Normal reduziert', + '04': 'Normal dauernd' + } +} + +systemschemes = { + 'V200KW2': { + '00': '-', + '01': 'A1', + '02': 'A1 + WW', + '03': 'M2', + '04': 'M2 + WW', + '05': 'A1 + M2', + '06': 'A1 + M2 + WW', + '07': 'M2 + M3', + '08': 'M2 + M3 + WW', + '09': 'M2 + M3 + WW', + '10': 'A1 + M2 + M3 + WW' + }, + 'V200KO1B': { + '01': 'A1', + '02': 'A1 + WW', + '04': 'M2', + '03': 'M2 + WW', + '05': 'A1 + M2', + '06': 'A1 + M2 + WW' + }, + 'V200WO1C': { + '01': 'WW', + '02': 'HK + WW', + '04': 'HK + WW', + '05': 'HK + WW' + }, + 'V200HO1C': { + '01': 'WW', + '02': 'HK + WW', + '04': 'HK + WW', + '05': 'HK + WW' + } +} + +devicetypes = { + '2098': 'V200KW2', # Protokoll: KW + '2053': 'GWG_VBEM', # Protokoll: GWG + '20CB': 'VScotHO1', # Protokoll: P300 + '2094': 'V200KW1', # Protokoll: KW + '209F': 'V200KO1B', # Protokoll: P300 + '204D': 'V200WO1C', # Protokoll: P300 + '20B8': 'V333MW1', + '20A0': 'V100GC1', + '20C2': 'VDensHO1', + '20A4': 'V200GW1', + '20C8': 'VPlusHO1', + '2046': 'V200WO1', + '2047': 'V200WO1', + '2049': 'V200WO1', + '2032': 'VBC550', + '2033': 'VBC550' +} + +returnstatus = { + 'P300': { + '00': '0', + '01': '1', + '03': '2', + 'AA': 'NOT OK', + # At least for device 20CB the heating circuit pump returns status 03 when it's on and the heating runs in in night mode + }, + 'KW': { + '00': '0', + '01': '1', + '03': '2', + 'AA': 'NOT OK', + }, +} + +setreturnstatus = { + 'P300': { + '00': 'OK', + '05': 'SYNC (NOT OK)', + }, + 'KW': { + '00': 'OK', + '05': 'SYNC (NOT OK)', + }, +} + + +# P300 Protokoll +# +# Beispiel +# +# Senden 41 5 0 1 55 25 2 82 +# Read Request -- - - - ----- - -- +# | | | | | | +------- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]5+0+1+55+25+2 = [dez]5+0+1+(5x16)+5+(2x16)+5+2 = 130dez = 82hex +# | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden +# | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur +# | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call +# | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler +# | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) +# 
+------------------------ Telegramm-Start-Byte +# +# Empfangen : 6 ----------------------- OK (Antwort auf 0x16 0x00 0x00 und auf korrekt empfangene Telegramme) +# 5 ----------------------- Schnittstelle ist aktiv und wartet auf eine Initialisierung +# 15 ----------------------- Schnittstelle meldet einen Fehler zurück +# +# 41 7 1 1 55 25 2 EF 0 74 +# -- - - - ----- - ---- -- +# | | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]7+1+1+55+25+2+EF+0 = [dez]7+1+1+(5x16)+5+(2x16)+5+2+(14*16)+(15*16)+0 = [dez]7+1+1+(80)+5+(32)+5+2+(224)+(15)+0 = 372dez = 1.74hex) +# | | | | | | +------ Wert +# | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden +# | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur +# | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call +# | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler +# | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) +# +------------------------ Telegramm-Start-Byte +# Kommunikationsbeispiele +# Information Kessel Außentemperatur read 2-Byte -60..60 0x5525 +# DATA TX: 41 5 0 1 55 25 2 82 +# DATA RX: 41 7 1 1 55 25 2 EF 0 74 --> 00EF = 239 --> 23.9°C (Faktor 0.1) +# --> Senden 41 5 0 1 55 25 2 82 +# -- - - - ----- - -- +# | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]5+0+1+55+25+2 = [dez]5+0+1+(5x16)+5+(2x16)+5+2 = 130dez = 82hex +# | | | | | +----- XX Anzahl der Bytes, die in der Antwort erwartet werden +# | | | | +--------- XX XX 2 byte Adresse der Daten oder Prozedur +# | | | +------------- XX 01 = ReadData, 02 = WriteData, 07 = Function Call +# | | +--------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler +# | +----------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) +# +------------------- Telegramm-Start-Byte +# +# --> Empfangen 6 41 7 1 1 55 25 2 EF 0 74 +# - -- - - - ----- - ---- -- +# | | | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]7+1+1+55+25+2+EF+0 = [dez]7+1+1+(5x16)+5+(2x16)+5+2+(14*16)+(15*16)+0 = [dez]7+1+1+(80)+5+(32)+5+2+(224)+(15)+0 = 372dez = 1.74hex) +# | | | | | | | +------ Wert +# | | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden +# | | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur +# | | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call +# | | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler +# | | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) +# | +------------------------ Telegramm-Start-Byte +# +--------------------------- OK (Antwort auf 0x16 0x00 0x00 und auf korrekt empfangene Telegramme) +# +# --> Antwort: 0x00EF = 239 = 23.9° diff --git a/viessmann/locale.yaml b/viessmann/_pv_1_2_3/locale.yaml similarity index 100% rename from viessmann/locale.yaml rename to viessmann/_pv_1_2_3/locale.yaml diff --git a/viessmann/_pv_1_2_3/plugin.yaml b/viessmann/_pv_1_2_3/plugin.yaml new file mode 100755 index 000000000..a77d0a8ed --- /dev/null +++ b/viessmann/_pv_1_2_3/plugin.yaml @@ -0,0 +1,231 @@ +%YAML 1.1 +# vim: set et ts=4 sts=4 sw=4 ai ff=unix nu wrap : +--- + +# Metadata for the Smart-Plugin +plugin: + # Global plugin attributes + type: interface # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Lesen und Schreiben von Werten einer Viessmann Heizung' 
+ en: 'Read and write data of a Viessmann heating system' + maintainer: Morg + tester: sisamiwe, tcr82 + keywords: viessmann heating optolink + state: ready # change to ready when done with development + version: 1.2.3 # Plugin version + sh_minversion: 1.6.0 # minimum shNG version to use this plugin + py_minversion: 3.6 + multi_instance: false # plugin supports multi instance + restartable: true + classname: Viessmann # class containing the plugin + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1455991-viessmann-plugin-neuentwicklung-python-hilfe/ + +parameters: + # Definition of parameters to be configured in etc/plugin.yaml + serialport: + type: str + default: '' + description: + de: 'Serieller Port, an dem der Lesekopf angeschlossen ist' + en: 'Serial port the device is connected to' + + heating_type: + type: str + default: '' + description: + de: 'Gerätetyp der Heizung' + en: 'Device type of heating system' + + protocol: + type: str + default: 'P300' + valid_list: + - 'P300' + - 'KW' + description: + de: 'Protokoll der Heizung' + en: 'Protocol of heating system' + + timeout: + type: num + default: 1.5 + description: + de: 'Zeitbegrenzung für das Lesen vom seriellen Port in Sekunden' + en: 'Timeout for serial read operations in seconds' + +item_attributes: + # Definition of item attributes defined by this plugin + viess_send: + type: str + description: + de: 'Änderung des Items wird mit konfiguriertem Kommando an die Heizung geschickt' + en: 'Changes to this item result in sending the configured command to the heating system' + + viess_read: + type: str + description: + de: 'Liest Wert mit konfiguriertem Kommando aus der Heizung aus' + en: 'The item value should be read by using the configured command' + + viess_read_afterwrite: + type: num + description: + de: 'Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando an die Heizung geschickt wird' + en: 'Configures delay in seconds to issue a read command after a write command' + + viess_read_cycle: + type: num + description: + de: 'Konfiguriert ein Intervall in Sekunden für das Lesekommando' + en: 'Configures an interval in seconds for the read command' + + viess_init: + type: bool + description: + de: 'Konfiguriert, ob der Wert aus der Heizung initialisiert werden soll' + en: 'Configures to initialize the item value with the value from the heating system' + + viess_trigger: + type: list(str) + description: + de: 'Konfiguriert Lesekommandos, die nach einem Schreibvorgang auf das Item aufgerufen werden' + en: 'Configures read commands after an update to the item' + + viess_trigger_afterwrite: + type: num + description: + de: 'Konfiguriert eine Verzögerung in Sekunden, bis ein Trigger ausgeführt werden soll, nachdem ein Wert gesetzt wurde' + en: 'Configures delay in seconds to run trigger commands after item update' + + viess_update: + type: bool + description: + de: 'Liest alle konfigurierten Items neu, wenn es auf True gesetzt wird' + en: 'Triggers reading of all configured items if set to True' + + viess_timer: + type: str + description: + de: 'Liest alle Timer zur übergebenen Anwendung (z.B. Heizkreis_A1M1) und stellt diese für die Nutzung mit UZSU zur Verfügung' + en: 'Provides an UZSU-compatible dict with all timers for the given application (e.g.
Heizkreis_A1M1)' + + viess_ba_list: + type: bool + description: + de: 'Gibt nach der Initialisierung eine Liste aller für die konfigurierte Heizung gültigen Betriebsarten zurück' + en: 'Returns a list of valid operating modes for the configured device type after initialization' + +item_structs: + timer: + name: Schaltzeiten in Einzelzeiten fuer An und Aus + + an1: + name: erste Anschaltzeit + type: str + visu_acl: rw + + aus1: + name: erste Ausschaltzeit + type: str + visu_acl: rw + + an2: + name: zweite Anschaltzeit + type: str + visu_acl: rw + + aus2: + name: zweite Ausschaltzeit + type: str + visu_acl: rw + + an3: + name: dritte Anschaltzeit + type: str + visu_acl: rw + + aus3: + name: dritte Ausschaltzeit + type: str + visu_acl: rw + + an4: + name: vierte Anschaltzeit + type: str + visu_acl: rw + + aus4: + name: vierte Ausschaltzeit + type: str + visu_acl: rw + + betriebsart: + name: Betriebsart in string wandeln + + betriebsart_str: + type: str + eval: "'Neustart' if value == '' else ['Standby', 'Warmwasser (Schaltzeiten)', 'Heizen und Warmwasser (Schaltzeiten)', 'reduziert Heizen (dauernd)', 'normal Heizen (dauernd)'][int(value)]" + eval_trigger: .. + +logic_parameters: NONE +# Definition of logic parameters defined by this plugin + +plugin_functions: + update_all_read_items: + type: NONE + description: + de: 'Stößt das Lesen aller konfigurierten Items an' + en: 'Triggers reading of all configured items' + read_addr: + type: foo + description: + de: 'Stößt das Lesen des angegebenen Datenpunkts an, der nicht an ein Item gebunden sein muss. Es erfolgt keine Zuweisung an ein Item. Rückgabewert ist der gelesene Wert, oder NONE bei Fehler' + en: 'Triggers reading of the supplied data point, which doesn''t have to be bound to an item. Result will not be assigned to an item. Return value is the read value, or NONE if an error occurred' + parameters: + addr: + type: str + description: + de: 'Vierstellige Hex-Adresse des Datenpunktes' + en: 'Four-digit hex address of the data point' + read_temp_addr: + type: foo + description: + de: 'Stößt das Lesen eines beliebigen Datenpunkts an, der nicht konfiguriert oder bekannt sein muss. Es erfolgt keine Zuweisung an ein Item. Rückgabewert ist der gelesene Wert, oder NONE bei Fehler' + en: 'Triggers reading of an arbitrary data point, which doesn''t have to be configured or known. Result will not be assigned to an item. Return value is the read value, or NONE if an error occurred' + parameters: + addr: + type: str + mandatory: yes + description: + de: 'Vierstellige Hex-Adresse des Datenpunktes' + en: 'Four-digit hex address of the data point' + length: + type: int + mandatory: yes + description: + de: 'Länge der Geräteantwort in Bytes (1-8)' + en: 'Lengh of device response in bytes (1-8)' + valid_min: 1 + valid_max: 8 + unit: + type: str + mandatory: yes + description: + de: 'Einheitencode für die Konvertierung der Antwort. Muss in der Protokollkonfiguration ``unitset`` in commands.py definiert sein' + en: 'Unit code for converting the response value. Needs to be defined in the protocol configuration ``unitset`` in commands.py' + write_addr: + type: foo + description: + de: 'Stößt das Schreiben des angegebenen Datenpunkts an, der nicht an ein Item gebunden sein muss. Der übergebene Wert muss zum konfigurierten Datentyp passen' + en: 'Triggers writing of the supplied data point, which doesn''t have to be bound to an item. 
The submitted value must match the configured data type' + parameters: + addr: + type: str + description: + de: 'Vierstellige Hex-Adresse des Datenpunktes' + en: 'Four-digit hex address of the data point' + value: + description: + de: 'Zu schreibender Wert' + en: 'Value to be written' diff --git a/viessmann/requirements.txt b/viessmann/_pv_1_2_3/requirements.txt similarity index 100% rename from viessmann/requirements.txt rename to viessmann/_pv_1_2_3/requirements.txt diff --git a/viessmann/user_doc.rst b/viessmann/_pv_1_2_3/user_doc.rst similarity index 100% rename from viessmann/user_doc.rst rename to viessmann/_pv_1_2_3/user_doc.rst diff --git a/viessmann/webif/static/datatables_min.css b/viessmann/_pv_1_2_3/webif/static/datatables_min.css similarity index 100% rename from viessmann/webif/static/datatables_min.css rename to viessmann/_pv_1_2_3/webif/static/datatables_min.css diff --git a/viessmann/webif/static/datatables_min.js b/viessmann/_pv_1_2_3/webif/static/datatables_min.js similarity index 100% rename from viessmann/webif/static/datatables_min.js rename to viessmann/_pv_1_2_3/webif/static/datatables_min.js diff --git a/viessmann/_pv_1_2_3/webif/static/img/plugin_logo.svg b/viessmann/_pv_1_2_3/webif/static/img/plugin_logo.svg new file mode 100755 index 000000000..16c50e23d --- /dev/null +++ b/viessmann/_pv_1_2_3/webif/static/img/plugin_logo.svg @@ -0,0 +1 @@ +Element 1 \ No newline at end of file diff --git a/viessmann/webif/static/img/sort_asc.png b/viessmann/_pv_1_2_3/webif/static/img/sort_asc.png similarity index 100% rename from viessmann/webif/static/img/sort_asc.png rename to viessmann/_pv_1_2_3/webif/static/img/sort_asc.png diff --git a/viessmann/webif/static/img/sort_asc_disabled.png b/viessmann/_pv_1_2_3/webif/static/img/sort_asc_disabled.png similarity index 100% rename from viessmann/webif/static/img/sort_asc_disabled.png rename to viessmann/_pv_1_2_3/webif/static/img/sort_asc_disabled.png diff --git a/viessmann/webif/static/img/sort_both.png b/viessmann/_pv_1_2_3/webif/static/img/sort_both.png similarity index 100% rename from viessmann/webif/static/img/sort_both.png rename to viessmann/_pv_1_2_3/webif/static/img/sort_both.png diff --git a/viessmann/webif/static/img/sort_desc.png b/viessmann/_pv_1_2_3/webif/static/img/sort_desc.png similarity index 100% rename from viessmann/webif/static/img/sort_desc.png rename to viessmann/_pv_1_2_3/webif/static/img/sort_desc.png diff --git a/viessmann/webif/static/img/sort_desc_disabled.png b/viessmann/_pv_1_2_3/webif/static/img/sort_desc_disabled.png similarity index 100% rename from viessmann/webif/static/img/sort_desc_disabled.png rename to viessmann/_pv_1_2_3/webif/static/img/sort_desc_disabled.png diff --git a/viessmann/_pv_1_2_3/webif/templates/index.html b/viessmann/_pv_1_2_3/webif/templates/index.html new file mode 100755 index 000000000..d522eeefb --- /dev/null +++ b/viessmann/_pv_1_2_3/webif/templates/index.html @@ -0,0 +1,264 @@ + +{% extends "base_plugin.html" %} +{% set tabcount = 2 %} +{% set tab1title = _('Viessmann Items') %} +{% set tab2title = _('Alle Datenpunkte') %} +{% set language = p.get_sh().get_defaultlanguage() %} +{% if last_read_cmd != "" %} +{% set start_tab = 3 %} +{% endif %} +{% if language not in ['en','de'] %} +{% set language = 'en' %} +{% endif %} + +{% block pluginscripts %} +/* + * The combined file was created by the DataTables downloader builder: + * https://datatables.net/download + * + * To rebuild or modify this file with the latest versions of the included + * software please visit: + * 
https://datatables.net/download/#dt/dt-1.10.21/fh-3.1.7/r-2.2.5 + * + * Included libraries: + * DataTables 1.10.21, FixedHeader 3.1.7, Responsive 2.2.5 + */ + + + + +{% endblock pluginscripts %} + + +{% block headtable %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    {{ _('Serieller Port') }}{{ p._serialport }}{{ _('Anzahl Items') }}{{ p._params|length }}
    {{ _('Heizungstyp') }}{{ p._heating_type }}{{ _('Verbunden') }}{{ p._connected }}
    {{ _('Protokoll') }}{{ p._protocol }}{{ _('Verbindung aktiv') }}{{ p._initialized }}
    {{ _('Letzter manuell gelesener Wert') }}{{ last_read_cmd + ": " if last_read_cmd else '---' }} {{ last_read_value }}
    +{% endblock headtable %} + +{% block buttons %} + +{% endblock %} + +{% block bodytab1 %} +
    +
    + {% if p._params|length %} + + + + + + + + + + + + + {% for commandcode in p._params %} + + + + + + + + + {% endfor %} + +
    {{ _('Item') }}{{ _('Datenpunkt') }}{{ _('Befehlsname') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Letzte Aktualisierung') }}
    {{ p._params[commandcode]['item'].path() }}{{ commandcode }}{{ p._params[commandcode]['commandname'] }}{{ p._params[commandcode]['item'].type() }}{{ p._params[commandcode]['item']() }}{{ p._params[commandcode]['item'].last_update() }}
    + {% endif %} +
    +
    +{% endblock bodytab1 %} + +{% block bodytab2 %} +
    +
    + {% if cmds|length %} +
    + + + + + + + + + + + + + + + + + + + + + + + + + {% for cmd in cmds.keys() %} + + + + + + + + + + {% endfor %} + +
    {{ _('Befehlsname') }}{{ _('Datenpunkt') }}{{ _('Länge') }}{{ _('Einheit') }}{{ _('Lesen/Schreiben') }}{{ _('Datenpunkt lesen') }}{{ _('gelesener Wert') }}
    {{ _('_Custom') }} + + False 
    {{ cmd }}{{ cmds[cmd]['addr'] }}{{ cmds[cmd]['len'] }}{{ cmds[cmd]['unit'] }}{{ cmds[cmd]['set'] }} 
    +
    + {% endif %} +
    +
    +{% endblock bodytab2 %} diff --git a/viessmann/commands.py b/viessmann/commands.py old mode 100755 new mode 100644 index 34d97e67e..e71a33e89 --- a/viessmann/commands.py +++ b/viessmann/commands.py @@ -1,900 +1,782 @@ -# !/usr/bin/env python +#!/usr/bin/env python3 # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# Copyright 2020 Michael Wenzel -# Copyright 2020 Sebastian Helms -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# Viessmann-Plugin for SmartHomeNG. https://github.com/smarthomeNG// -# -# This plugin is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This plugin is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. + +""" commands for dev viessmann """ + +# models defined: # -# You should have received a copy of the GNU General Public License -# along with this plugin. If not, see . -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -controlset = { - 'P300': { - 'Baudrate': 4800, - 'Bytesize': 8, # 'EIGHTBITS' - 'Parity': 'E', # 'PARITY_EVEN', - 'Stopbits': 2, # 'STOPBITS_TWO', - 'StartByte': 0x41, - 'Request': 0x00, - 'Response': 0x01, - 'Error': 0x03, - 'Read': 0x01, - 'Write': 0x02, - 'Function_Call': 0x7, - 'Acknowledge': 0x06, - 'Not_initiated': 0x05, - 'Init_Error': 0x15, - 'Reset_Command': 0x04, - 'Reset_Command_Response': 0x05, - 'Sync_Command': 0x160000, - 'Sync_Command_Response': 0x06, - 'Command_bytes_read': 5, - 'Command_bytes_write': 5, - # init: send'Reset_Command' receive'Reset_Command_Response' send'Sync_Command' - # request: send('StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Request' 'Read' 'addr' 'checksum') - # request_response: receive('Acknowledge' 'StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Response' 'Read' 'addr' 'Anzahl der Bytes des Wertes' 'Wert' 'checksum') - }, - 'KW': { - 'Baudrate': 4800, - 'Bytesize': 8, # 'EIGHTBITS' - 'Parity': 'E', # 'PARITY_EVEN', - 'Stopbits': 2, # 'STOPBITS_TWO', - 'StartByte': 0x01, - 'Read': 0xF7, - 'Write': 0xF4, - 'Acknowledge': 0x01, - 'Reset_Command': 0x04, - 'Not_initiated': 0x05, - 'Write_Ack': 0x00, - }, -} +# V200KW2 +# V200KO1B +# V200WO1C +# V200HO1C + -commandset = { +commands = { + 'ALL': { + 'Anlagentyp': {'read': True, 'write': False, 'opcode': '00f8', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'devicetypes', 'item_attrs': {'no_read_groups': True, 'attributes': {'md_read_initial': True}}}, # getAnlTyp -- Information - Allgemein: Anlagentyp (204D) + }, 'V200KO1B': { - # Kessel - 'Aussentemperatur': {'addr': '0800', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur - 'Aussentemperatur_TP': {'addr': '5525', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass - 'Aussentemperatur_Dp': {'addr': 
'5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) - 'Kesseltemperatur': {'addr': '0802', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur - 'Kesseltemperatur_TP': {'addr': '0810', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur_tiefpass - 'Kesselsolltemperatur': {'addr': '555a', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesselsolltemperatur - 'Temp_Speicher_Ladesensor': {'addr': '0812', 'len': 2, 'unit': 'IU10', 'set': False}, # Temperatur Speicher Ladesensor Komfortsensor - 'Auslauftemperatur': {'addr': '0814', 'len': 2, 'unit': 'IU10', 'set': False}, # Auslauftemperatur - 'Abgastemperatur': {'addr': '0816', 'len': 2, 'unit': 'IU10', 'set': False}, # Abgastemperatur - 'Gem_Vorlauftemperatur': {'addr': '081a', 'len': 2, 'unit': 'IU10', 'set': False}, # Gem. Vorlauftemperatur - 'Relais_K12': {'addr': '0842', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Relais K12 Interne Anschlußerweiterung - 'Eingang_0-10_V': {'addr': '0a86', 'len': 1, 'unit': 'IUINT', 'set': False}, # Eingang 0-10 V - 'EA1_Kontakt_0': {'addr': '0a90', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 0 - 'EA1_Kontakt_1': {'addr': '0a91', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 1 - 'EA1_Kontakt_2': {'addr': '0a92', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Kontakt 2 - 'EA1_Externer_Soll_0-10V': {'addr': '0a93', 'len': 1, 'unit': 'IUINT', 'set': False}, # EA1: Externer Sollwert 0-10V - 'EA1_Relais_0': {'addr': '0a95', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # EA1: Relais 0 - 'AM1_Ausgang_1': {'addr': '0aa0', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # AM1 Ausgang 1 - 'AM1_Ausgang_2': {'addr': '0aa1', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # AM1 Ausgang 2 - 'TempKOffset': {'addr': '6760', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Kesseloffset KT ueber WWsoll in Grad C - 'Systemtime': {'addr': '088e', 'len': 8, 'unit': 'TI', 'set': True}, # Systemzeit - 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema - 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Heizungstyp - 'Inventory': {'addr': '08e0', 'len': 7, 'unit': 'SN', 'set': False}, # Sachnummer - 'CtrlId': {'addr': '08e0', 'len': 7, 'unit': 'DT', 'set': False}, # Reglerkennung - # Fehler - 'Sammelstoerung': {'addr': '0a82', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung - 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 - 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 - 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 - 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 - 'Error4': {'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 - 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 - 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 - 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 - 'Error8': {'addr': '754f', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 - 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 - # Pumpen - 'Speicherladepumpe': {'addr': '6513', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe - 'Zirkulationspumpe': {'addr': '6515', 'len': 1, 'unit': 
'IUBOOL', 'set': False}, # Zirkulationspumpe - 'Interne_Pumpe': {'addr': '7660', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Interne Pumpe - 'Heizkreispumpe_A1M1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1 - 'Heizkreispumpe_A1M1_RPM': {'addr': '7663', 'len': 1, 'unit': 'IUNON', 'set': False}, # Heizkreispumpe A1M1 Drehzahl - 'Heizkreispumpe_M2': {'addr': '3906', 'len': 1, 'unit': 'IUINT', 'set': False}, # Heizkreispumpe M2 - 'Heizkreispumpe_M2_RPM': {'addr': '7665', 'len': 1, 'unit': 'IUNON', 'set': False}, # Heizkreispumpe M2 Drehzahl - 'Relais_Status_Pumpe_A1M1': {'addr': 'a152', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Relais-Status Heizkreispumpe 1 - # Brenner - 'Brennerstarts': {'addr': '088a', 'len': 4, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brennerstarts - 'Brenner_Betriebsstunden': {'addr': '08a7', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden - 'Brennerstatus_1': {'addr': '0842', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe1 - 'Brennerstatus_2': {'addr': '0849', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe2 - 'Oeldurchsatz': {'addr': '5726', 'len': 4, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Oeldurchsatz Brenner in Dezi-Liter pro Stunde - 'Oelverbrauch': {'addr': '7574', 'len': 4, 'unit': 'IS1000', 'set': True}, # Oelverbrauch kumuliert - # Solar - 'Nachladeunterdrueckung': {'addr': '6551', 'len': 1, 'unit': 'IUBOOL', 'set': False}, - 'SolarPumpe': {'addr': '6552', 'len': 1, 'unit': 'IUBOOL', 'set': False}, - 'Kollektortemperatur': {'addr': '6564', 'len': 2, 'unit': 'IS10', 'set': False}, - 'Speichertemperatur': {'addr': '6566', 'len': 2, 'unit': 'IU10', 'set': False}, - 'Solar_Betriebsstunden': {'addr': '6568', 'len': 4, 'unit': 'IU100', 'set': False}, - 'Solarsteuerung': {'addr': '7754', 'len': 2, 'unit': 'IUINT', 'set': False}, - # Heizkreis A1M1 - 'Raumtemperatur_A1M1': {'addr': '0896', 'len': 1, 'unit': 'ISNON', 'set': False}, # Raumtemperatur A1M1 - 'Raumtemperatur_Soll_Normalbetrieb_A1M1': {'addr': '2306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb A1M1 - 'Raumtemperatur_Soll_Red_Betrieb_A1M1': {'addr': '2307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 - 'Raumtemperatur_Soll_Party_Betrieb_A1M1': {'addr': '2308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb A1M1 - 'Aktuelle_Betriebsart_A1M1': {'addr': '2301', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart A1M1 - 'Betriebsart_A1M1': {'addr': '2323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Betriebsart A1M1 - 'Sparbetrieb_A1M1': {'addr': '2302', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Sparbetrieb A1M1 - 'Zustand_Sparbetrieb_A1M1': {'addr': '2331', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Sparbetrieb A1M1 - 'Partybetrieb_A1M1': {'addr': '2303', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Partybetrieb A1M1 - 'Zustand_Partybetrieb_A1M1': {'addr': '2330', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Partybetrieb A1M1 - 'Vorlauftemperatur_A1M1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur A1M1 - 'Vorlauftemperatur_Soll_A1M1': {'addr': '2544', 'len': 2, 
'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll A1M1 - 'StatusFrost_A1M1': {'addr': '2500', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Status Frostwarnung A1M1 - 'Externe_Raumsolltemperatur_Normal_A1M1': {'addr': '2321', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 37}, # Externe Raumsolltemperatur Normal A1M1 - 'Externe_Betriebsartenumschaltung_A1M1': {'addr': '2549', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Externe Betriebsartenumschaltung A1M1 - 'Speichervorrang_A1M1': {'addr': '27a2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Speichervorrang auf Heizkreispumpe und Mischer - 'Frostschutzgrenze_A1M1': {'addr': '27a3', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -9, 'max_value': 15}, # Frostschutzgrenze - 'Frostschutz_A1M1': {'addr': '27a4', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Frostschutzgrenze - 'Heizkreispumpenlogik_A1M1': {'addr': '27a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # HeizkreispumpenlogikFunktion - 'Sparschaltung_A1M1': {'addr': '27a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 35}, # AbsolutSommersparschaltung - 'Mischersparfunktion_A1M1': {'addr': '27a7', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Mischersparfunktion - 'Pumpenstillstandzeit_A1M1': {'addr': '27a9', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Pumpenstillstandzeit - 'Vorlauftemperatur_min_A1M1': {'addr': '27c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur - 'Vorlauftemperatur_max_A1M1': {'addr': '27c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur - 'Neigung_Heizkennlinie_A1M1': {'addr': '27d3', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie A1M1 - 'Niveau_Heizkennlinie_A1M1': {'addr': '27d4', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie A1M1 - 'Partybetrieb_Zeitbegrenzung_A1M1': {'addr': '27f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster - 'Temperaturgrenze_red_Betrieb_A1M1': {'addr': '27f8', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -61, 'max_value': 10}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC - 'Temperaturgrenze_red_Raumtemp_A1M1': {'addr': '27f9', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -60, 'max_value': 10}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes - 'Vorlauftemperatur_Erhoehung_Soll_A1M1': {'addr': '27fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 50}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % - 'Vorlauftemperatur_Erhoehung_Zeit_A1M1': {'addr': '27fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 150}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. 
- # Heizkreis M2 - 'Raumtemperatur_M2': {'addr': '0898', 'len': 1, 'unit': 'ISNON', 'set': False}, # Raumtemperatur - 'Raumtemperatur_Soll_Normalbetrieb_M2': {'addr': '3306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb - 'Raumtemperatur_Soll_Red_Betrieb_M2': {'addr': '3307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb - 'Raumtemperatur_Soll_Party_Betrieb_M2': {'addr': '3308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 3, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb - 'Aktuelle_Betriebsart_M2': {'addr': '3301', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart - 'Betriebsart_M2': {'addr': '3323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Betriebsart - 'Sparbetrieb_M2': {'addr': '3302', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Sparbetrieb - 'Zustand_Sparbetrieb_M2': {'addr': '3331', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Sparbetrieb - 'Partybetrieb_M2': {'addr': '3303', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Partybetrieb - 'Zustand_Partybetrieb_M2': {'addr': '3330', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Zustand Partybetrieb - 'Vorlauftemperatur_M2': {'addr': '3900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur - 'Vorlauftemperatur_Soll_M2': {'addr': '3544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll - 'StatusFrost_M2': {'addr': '3500', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Status Frostwarnung - 'Externe_Raumsolltemperatur_Normal_M2': {'addr': '3321', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 37}, # Externe Raumsolltemperatur Normal - 'Externe_Betriebsartenumschaltung_M2': {'addr': '3549', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Externe Betriebsartenumschaltung - 'Speichervorrang_M2': {'addr': '37a2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Speichervorrang auf Heizkreispumpe und Mischer - 'Frostschutzgrenze_M2': {'addr': '37a3', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -9, 'max_value': 15}, # Frostschutzgrenze - 'Frostschutz_M2': {'addr': '37a4', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Frostschutzgrenze - 'Heizkreispumpenlogik_M2': {'addr': '37a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # HeizkreispumpenlogikFunktion - 'Sparschaltung_M2': {'addr': '37a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 35}, # AbsolutSommersparschaltung - 'Mischersparfunktion_M2': {'addr': '37a7', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Mischersparfunktion - 'Pumpenstillstandzeit_M2': {'addr': '37a9', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 15}, # Pumpenstillstandzeit - 'Vorlauftemperatur_min_M2': {'addr': '37c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur - 'Vorlauftemperatur_max_M2': {'addr': '37c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur - 'Neigung_Heizkennlinie_M2': {'addr': '37d3', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie - 'Niveau_Heizkennlinie_M2': {'addr': '37d4', 'len': 1, 'unit': 'ISNON', 'set': 
True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie - 'Partybetrieb_Zeitbegrenzung_M2': {'addr': '37f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster - 'Temperaturgrenze_red_Betrieb_M2': {'addr': '37f8', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -61, 'max_value': 10}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC - 'Temperaturgrenze_red_Raumtemp_M2': {'addr': '37f9', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -60, 'max_value': 10}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes - 'Vorlauftemperatur_Erhoehung_Soll_M2': {'addr': '37fa', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 50}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % - 'Vorlauftemperatur_Erhoehung_Zeit_M2': {'addr': '37fb', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 150}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. - # Warmwasser - 'Warmwasser_Temperatur': {'addr': '0804', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C - 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 95}, # Warmwasser-Solltemperatur - 'Status_Warmwasserbereitung': {'addr': '650a', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Satus Warmwasserbereitung - 'WarmwasserPumpenNachlauf': {'addr': '6762', 'len': 2, 'unit': 'ISNON' , 'set': True, 'min_value': 0, 'max_value': 1}, # Warmwasserpumpennachlauf - # Ferienprogramm HK_A1M1 - 'Ferienprogramm_A1M1': {'addr': '2535', 'len': 1, 'unit': 'IUINT', 'set': False}, # Ferienprogramm A1M1 - 'Ferien_Abreisetag_A1M1': {'addr': '2309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag A1M1 - 'Ferien_Rückreisetag_A1M1': {'addr': '2311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag A1M1 - # Ferienprogramm HK_M2 - 'Ferienprogramm_M2': {'addr': '3535', 'len': 1, 'unit': 'IUINT', 'set': False}, # Ferienprogramm M2 - 'Ferien_Abreisetag_M2': {'addr': '3309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag M2 - 'Ferien_Rückreisetag_M2': {'addr': '3311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag M2 - # Schaltzeiten Warmwasser - 'Timer_Warmwasser_Mo': {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Montag - 'Timer_Warmwasser_Di': {'addr': '2108', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Dienstag - 'Timer_Warmwasser_Mi': {'addr': '2110', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Mittwoch - 'Timer_Warmwasser_Do': {'addr': '2118', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Donnerstag - 'Timer_Warmwasser_Fr': {'addr': '2120', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Freitag - 'Timer_Warmwasser_Sa': {'addr': '2128', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Samstag - 'Timer_Warmwasser_So': {'addr': '2130', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Sonntag - # Schaltzeiten HK_A1M1 - 'Timer_A1M1_Mo': {'addr': '2000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag - 'Timer_A1M1_Di': {'addr': '2008', 'len': 8, 'unit': 'CT', 'set': True}, # 
Timer Heizkreis_A1M1 Dienstag - 'Timer_A1M1_Mi': {'addr': '2010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch - 'Timer_A1M1_Do': {'addr': '2018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag - 'Timer_A1M1_Fr': {'addr': '2020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag - 'Timer_A1M1_Sa': {'addr': '2028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag - 'Timer_A1M1_So': {'addr': '2030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag - # Schaltzeiten HK_M2 - 'Timer_M2_Mo': {'addr': '3000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag - 'Timer_M2_Di': {'addr': '3008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag - 'Timer_M2_Mi': {'addr': '3010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch - 'Timer_M2_Do': {'addr': '3018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag - 'Timer_M2_Fr': {'addr': '3020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag - 'Timer_M2_Sa': {'addr': '3028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag - 'Timer_M2_So': {'addr': '3030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag - # Schaltzeiten Zirkulation - 'Timer_Zirku_Mo': {'addr': '2200', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Montag - 'Timer_Zirku_Di': {'addr': '2208', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Dienstag - 'Timer_Zirku_Mi': {'addr': '2210', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Mittwoch - 'Timer_Zirku_Do': {'addr': '2218', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Donnerstag - 'Timer_Zirku_Fr': {'addr': '2220', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Freitag - 'Timer_Zirku_Sa': {'addr': '2228', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Samstag - 'Timer_Zirku_So': {'addr': '2230', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Zirkulationspumpe Sonntag + 'Allgemein': { + 'Temperatur': { + 'Aussen': {'read': True, 'write': False, 'opcode': '0800', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur + 'Aussen_TP': {'read': True, 'write': False, 'opcode': '5525', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur_tiefpass + 'Aussen_Dp': {'read': True, 'write': False, 'opcode': '5527', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur in Grad C (Gedaempft) + 'Speicher_Ladesensor': {'read': True, 'write': False, 'opcode': '0812', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Temperatur Speicher Ladesensor Komfortsensor + 'Auslauf': {'read': True, 'write': False, 'opcode': '0814', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Auslauftemperatur + 'Abgas': {'read': True, 'write': False, 'opcode': '0816', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Abgastemperatur + 'Gem_Vorlauf': {'read': True, 'write': False, 'opcode': '081a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 
'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Gem. Vorlauftemperatur + }, + 'Relais_K12': {'read': True, 'write': False, 'opcode': '0842', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Relais K12 Interne Anschlußerweiterung + 'Eingang_0-10_V': {'read': True, 'write': False, 'opcode': '0a86', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Eingang 0-10 V + 'EA1_Kontakt_0': {'read': True, 'write': False, 'opcode': '0a90', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # EA1: Kontakt 0 + 'EA1_Kontakt_1': {'read': True, 'write': False, 'opcode': '0a91', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # EA1: Kontakt 1 + 'EA1_Kontakt_2': {'read': True, 'write': False, 'opcode': '0a92', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # EA1: Kontakt 2 + 'EA1_Externer_Soll_0-10V': {'read': True, 'write': False, 'opcode': '0a93', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # EA1: Externer Sollwert 0-10V + 'EA1_Relais_0': {'read': True, 'write': False, 'opcode': '0a95', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # EA1: Relais 0 + 'AM1_Ausgang_1': {'read': True, 'write': False, 'opcode': '0aa0', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # AM1 Ausgang 1 + 'AM1_Ausgang_2': {'read': True, 'write': False, 'opcode': '0aa1', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # AM1 Ausgang 2 + 'TempKOffset': {'read': True, 'write': True, 'opcode': '6760', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Kesseloffset KT ueber WWsoll in Grad C + 'Systemtime': {'read': True, 'write': True, 'opcode': '088e', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'T', 'params': {'value': 'VAL', 'len': 8}}, # Systemzeit + 'Anlagenschema': {'read': True, 'write': False, 'opcode': '7700', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'systemschemes'}, # Anlagenschema + 'Inventory': {'read': True, 'write': False, 'opcode': '08e0', 'reply_pattern': '*', 'item_type': 'str', 'dev_datatype': 'S', 'params': {'value': 'VAL', 'len': 7}}, # Sachnummer + 'CtrlId': {'read': True, 'write': False, 'opcode': '08e0', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 7}, 'lookup': 'devicetypes'}, # Reglerkennung + }, + 'Kessel': { + # Kessel + 'Ist': {'read': True, 'write': False, 'opcode': '0802', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesseltemperatur + 'TP': {'read': True, 'write': False, 'opcode': '0810', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesseltemperatur_tiefpass + 'Soll': {'read': True, 'write': False, 'opcode': '555a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesselsolltemperatur + }, + 'Fehler': { + # Fehler + 'Sammelstoerung': {'read': 
True, 'write': False, 'opcode': '0a82', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # Sammelstörung + 'Error0': {'read': True, 'write': False, 'opcode': '7507', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 1 + 'Error1': {'read': True, 'write': False, 'opcode': '7510', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 2 + 'Error2': {'read': True, 'write': False, 'opcode': '7519', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 3 + 'Error3': {'read': True, 'write': False, 'opcode': '7522', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 4 + 'Error4': {'read': True, 'write': False, 'opcode': '752b', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 5 + 'Error5': {'read': True, 'write': False, 'opcode': '7534', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 6 + 'Error6': {'read': True, 'write': False, 'opcode': '753d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 7 + 'Error7': {'read': True, 'write': False, 'opcode': '7546', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 8 + 'Error8': {'read': True, 'write': False, 'opcode': '754f', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 9 + 'Error9': {'read': True, 'write': False, 'opcode': '7558', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 10 + }, + 'Pumpen': { + # Pumpen + 'Speicherlade': {'read': True, 'write': False, 'opcode': '6513', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Speicherladepumpe + 'Zirkulation': {'read': True, 'write': False, 'opcode': '6515', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Zirkulationspumpe + 'Intern': {'read': True, 'write': False, 'opcode': '7660', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Interne Pumpe + 'Heizkreis_A1M1': {'read': True, 'write': False, 'opcode': '2906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe A1 + 'Heizkreis_A1M1_RPM': {'read': True, 'write': False, 'opcode': '7663', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe A1M1 Drehzahl + 'Heizkreis_M2': {'read': True, 'write': False, 'opcode': '3906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe M2 + 'Heizkreis_M2_RPM': {'read': True, 'write': False, 'opcode': '7665', 'reply_pattern': '*', 
'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe M2 Drehzahl + 'Relais_Status': {'read': True, 'write': False, 'opcode': 'a152', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Relais-Status Heizkreispumpe 1 + }, + 'Brenner': { + # Brenner + 'Starts': {'read': True, 'write': True, 'opcode': '088a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 4}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Brennerstarts + 'Betriebsstunden': {'read': True, 'write': True, 'opcode': '08a7', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Brenner-Betriebsstunden + 'Status_1': {'read': True, 'write': False, 'opcode': '0842', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Brennerstatus Stufe1 + 'Status_2': {'read': True, 'write': False, 'opcode': '0849', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Brennerstatus Stufe2 + 'Oeldurchsatz': {'read': True, 'write': True, 'opcode': '5726', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 4}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Oeldurchsatz Brenner in Dezi-Liter pro Stunde + 'Oelverbrauch': {'read': True, 'write': True, 'opcode': '7574', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 1000, 'signed': True, 'len': 4}}, # Oelverbrauch kumuliert + }, + 'Solar': { + # Solar + 'Nachladeunterdrueckung': {'read': True, 'write': False, 'opcode': '6551', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, + 'Pumpe': {'read': True, 'write': False, 'opcode': '6552', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, + 'Kollektortemperatur': {'read': True, 'write': False, 'opcode': '6564', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, + 'Speichertemperatur': {'read': True, 'write': False, 'opcode': '6566', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, + 'Betriebsstunden': {'read': True, 'write': False, 'opcode': '6568', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 100, 'len': 4}}, + 'Steuerung': {'read': True, 'write': False, 'opcode': '7754', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}}, + }, + 'Heizkreis': { + 'A1M1': { + # Heizkreis A1M1 + 'Temperatur': { + 'Raum': { + 'Ist': {'read': True, 'write': False, 'opcode': '0896', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}}, # Raumtemperatur A1M1 + 'Soll_Normalbetrieb': {'read': True, 'write': True, 'opcode': '2306', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Normalbetrieb A1M1 + 'Soll_Red_Betrieb': {'read': True, 'write': True, 'opcode': '2307', 'reply_pattern': '*', 'item_type': 'num', 
'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 + 'Soll_Party_Betrieb': {'read': True, 'write': True, 'opcode': '2308', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Party Betrieb A1M1 + }, + 'Vorlauf': { + 'Ist': {'read': True, 'write': False, 'opcode': '2900', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur A1M1 + 'Soll': {'read': True, 'write': False, 'opcode': '2544', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Soll A1M1 + 'Min': {'read': True, 'write': True, 'opcode': '27c5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 1, 'force_max': 127}}, # Minimalbegrenzung der Vorlauftemperatur + 'Max': {'read': True, 'write': True, 'opcode': '27c6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 127}}, # Maximalbegrenzung der Vorlauftemperatur + 'Erhoehung_Soll': {'read': True, 'write': True, 'opcode': '27fa', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 50}}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % + 'Erhoehung_Zeit': {'read': True, 'write': True, 'opcode': '27fa', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 150}}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. 
+ }, + 'Grenze_red_Betrieb': {'read': True, 'write': True, 'opcode': '27f8', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -61, 'force_max': 10}}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC + 'Grenze_red_Raumtemp': {'read': True, 'write': True, 'opcode': '27f9', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -60, 'force_max': 10}}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes + }, + 'Status': { + 'Aktuelle_Betriebsart': {'read': True, 'write': False, 'opcode': '2301', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Aktuelle Betriebsart A1M1 + 'Betriebsart': {'read': True, 'write': True, 'opcode': '2323', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Betriebsart A1M1 + 'Sparbetrieb': {'read': True, 'write': False, 'opcode': '2302', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Sparbetrieb A1M1 + 'Zustand_Sparbetrieb': {'read': True, 'write': True, 'opcode': '2331', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Zustand Sparbetrieb A1M1 + 'Partybetrieb': {'read': True, 'write': False, 'opcode': '2303', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Partybetrieb A1M1 + 'Zustand_Partybetrieb': {'read': True, 'write': True, 'opcode': '2330', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Zustand Partybetrieb A1M1 + 'StatusFrost': {'read': True, 'write': False, 'opcode': '2500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Status Frostwarnung A1M1 + 'Externe_Raumsolltemperatur_Normal': {'read': True, 'write': True, 'opcode': '2321', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 37}}, # Externe Raumsolltemperatur Normal A1M1 + 'Externe_Betriebsartenumschaltung': {'read': True, 'write': True, 'opcode': '2549', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Externe Betriebsartenumschaltung A1M1 + 'Speichervorrang': {'read': True, 'write': True, 'opcode': '27a2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # Speichervorrang auf Heizkreispumpe und Mischer + 'Frostschutzgrenze': {'read': True, 'write': True, 'opcode': '27a3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -9, 'force_max': 15}}, # Frostschutzgrenze + 'Frostschutz': {'read': True, 'write': True, 'opcode': '27a4', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Frostschutzgrenze + 
'Heizkreispumpenlogik': {'read': True, 'write': True, 'opcode': '27a5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # HeizkreispumpenlogikFunktion + 'Sparschaltung': {'read': True, 'write': True, 'opcode': '27a6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 5, 'force_max': 35}}, # AbsolutSommersparschaltung + 'Mischersparfunktion': {'read': True, 'write': True, 'opcode': '27a7', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Mischersparfunktion + 'Pumpenstillstandzeit': {'read': True, 'write': True, 'opcode': '27a9', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # Pumpenstillstandzeit + }, + 'Heizkennlinie': { + 'Neigung': {'read': True, 'write': True, 'opcode': '27d3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 1}, 'cmd_settings': {'force_min': 0.2, 'force_max': 3.5}}, # Neigung Heizkennlinie A1M1 + 'Niveau': {'read': True, 'write': True, 'opcode': '27d4', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -13, 'force_max': 40}}, # Niveau Heizkennlinie A1M1 + }, + 'Partybetrieb_Zeitbegrenzung': {'read': True, 'write': True, 'opcode': '27f2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 12}}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster + }, + 'M2': { + # Heizkreis M2 + 'Temperatur': { + 'Raum': { + 'Ist': {'read': True, 'write': False, 'opcode': '0898', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}}, # Raumtemperatur + 'Soll_Normalbetrieb': {'read': True, 'write': True, 'opcode': '3306', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Normalbetrieb + 'Soll_Red_Betrieb': {'read': True, 'write': True, 'opcode': '3307', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Reduzierter Betrieb + 'Soll_Party_Betrieb': {'read': True, 'write': True, 'opcode': '3308', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 3, 'force_max': 37}}, # Raumtemperatur Soll Party Betrieb + }, + 'Vorlauf': { + 'Ist': {'read': True, 'write': False, 'opcode': '3900', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur + 'Soll': {'read': True, 'write': False, 'opcode': '3544', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Soll + 'Min': {'read': True, 'write': True, 'opcode': '37c5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 
'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 1, 'force_max': 127}}, # Minimalbegrenzung der Vorlauftemperatur + 'Max': {'read': True, 'write': True, 'opcode': '37c6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 127}}, # Maximalbegrenzung der Vorlauftemperatur + 'Erhoehung_Soll': {'read': True, 'write': True, 'opcode': '37fa', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 50}}, # Erhöhung des Kesselwasser- bzw. Vorlauftemperatur-Sollwertes beim Übergang von Betrieb mit reduzierter Raumtemperatur in den Betrieb mit normaler Raumtemperatur um 20 % + 'Erhoehung_Zeit': {'read': True, 'write': True, 'opcode': '37fb', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 150}}, # Zeitdauer für die Erhöhung des Kesselwasser bzw.VorlauftemperaturSollwertes (siehe Codieradresse „FA“) 60 min. + }, + 'Grenze_red_Betrieb': {'read': True, 'write': True, 'opcode': '37f8', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -61, 'force_max': 10}}, # Temperaturgrenze für Aufhebung des reduzierten Betriebs -5 ºC + 'Grenze_red_Raumtemp': {'read': True, 'write': True, 'opcode': '37f9', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -60, 'force_max': 10}}, # Temperaturgrenze für Anhebung des reduzierten RaumtemperaturSollwertes + }, + 'Status': { + 'Aktuelle_Betriebsart': {'read': True, 'write': False, 'opcode': '3301', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Aktuelle Betriebsart + 'Betriebsart': {'read': True, 'write': True, 'opcode': '3323', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Betriebsart + 'Sparbetrieb': {'read': True, 'write': False, 'opcode': '3302', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Sparbetrieb + 'Zustand_Sparbetrieb': {'read': True, 'write': True, 'opcode': '3331', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Zustand Sparbetrieb + 'Partybetrieb': {'read': True, 'write': False, 'opcode': '3303', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Partybetrieb + 'Zustand_Partybetrieb': {'read': True, 'write': True, 'opcode': '3330', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Zustand Partybetrieb + 'StatusFrost': {'read': True, 'write': False, 'opcode': '3500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Status Frostwarnung + 'Externe_Raumsolltemperatur_Normal': {'read': True, 'write': True, 'opcode': '3321', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 
'force_max': 37}}, # Externe Raumsolltemperatur Normal + 'Externe_Betriebsartenumschaltung': {'read': True, 'write': True, 'opcode': '3549', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Externe Betriebsartenumschaltung + 'Speichervorrang': {'read': True, 'write': True, 'opcode': '37a2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # Speichervorrang auf Heizkreispumpe und Mischer + 'Frostschutzgrenze': {'read': True, 'write': True, 'opcode': '37a3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -9, 'force_max': 15}}, # Frostschutzgrenze + 'Frostschutz': {'read': True, 'write': True, 'opcode': '37a4', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Frostschutzgrenze + 'Heizkreispumpenlogik': {'read': True, 'write': True, 'opcode': '37a5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # HeizkreispumpenlogikFunktion + 'Sparschaltung': {'read': True, 'write': True, 'opcode': '37a6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 5, 'force_max': 35}}, # AbsolutSommersparschaltung + 'Mischersparfunktion': {'read': True, 'write': True, 'opcode': '37a7', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Mischersparfunktion + 'Pumpenstillstandzeit': {'read': True, 'write': True, 'opcode': '37a9', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # Pumpenstillstandzeit + }, + 'Heizkennlinie': { + 'Neigung': {'read': True, 'write': True, 'opcode': '37d3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 1}, 'cmd_settings': {'force_min': 0.2, 'force_max': 3.5}}, # Neigung Heizkennlinie + 'Niveau': {'read': True, 'write': True, 'opcode': '37d4', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -13, 'force_max': 40}}, # Niveau Heizkennlinie + }, + 'Partybetrieb_Zeitbegrenzung': {'read': True, 'write': True, 'opcode': '37f2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 12}}, # Zeitliche Begrenzung für Partybetrieb oder externe BetriebsprogrammUmschaltung mit Taster + }, + }, + 'Warmwasser': { + # Warmwasser + 'Ist': {'read': True, 'write': False, 'opcode': '0804', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Warmwassertemperatur in Grad C + 'Soll': {'read': True, 'write': True, 'opcode': '6300', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 95}}, # Warmwasser-Solltemperatur + 'Status': {'read': True, 'write': True, 'opcode': 
'650a', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Status Warmwasserbereitung + 'PumpenNachlauf': {'read': True, 'write': True, 'opcode': '6762', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 2}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Warmwasserpumpennachlauf + }, + 'Ferienprogramm': { + 'A1M1': { + # Ferienprogramm HK + 'Status': {'read': True, 'write': False, 'opcode': '2535', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Ferienprogramm A1M1 + 'Abreisetag': {'read': True, 'write': True, 'opcode': '2309', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Abreisetag A1M1 + 'Rückreisetag': {'read': True, 'write': True, 'opcode': '2311', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Rückreisetag A1M1 + }, + 'M2': { + # Ferienprogramm HK + 'Status': {'read': True, 'write': False, 'opcode': '3535', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Ferienprogramm M2 + 'Abreisetag': {'read': True, 'write': True, 'opcode': '3309', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Abreisetag M2 + 'Rückreisetag': {'read': True, 'write': True, 'opcode': '3311', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Rückreisetag M2 + }, + }, + 'Timer': { + 'Warmwasser': { + # Schaltzeiten Warmwasser + 'Mo': {'read': True, 'write': True, 'opcode': '2100', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Montag + 'Di': {'read': True, 'write': True, 'opcode': '2108', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '2110', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '2118', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '2120', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '2128', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Samstag + 'So': {'read': True, 'write': True, 'opcode': '2130', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Sonntag + }, + 'A1M1': { + # Schaltzeiten HK + 'Mo': {'read': True, 'write': True, 'opcode': '2000', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Montag + 'Di': {'read': True, 'write': True, 'opcode': '2008', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Dienstag + 'Mi': {'read': 
True, 'write': True, 'opcode': '2010', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '2018', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '2020', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '2028', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Samstag + 'So': {'read': True, 'write': True, 'opcode': '2030', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Sonntag + }, + 'M2': { + # Schaltzeiten HK + 'Mo': {'read': True, 'write': True, 'opcode': '3000', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Montag + 'Di': {'read': True, 'write': True, 'opcode': '3008', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '3010', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '3018', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '3020', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '3028', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Samstag + 'So': {'read': True, 'write': True, 'opcode': '3030', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Sonntag + }, + 'Zirkulation': { + # Schaltzeiten Zirkulation + 'Mo': {'read': True, 'write': True, 'opcode': '2200', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Montag + 'Di': {'read': True, 'write': True, 'opcode': '2208', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '2210', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '2218', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '2220', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '2228', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Samstag + 'So': {'read': True, 'write': True, 'opcode': '2230', 'reply_pattern': '*', 'item_type': 
'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Zirkulationspumpe Sonntag + } + } }, 'V200HO1C': { - # Allgemein - 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Heizungstyp - 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema - 'Frostgefahr': {'addr': '2510', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Frostgefahr - 'Aussentemperatur_TP': {'addr': '5525', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass - 'Aussentemperatur_Dp': {'addr': '5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) - 'Anlagenleistung': {'addr': 'a38f', 'len': 2, 'unit': 'IS10', 'set': False}, # Anlagenleistung - # Kessel - 'Kesseltemperatur_TP': {'addr': '0810', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur_tiefpass - 'Kesselsolltemperatur': {'addr': '555a', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesselsolltemperatur - 'Abgastemperatur': {'addr': '0816', 'len': 2, 'unit': 'IU10', 'set': False}, # Abgastemperatur - # Fehler - 'Sammelstoerung': {'addr': '0a82', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung - 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 - 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 - 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 - 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 - 'Error4': {'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 - 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 - 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 - 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 - 'Error8': {'addr': '754f', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 - 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 - # Pumpen - 'Speicherladepumpe': {'addr': '6513', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe für Warmwasser - 'Zirkulationspumpe': {'addr': '6515', 'len': 1, 'unit': 'IUBOOL', 'set': True}, # Zirkulationspumpe - 'Interne_Pumpe': {'addr': '7660', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Interne Pumpe - 'Heizkreispumpe_HK1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1 - 'Heizkreispumpe_HK2': {'addr': '3906', 'len': 1, 'unit': 'IUINT', 'set': False}, # Heizkreispumpe M2 - # Brenner - 'Brennerstarts': {'addr': '088a', 'len': 4, 'unit': 'ISNON', 'set': False}, # Brennerstarts - 'Brennerleistung': {'addr': 'a305', 'len': 2, 'unit': 'IS10', 'set': False}, # Brennerleistung - 'Brenner_Betriebsstunden': {'addr': '08a7', 'len': 4, 'unit': 'IU3600', 'set': False}, # Brenner-Betriebsstunden - # Solar - 'SolarPumpe': {'addr': '6552', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Solarpumpe - 'Kollektortemperatur': {'addr': '6564', 'len': 2, 'unit': 'IS10', 'set': False}, # Kollektortemperatur - 'Speichertemperatur': {'addr': '6566', 'len': 2, 'unit': 'IU10', 'set': False}, # Spichertemperatur - 'Solar_Betriebsstunden': {'addr': '6568', 'len': 4, 'unit': 'IU100', 'set': False}, # Solar Betriebsstunden - 'Solar_Waermemenge': {'addr': '6560', 'len': 2, 'unit': 'IUINT', 'set': False}, # Solar Waermemenge - 'Solar_Ausbeute': {'addr': 'cf30', 'len': 4, 'unit': 'IUINT', 'set': 
False}, # Solar Ausbeute - # Heizkreis 1 - 'Betriebsart_HK1': {'addr': '2500', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 3}, # Betriebsart (0=Abschaltbetrieb, 1=Red. Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) - 'Heizart_HK1': {'addr': '2323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) - 'Vorlauftemperatur_Soll_HK1': {'addr': '2544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll - 'Vorlauftemperatur_HK1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist - # Heizkreis 2 - 'Betriebsart_HK2': {'addr': '3500', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 3}, # Betriebsart (0=Abschaltbetrieb, 1=Red. Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) - 'Heizart_HK2': {'addr': '3323', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 4}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) - 'Vorlauftemperatur_Soll_HK2': {'addr': '3544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll - 'Vorlauftemperatur_HK2': {'addr': '3900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist - # Warmwasser - 'Warmwasser_Temperatur': {'addr': '0812', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C - 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 80}, # Warmwasser-Solltemperatur - 'Warmwasser_Austrittstemperatur': {'addr': '0814', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwasseraustrittstemperatur in Grad C + 'Allgemein': { + # Allgemein + 'Anlagenschema': {'read': True, 'write': False, 'opcode': '7700', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'systemschemes'}, # Anlagenschema + 'Frostgefahr': {'read': True, 'write': False, 'opcode': '2510', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Frostgefahr + 'Anlagenleistung': {'read': True, 'write': False, 'opcode': 'a38f', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Anlagenleistung + 'Temperatur': { + 'Aussen_TP': {'read': True, 'write': False, 'opcode': '5525', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur_tiefpass + 'Aussen_Dp': {'read': True, 'write': False, 'opcode': '5527', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur in Grad C (Gedaempft) + }, + }, + 'Kessel': { + # Kessel + 'TP': {'read': True, 'write': False, 'opcode': '0810', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesseltemperatur_tiefpass + 'Soll': {'read': True, 'write': False, 'opcode': '555a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesselsolltemperatur + 'Abgastemperatur': {'read': True, 'write': False, 'opcode': '0816', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 
'len': 2}}, # Abgastemperatur + }, + 'Fehler': { + # Fehler + 'Sammelstoerung': {'read': True, 'write': False, 'opcode': '0a82', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # Sammelstörung + 'Error0': {'read': True, 'write': False, 'opcode': '7507', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 1 + 'Error1': {'read': True, 'write': False, 'opcode': '7510', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 2 + 'Error2': {'read': True, 'write': False, 'opcode': '7519', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 3 + 'Error3': {'read': True, 'write': False, 'opcode': '7522', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 4 + 'Error4': {'read': True, 'write': False, 'opcode': '752b', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 5 + 'Error5': {'read': True, 'write': False, 'opcode': '7534', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 6 + 'Error6': {'read': True, 'write': False, 'opcode': '753d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 7 + 'Error7': {'read': True, 'write': False, 'opcode': '7546', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 8 + 'Error8': {'read': True, 'write': False, 'opcode': '754f', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 9 + 'Error9': {'read': True, 'write': False, 'opcode': '7558', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 10 + }, + 'Pumpen': { + # Pumpen + 'Speicherlade': {'read': True, 'write': False, 'opcode': '6513', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Speicherladepumpe für Warmwasser + 'Zirkulation': {'read': True, 'write': True, 'opcode': '6515', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Zirkulationspumpe + 'Intern': {'read': True, 'write': False, 'opcode': '7660', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Interne Pumpe + 'Heizkreis_1': {'read': True, 'write': False, 'opcode': '2906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe A1 + 'Heizkreis_2': {'read': True, 'write': False, 'opcode': '3906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe M2 + }, + 'Brenner': { + # Brenner + 'Starts': {'read': True, 'write': False, 'opcode': '088a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 
4}}, # Brennerstarts + 'Leistung': {'read': True, 'write': False, 'opcode': 'a305', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Brennerleistung + 'Betriebsstunden': {'read': True, 'write': False, 'opcode': '08a7', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # Brenner-Betriebsstunden + }, + 'Solar': { + # Solar + 'Pumpe': {'read': True, 'write': False, 'opcode': '6552', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Solarpumpe + 'Kollektortemperatur': {'read': True, 'write': False, 'opcode': '6564', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Kollektortemperatur + 'Speichertemperatur': {'read': True, 'write': False, 'opcode': '6566', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Speichertemperatur + 'Betriebsstunden': {'read': True, 'write': False, 'opcode': '6568', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 100, 'len': 4}}, # Solar Betriebsstunden + 'Waermemenge': {'read': True, 'write': False, 'opcode': '6560', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}}, # Solar Waermemenge + 'Ausbeute': {'read': True, 'write': False, 'opcode': 'cf30', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # Solar Ausbeute + }, + 'Heizkreis': { + '1': { + # Heizkreis 1 + 'Betriebsart': {'read': True, 'write': True, 'opcode': '2500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 3}}, # Betriebsart (0=Abschaltbetrieb, 1=Red. Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) + 'Heizart': {'read': True, 'write': True, 'opcode': '2323', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) + 'Temperatur': { + 'Vorlauf_Soll': {'read': True, 'write': False, 'opcode': '2544', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Soll + 'Vorlauf_Ist': {'read': True, 'write': False, 'opcode': '2900', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Ist + }, + }, + '2': { + # Heizkreis 2 + 'Betriebsart': {'read': True, 'write': True, 'opcode': '3500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 3}}, # Betriebsart (0=Abschaltbetrieb, 1=Red. 
Betrieb, 2=Normalbetrieb (Schaltuhr), 3=Normalbetrieb (Dauernd)) + 'Heizart': {'read': True, 'write': True, 'opcode': '3323', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 4}}, # Heizart (0=Abschaltbetrieb, 1=Nur Warmwasser, 2=Heizen und Warmwasser, 3=Normalbetrieb (Reduziert), 4=Normalbetrieb (Dauernd)) + 'Temperatur': { + 'Vorlauf_Soll': {'read': True, 'write': False, 'opcode': '3544', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Soll + 'Vorlauf_Ist': {'read': True, 'write': False, 'opcode': '3900', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Ist + }, + }, + }, + 'Warmwasser': { + # Warmwasser + 'Ist': {'read': True, 'write': False, 'opcode': '0812', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Warmwassertemperatur in Grad C + 'Soll': {'read': True, 'write': True, 'opcode': '6300', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 80}}, # Warmwasser-Solltemperatur + 'Austritt': {'read': True, 'write': False, 'opcode': '0814', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Warmwasseraustrittstemperatur in Grad C + }, }, 'V200KW2': { - # Allgemein - 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # Ermittle Device Typ der Anlage - 'Anlagenschema': {'addr': '7700', 'len': 2, 'unit': 'SC', 'set': False}, # Anlagenschema - 'AnlagenSoftwareIndex': {'addr': '7330', 'len': 1, 'unit': 'IUNON', 'set': False}, # Bedienteil SoftwareIndex - 'Aussentemperatur': {'addr': '0800', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur_tiefpass - 'Aussentemperatur_Dp': {'addr': '5527', 'len': 2, 'unit': 'IS10', 'set': False}, # Aussentemperatur in Grad C (Gedaempft) - 'Systemtime': {'addr': '088e', 'len': 8, 'unit': 'TI', 'set': True}, # Systemzeit - # Kessel - 'TempKOffset': {'addr': '6760', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 10, 'max_value': 50}, # Kesseloffset KT ueber WWsoll in Grad C - 'Kesseltemperatur': {'addr': '0802', 'len': 2, 'unit': 'IU10', 'set': False}, # Kesseltemperatur - 'Kesselsolltemperatur': {'addr': '5502', 'len': 2, 'unit': 'IU10', 'set': True}, # Kesselsolltemperatur - # Fehler - 'Sammelstoerung': {'addr': '0847', 'len': 1, 'unit': 'RT', 'set': False}, # Sammelstörung - 'Brennerstoerung': {'addr': '0883', 'len': 1, 'unit': 'RT', 'set': False}, - 'Error0': {'addr': '7507', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 1 - 'Error1': {'addr': '7510', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 2 - 'Error2': {'addr': '7519', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 3 - 'Error3': {'addr': '7522', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 4 - 'Error4': {'addr': '752b', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 5 - 'Error5': {'addr': '7534', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 6 - 'Error6': {'addr': '753d', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 7 - 'Error7': {'addr': '7546', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 8 - 'Error8': {'addr': '754f', 
'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 9 - 'Error9': {'addr': '7558', 'len': 9, 'unit': 'ES', 'set': False}, # Fehlerhistory Eintrag 10 - # Pumpen - 'Speicherladepumpe': {'addr': '0845', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Speicherladepumpe für Warmwasser - 'Zirkulationspumpe': {'addr': '0846', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Zirkulationspumpe - 'Heizkreispumpe_A1M1': {'addr': '2906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe A1M1 - 'Heizkreispumpe_M2': {'addr': '3906', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Heizkreispumpe M2 - # Brenner - 'Brennertyp': {'addr': 'a30b', 'len': 1, 'unit': 'IUNON', 'set': False}, # Brennertyp 0=einstufig 1=zweistufig 2=modulierend - 'Brennerstufe': {'addr': '551e', 'len': 1, 'unit': 'RT', 'set': False}, # Ermittle die aktuelle Brennerstufe - 'Brennerstarts': {'addr': '088a', 'len': 2, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brennerstarts - 'Brennerstatus_1': {'addr': '55d3', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe1 - 'Brennerstatus_2': {'addr': '0849', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Brennerstatus Stufe2 - 'Brenner_BetriebsstundenStufe1': {'addr': '0886', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden Stufe 1 - 'Brenner_BetriebsstundenStufe2': {'addr': '08a3', 'len': 4, 'unit': 'IU3600', 'set': True, 'min_value': 0, 'max_value': 1193045}, # Brenner-Betriebsstunden Stufe 2 - # Heizkreis A1M1 - 'Betriebsart_A1M1': {'addr': '2301', 'len': 1, 'unit': 'BA', 'set': True}, # Betriebsart A1M1 - 'Aktuelle_Betriebsart_A1M1': {'addr': '2500', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart A1M1 - 'Sparbetrieb_A1M1': {'addr': '2302', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Sparbetrieb A1M1 - 'Partybetrieb_A1M1_Zeit': {'addr': '27f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Partyzeit M2 - 'Partybetrieb_A1M1': {'addr': '2303', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Partybetrieb A1M1 - 'Vorlauftemperatur_A1M1': {'addr': '2900', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur A1M1 - 'Vorlauftemperatur_Soll_A1M1': {'addr': '2544', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Soll A1M1 - 'Raumtemperatur_Soll_Normalbetrieb_A1M1': {'addr': '2306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb A1M1 - 'Raumtemperatur_Soll_Red_Betrieb_A1M1': {'addr': '2307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 - 'Raumtemperatur_Soll_Party_Betrieb_A1M1': {'addr': '2308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb A1M1 - 'Neigung_Heizkennlinie_A1M1': {'addr': '2305', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie A1M1 - 'Niveau_Heizkennlinie_A1M1': {'addr': '2304', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie A1M1 - 'MischerM1': {'addr': '254c', 'len': 1, 'unit': 'IUPR', 'set': False}, # Ermittle Mischerposition M1 - 'Heizkreispumpenlogik_A1M1': {'addr': '27a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 
5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K - 'Sparschaltung_A1M1': {'addr': '27a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 36}, # AbsolutSommersparschaltung - # Heizkreis M2 - 'Betriebsart_M2': {'addr': '3301', 'len': 1, 'unit': 'BA', 'set': True}, # Betriebsart M2 - 'Aktuelle_Betriebsart_M2': {'addr': '3500', 'len': 1, 'unit': 'BA', 'set': False}, # Aktuelle Betriebsart M2 - 'Sparbetrieb_M2': {'addr': '3302', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Sparbetrieb - 'Partybetrieb_M2': {'addr': '3303', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # Partybetrieb A1M1 - 'Partybetrieb_M2_Zeit': {'addr': '37f2', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 0, 'max_value': 12}, # Partyzeit M2 - 'Raumtemperatur_Soll_Normalbetrieb_M2': {'addr': '3306', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Normalbetrieb - 'Raumtemperatur_Soll_Red_Betrieb_M2': {'addr': '3307', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Reduzierter Betrieb - 'Raumtemperatur_Soll_Party_Betrieb_M2': {'addr': '3308', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 4, 'max_value': 37}, # Raumtemperatur Soll Party Betrieb - 'Neigung_Heizkennlinie_M2': {'addr': '3305', 'len': 1, 'unit': 'IU10', 'set': True, 'min_value': 0.2, 'max_value': 3.5}, # Neigung Heizkennlinie M2 - 'Niveau_Heizkennlinie_M2': {'addr': '3304', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': -13, 'max_value': 40}, # Niveau Heizkennlinie M2 - 'MischerM2': {'addr': '354c', 'len': 1, 'unit': 'IUPR', 'set': False}, # Ermittle Mischerposition M2 - 'MischerM2Auf': {'addr': '084d', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # MischerM2 Auf 0=AUS;1=EIN - 'MischerM2Zu': {'addr': '084c', 'len': 1, 'unit': 'IUBOOL', 'set': True, 'min_value': 0, 'max_value': 1}, # MischerM2 Zu 0=AUS;1=EIN - 'Vorlauftemperatur_Soll_M2': {'addr': '37c6', 'len': 2, 'unit': 'IU10', 'set': True, 'min_value': 10, 'max_value': 80}, # Vorlauftemperatur Soll - 'Vorlauftemperatur_M2': {'addr': '080c', 'len': 2, 'unit': 'IU10', 'set': False}, # Vorlauftemperatur Ist - 'Vorlauftemperatur_min_M2': {'addr': '37c5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Minimalbegrenzung der Vorlauftemperatur - 'Vorlauftemperatur_max_M2': {'addr': '37c6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 1, 'max_value': 127}, # Maximalbegrenzung der Vorlauftemperatur - 'Heizkreispumpenlogik_M2': {'addr': '37a5', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 0, 'max_value': 15}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K - 'Sparschaltung_M2': {'addr': '37a6', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 5, 'max_value': 36}, # AbsolutSommersparschaltung - 'StatusKlemme2': {'addr': '3904', 'len': 1, 'unit': 'IUINT', 'set': False}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden - 'StatusKlemme17': {'addr': '3905', 'len': 1, 
'unit': 'IUINT', 'set': False}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden - # Warmwasser - 'Warmwasser_Status': {'addr': '650A', 'len': 1, 'unit': 'IUNON', 'set': False}, # 0=Ladung inaktiv, 1=in Ladung, 2=im Nachlauf - 'Warmwasser_KesselOffset': {'addr': '6760', 'len': 1, 'unit': 'IUINT', 'set': True, 'min_value': 10, 'max_value': 50}, # Warmwasser Kessel Offset in K - 'Warmwasser_BeiPartyDNormal': {'addr': '6764', 'len': 1, 'unit': 'IUNON', 'set': True, 'min_value': 0, 'max_value': 2}, # WW Heizen bei Party 0=AUS, 1=nach Schaltuhr, 2=EIN - 'Warmwasser_Temperatur': {'addr': '0804', 'len': 2, 'unit': 'IU10', 'set': False}, # Warmwassertemperatur in Grad C - 'Warmwasser_Solltemperatur': {'addr': '6300', 'len': 1, 'unit': 'ISNON', 'set': True, 'min_value': 10, 'max_value': 80}, # Warmwasser-Solltemperatur - 'Warmwasser_SolltemperaturAktuell': {'addr': '6500', 'len': 1, 'unit': 'IU10' , 'set': False}, # Warmwasser-Solltemperatur aktuell - 'Warmwasser_SollwertMax': {'addr': '675a', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # 0=inaktiv, 1=aktiv - # Ferienprogramm HK_A1M1 - 'Ferienprogramm_A1M1': {'addr': '2535', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Ferienprogramm A1M1 0=inaktiv 1=aktiv - 'Ferien_Abreisetag_A1M1': {'addr': '2309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag A1M1 - 'Ferien_Rückreisetag_A1M1': {'addr': '2311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag A1M1 - # Ferienprogramm HK_M2 - 'Ferienprogramm_M2': {'addr': '3535', 'len': 1, 'unit': 'IUBOOL', 'set': False}, # Ferienprogramm M2 0=inaktiv 1=aktiv - 'Ferien_Abreisetag_M2': {'addr': '3309', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Abreisetag M2 - 'Ferien_Rückreisetag_M2': {'addr': '3311', 'len': 8, 'unit': 'DA', 'set': True}, # Ferien Rückreisetag M2 - # Schaltzeiten Warmwasser - 'Timer_Warmwasser_Mo': {'addr': '2100', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Montag - 'Timer_Warmwasser_Di': {'addr': '2108', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Dienstag - 'Timer_Warmwasser_Mi': {'addr': '2110', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Mittwoch - 'Timer_Warmwasser_Do': {'addr': '2118', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Donnerstag - 'Timer_Warmwasser_Fr': {'addr': '2120', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Freitag - 'Timer_Warmwasser_Sa': {'addr': '2128', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Samstag - 'Timer_Warmwasser_So': {'addr': '2130', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Warmwasserbereitung Sonntag - # Schaltzeiten HK_A1M1 - 'Timer_A1M1_Mo': {'addr': '2000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Montag - 'Timer_A1M1_Di': {'addr': '2008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag - 'Timer_A1M1_Mi': {'addr': '2010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch - 'Timer_A1M1_Do': {'addr': '2018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag - 'Timer_A1M1_Fr': {'addr': '2020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag - 'Timer_A1M1_Sa': {'addr': '2028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag - 'Timer_A1M1_So': {'addr': '2030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag - # Schaltzeiten HK_M2 - 'Timer_M2_Mo': {'addr': '3000', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 
Montag - 'Timer_M2_Di': {'addr': '3008', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Dienstag - 'Timer_M2_Mi': {'addr': '3010', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Mittwoch - 'Timer_M2_Do': {'addr': '3018', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Donnerstag - 'Timer_M2_Fr': {'addr': '3020', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Freitag - 'Timer_M2_Sa': {'addr': '3028', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Samstag - 'Timer_M2_So': {'addr': '3030', 'len': 8, 'unit': 'CT', 'set': True}, # Timer Heizkreis_A1M1 Sonntag + 'Allgemein': { + # Allgemein + 'Temperatur': { + 'Aussen': {'read': True, 'write': False, 'opcode': '0800', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur_tiefpass + 'Aussen_Dp': {'read': True, 'write': False, 'opcode': '5527', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # Aussentemperatur in Grad C (Gedaempft) + }, + 'Anlagenschema': {'read': True, 'write': False, 'opcode': '7700', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'systemschemes'}, # Anlagenschema + 'AnlagenSoftwareIndex': {'read': True, 'write': False, 'opcode': '7330', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Bedienteil SoftwareIndex + 'Systemtime': {'read': True, 'write': True, 'opcode': '088e', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'T', 'params': {'value': 'VAL', 'len': 8}}, # Systemzeit + }, + 'Kessel': { + # Kessel + 'TempKOffset': {'read': True, 'write': True, 'opcode': '6760', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 50}}, # Kesseloffset KT ueber WWsoll in Grad C + 'Ist': {'read': True, 'write': False, 'opcode': '0802', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesseltemperatur + 'Soll': {'read': True, 'write': True, 'opcode': '5502', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Kesselsolltemperatur + }, + 'Fehler': { + # Fehler + 'Sammelstoerung': {'read': True, 'write': False, 'opcode': '0847', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # Sammelstörung + 'Brennerstoerung': {'read': True, 'write': False, 'opcode': '0883', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, + 'Error0': {'read': True, 'write': False, 'opcode': '7507', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 1 + 'Error1': {'read': True, 'write': False, 'opcode': '7510', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 2 + 'Error2': {'read': True, 'write': False, 'opcode': '7519', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 3 + 'Error3': {'read': True, 'write': False, 'opcode': '7522', 
'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 4 + 'Error4': {'read': True, 'write': False, 'opcode': '752b', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 5 + 'Error5': {'read': True, 'write': False, 'opcode': '7534', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 6 + 'Error6': {'read': True, 'write': False, 'opcode': '753d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 7 + 'Error7': {'read': True, 'write': False, 'opcode': '7546', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 8 + 'Error8': {'read': True, 'write': False, 'opcode': '754f', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 9 + 'Error9': {'read': True, 'write': False, 'opcode': '7558', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 9}, 'lookup': 'errors'}, # Fehlerhistory Eintrag 10 + }, + 'Pumpen': { + # Pumpen + 'Speicherlade': {'read': True, 'write': False, 'opcode': '0845', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Speicherladepumpe für Warmwasser + 'Zirkulation': {'read': True, 'write': False, 'opcode': '0846', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Zirkulationspumpe + 'Heizkreis_A1M1': {'read': True, 'write': False, 'opcode': '2906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe A1M1 + 'Heizkreis_M2': {'read': True, 'write': False, 'opcode': '3906', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Heizkreispumpe M2 + }, + 'Brenner': { + # Brenner + 'Typ': {'read': True, 'write': False, 'opcode': 'a30b', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Brennertyp 0=einstufig 1=zweistufig 2=modulierend + 'Stufe': {'read': True, 'write': False, 'opcode': '551e', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # Ermittle die aktuelle Brennerstufe + 'Starts': {'read': True, 'write': True, 'opcode': '088a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 2}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Brennerstarts + 'Status_1': {'read': True, 'write': False, 'opcode': '55d3', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Brennerstatus Stufe1 + 'Status_2': {'read': True, 'write': False, 'opcode': '0849', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Brennerstatus Stufe2 + 'BetriebsstundenStufe1': {'read': True, 'write': True, 'opcode': '0886', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, 
# Brenner-Betriebsstunden Stufe 1 + 'BetriebsstundenStufe2': {'read': True, 'write': True, 'opcode': '08a3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}, 'cmd_settings': {'force_min': 0, 'force_max': 1193045}}, # Brenner-Betriebsstunden Stufe 2 + }, + 'Heizkreis': { + 'A1M1': { + # Heizkreis A1M1 + 'Temperatur': { + 'Raum': { + 'Soll_Normal': {'read': True, 'write': True, 'opcode': '2306', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Normalbetrieb A1M1 + 'Soll_Reduziert': {'read': True, 'write': True, 'opcode': '2307', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Reduzierter Betrieb A1M1 + 'Soll_Party': {'read': True, 'write': True, 'opcode': '2308', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Party Betrieb A1M1 + }, + 'Vorlauf': { + 'Ist': {'read': True, 'write': False, 'opcode': '2900', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur A1M1 + 'Soll': {'read': True, 'write': False, 'opcode': '2544', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Soll A1M1 + }, + }, + 'Betriebsart': {'read': True, 'write': True, 'opcode': '2301', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Betriebsart A1M1 + 'Aktuelle_Betriebsart': {'read': True, 'write': False, 'opcode': '2500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Aktuelle Betriebsart A1M1 + 'Sparbetrieb': {'read': True, 'write': True, 'opcode': '2302', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Sparbetrieb A1M1 + 'Partybetrieb_Zeit': {'read': True, 'write': True, 'opcode': '27f2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 12}}, # Partyzeit A1M1 + 'Partybetrieb': {'read': True, 'write': True, 'opcode': '2303', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Partybetrieb A1M1 + 'MischerM1': {'read': True, 'write': False, 'opcode': '254c', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 2.55, 'len': 1}}, # Ermittle Mischerposition M1 + 'Heizkreispumpenlogik': {'read': True, 'write': True, 'opcode': '27a5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > 
RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K + 'Sparschaltung': {'read': True, 'write': True, 'opcode': '27a6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 5, 'force_max': 36}}, # AbsolutSommersparschaltung + 'Heizkennlinie': { + 'Neigung': {'read': True, 'write': True, 'opcode': '2305', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 1}, 'cmd_settings': {'force_min': 0.2, 'force_max': 3.5}}, # Neigung Heizkennlinie A1M1 + 'Niveau': {'read': True, 'write': True, 'opcode': '2304', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -13, 'force_max': 40}}, # Niveau Heizkennlinie A1M1 + }, + }, + 'M2': { + # Heizkreis M2 + 'Temperatur': { + 'Raum': { + 'Soll_Normal': {'read': True, 'write': True, 'opcode': '3306', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Normalbetrieb + 'Soll_Reduziert': {'read': True, 'write': True, 'opcode': '3307', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Reduzierter Betrieb + 'Soll_Party': {'read': True, 'write': True, 'opcode': '3308', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 4, 'force_max': 37}}, # Raumtemperatur Soll Party Betrieb + }, + 'Vorlauf': { + 'Soll': {'read': True, 'write': True, 'opcode': '37c6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}, 'cmd_settings': {'force_min': 10, 'force_max': 80}}, # Vorlauftemperatur Soll + 'Ist': {'read': True, 'write': False, 'opcode': '080c', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Vorlauftemperatur Ist + 'Min': {'read': True, 'write': True, 'opcode': '37c5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 1, 'force_max': 127}}, # Minimalbegrenzung der Vorlauftemperatur + 'Max': {'read': True, 'write': True, 'opcode': '37c6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 1, 'force_max': 127}}, # Maximalbegrenzung der Vorlauftemperatur + }, + }, + 'Betriebsart': {'read': True, 'write': True, 'opcode': '3301', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Betriebsart M2 + 'Aktuelle_Betriebsart': {'read': True, 'write': False, 'opcode': '3500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes'}, # Aktuelle Betriebsart M2 + 'Sparbetrieb': {'read': True, 'write': True, 'opcode': '3302', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Sparbetrieb + 'Partybetrieb': {'read': True, 'write': True, 'opcode': '3303', 'reply_pattern': '*', 'item_type': 'num', 
'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # Partybetrieb M2 + 'Partybetrieb_Zeit': {'read': True, 'write': True, 'opcode': '37f2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 12}}, # Partyzeit M2 + 'MischerM2': {'read': True, 'write': False, 'opcode': '354c', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 2.55, 'len': 1}}, # Ermittle Mischerposition M2 + 'MischerM2Auf': {'read': True, 'write': True, 'opcode': '084d', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # MischerM2 Auf 0=AUS;1=EIN + 'MischerM2Zu': {'read': True, 'write': True, 'opcode': '084c', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 1}}, # MischerM2 Zu 0=AUS;1=EIN + 'Heizkreispumpenlogik': {'read': True, 'write': True, 'opcode': '37a5', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 15}}, # 0=ohne HPL-Funktion, 1=AT > RTsoll + 5 K, 2=AT > RTsoll + 4 K, 3=AT > RTsoll + 3 K, 4=AT > RTsoll + 2 K, 5=AT > RTsoll + 1 K, 6=AT > RTsoll, 7=AT > RTsoll - 1 K, 8=AT > RTsoll - 2 K, 9=AT > RTsoll - 3 K, 10=AT > RTsoll - 4 K, 11=AT > RTsoll - 5 K, 12=AT > RTsoll - 6 K, 13=AT > RTsoll - 7 K, 14=AT > RTsoll - 8 K, 15=AT > RTsoll - 9 K + 'Sparschaltung': {'read': True, 'write': True, 'opcode': '37a6', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 5, 'force_max': 36}}, # AbsolutSommersparschaltung + 'StatusKlemme2': {'read': True, 'write': False, 'opcode': '3904', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden + 'StatusKlemme17': {'read': True, 'write': False, 'opcode': '3905', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # 0=OK, 1=Kurzschluss, 2=nicht vorhanden, 3-5=Referenzfehler, 6=nicht vorhanden + 'Heizkennlinie': { + 'Neigung': {'read': True, 'write': True, 'opcode': '3305', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 1}, 'cmd_settings': {'force_min': 0.2, 'force_max': 3.5}}, # Neigung Heizkennlinie M2 + 'Niveau': {'read': True, 'write': True, 'opcode': '3304', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': -13, 'force_max': 40}}, # Niveau Heizkennlinie M2 + }, + }, + }, + 'Warmwasser': { + # Warmwasser + 'Status': {'read': True, 'write': False, 'opcode': '650A', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # 0=Ladung inaktiv, 1=in Ladung, 2=im Nachlauf + 'KesselOffset': {'read': True, 'write': True, 'opcode': '6760', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 50}}, # Warmwasser Kessel Offset in K + 'BeiPartyDNormal': {'read': True, 'write': True, 'opcode': '6764', 'reply_pattern': '*', 
'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 2}}, # WW Heizen bei Party 0=AUS, 1=nach Schaltuhr, 2=EIN + 'Ist': {'read': True, 'write': False, 'opcode': '0804', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 2}}, # Warmwassertemperatur in Grad C + 'Soll': {'read': True, 'write': True, 'opcode': '6300', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'signed': True, 'len': 1}, 'cmd_settings': {'force_min': 10, 'force_max': 80}}, # Warmwasser-Solltemperatur + 'SollAktuell': {'read': True, 'write': False, 'opcode': '6500', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 1}}, # Warmwasser-Solltemperatur aktuell + 'SollMax': {'read': True, 'write': False, 'opcode': '675a', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # 0=inaktiv, 1=aktiv + }, + 'Ferienprogramm': { + 'A1M1': { + # Ferienprogramm HK + 'Status': {'read': True, 'write': False, 'opcode': '2535', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Ferienprogramm A1M1 0=inaktiv 1=aktiv + 'Abreisetag': {'read': True, 'write': True, 'opcode': '2309', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Abreisetag A1M1 + 'Rückreisetag': {'read': True, 'write': True, 'opcode': '2311', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Rückreisetag A1M1 + }, + 'M2': { + # Ferienprogramm HK + 'Status': {'read': True, 'write': False, 'opcode': '3535', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # Ferienprogramm M2 0=inaktiv 1=aktiv + 'Abreisetag': {'read': True, 'write': True, 'opcode': '3309', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Abreisetag M2 + 'Rückreisetag': {'read': True, 'write': True, 'opcode': '3311', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'D', 'params': {'value': 'VAL', 'len': 8}}, # Ferien Rückreisetag M2 + }, + }, + 'Timer': { + 'Warmwasser': { + # Schaltzeiten Warmwasser + 'Mo': {'read': True, 'write': True, 'opcode': '2100', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Montag + 'Di': {'read': True, 'write': True, 'opcode': '2108', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '2110', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '2118', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '2120', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '2128', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung 
Samstag + 'So': {'read': True, 'write': True, 'opcode': '2130', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Warmwasserbereitung Sonntag + }, + 'A1M1': { + # Schaltzeiten HK + 'Mo': {'read': True, 'write': True, 'opcode': '2000', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Montag + 'Di': {'read': True, 'write': True, 'opcode': '2008', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '2010', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '2018', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '2020', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '2028', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Samstag + 'So': {'read': True, 'write': True, 'opcode': '2030', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Sonntag + }, + 'M2': { + # Schaltzeiten HK + 'Mo': {'read': True, 'write': True, 'opcode': '3000', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Montag + 'Di': {'read': True, 'write': True, 'opcode': '3008', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Dienstag + 'Mi': {'read': True, 'write': True, 'opcode': '3010', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Mittwoch + 'Do': {'read': True, 'write': True, 'opcode': '3018', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Donnerstag + 'Fr': {'read': True, 'write': True, 'opcode': '3020', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Freitag + 'Sa': {'read': True, 'write': True, 'opcode': '3028', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Samstag + 'So': {'read': True, 'write': True, 'opcode': '3030', 'reply_pattern': '*', 'item_type': 'list', 'dev_datatype': 'C', 'params': {'value': 'VAL', 'len': 8}}, # Timer Heizkreis Sonntag + }, + }, }, 'V200WO1C': { - # generelle Infos - 'Anlagentyp': {'addr': '00f8', 'len': 2, 'unit': 'DT', 'set': False}, # getAnlTyp -- Information - Allgemein: Anlagentyp (204D) - 'Aussentemperatur': {'addr': '0101', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempA -- Information - Allgemein: Aussentemperatur (-40..70) - # Anlagenstatus - 'Betriebsart': {'addr': 'b000', 'len': 1, 'unit': 'BA', 'set': True}, # getBetriebsart -- Bedienung HK1 - Heizkreis 1: Betriebsart (Textstring) - 'Manuell': {'addr': 'b020', 'len': 1, 'unit': 'IUNON', 'set': True, 'min_value': 0, 'max_value': 2}, # getManuell / setManuell -- 0 = normal, 1 = manueller Heizbetrieb, 2 = 1x Warmwasser auf 
Temp2 - 'Sekundaerpumpe': {'addr': '0484', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusSekP -- Diagnose - Anlagenuebersicht: Sekundaerpumpe 1 (0..1) - 'Heizkreispumpe': {'addr': '048d', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusPumpe -- Information - Heizkreis HK1: Heizkreispumpe (0..1) - 'Zirkulationspumpe': {'addr': '0490', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusPumpeZirk -- Information - Warmwasser: Zirkulationspumpe (0..1) - 'VentilHeizenWW': {'addr': '0494', 'len': 1, 'unit': 'RT', 'set': False}, # getStatusVentilWW -- Diagnose - Waermepumpe: 3-W-Ventil Heizen WW1 (0 (Heizen)..1 (WW)) - 'Vorlaufsolltemp': {'addr': '1800', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempVLSoll -- Diagnose - Heizkreis HK1: Vorlaufsolltemperatur HK1 (0..95) - 'Outdoor_Fanspeed': {'addr': '1a52', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdFanOut -- Outdoor Fanspeed - 'Status_Fanspeed': {'addr': '1a53', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdFan -- Geschwindigkeit Luefter - 'Kompressor_Freq': {'addr': '1a54', 'len': 1, 'unit': 'IUNON', 'set': False}, # getSpdKomp -- Compressor Frequency - # Temperaturen - 'SolltempWarmwasser': {'addr': '6000', 'len': 2, 'unit': 'IS10', 'set': True, 'min_value': 10, 'max_value': 60}, # getTempWWSoll -- Bedienung WW - Betriebsdaten WW: Warmwassersolltemperatur (10..60 (95)) - 'VorlauftempSek': {'addr': '0105', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekVL -- Information - Heizkreis HK1: Vorlauftemperatur Sekundaer 1 (0..95) - 'RuecklauftempSek': {'addr': '0106', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekRL -- Diagnose - Anlagenuebersicht: Ruecklauftemperatur Sekundaer 1 (0..95) - 'Warmwassertemperatur': {'addr': '010d', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempWWIstOben -- Information - Warmwasser: Warmwassertemperatur oben (0..95) - # Stellwerte - 'Raumsolltemp': {'addr': '2000', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollNormal -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur normal (10..30) - 'RaumsolltempReduziert': {'addr': '2001', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollRed -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur reduzierter Betrieb (10..30) - 'HeizkennlinieNiveau': {'addr': '2006', 'len': 2, 'unit': 'IS10', 'set': False}, # getHKLNiveau -- Bedienung HK1 - Heizkreis 1: Niveau der Heizkennlinie (-15..40) - 'HeizkennlinieNeigung': {'addr': '2007', 'len': 2, 'unit': 'IS10', 'set': False}, # getHKLNeigung -- Bedienung HK1 - Heizkreis 1: Neigung der Heizkennlinie (0..35) - 'RaumsolltempParty': {'addr': '2022', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempRaumSollParty -- Bedienung HK1 - Heizkreis 1: Party Solltemperatur (10..30) - # Statistiken / Laufzeiten - 'EinschaltungenSekundaer': {'addr': '0504', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzQuelleSek -- Statistik - Schaltzyklen Anlage: Einschaltungen Sekundaerquelle (?) - 'EinschaltungenHeizstab1': {'addr': '0508', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHeizstabSt1 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 1 (?) - 'EinschaltungenHeizstab2': {'addr': '0509', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHeizstabSt2 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 2 (?) - 'EinschaltungenHK': {'addr': '050d', 'len': 4, 'unit': 'IUNON', 'set': False}, # getAnzHK -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizkreis (?) 
- 'LZSekundaerpumpe': {'addr': '0584', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZPumpeSek -- Statistik - Betriebsstunden Anlage: Betriebsstunden Sekundaerpumpe (?) - 'LZHeizstab1': {'addr': '0588', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZHeizstabSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 1 (?) - 'LZHeizstab2': {'addr': '0589', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZHeizstabSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 2 (?) - 'LZPumpeHK': {'addr': '058d', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZPumpe -- Statistik - Betriebsstunden Anlage: Betriebsstunden Pumpe HK1 (0..1150000) - 'LZWWVentil': {'addr': '0594', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZVentilWW -- Statistik - Betriebsstunden Anlage: Betriebsstunden Warmwasserventil (?) - 'LZVerdichterStufe1': {'addr': '1620', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 1 (?) - 'LZVerdichterStufe2': {'addr': '1622', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 2 (?) - 'LZVerdichterStufe3': {'addr': '1624', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt3 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 3 (?) - 'LZVerdichterStufe4': {'addr': '1626', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt4 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 4 (?) - 'LZVerdichterStufe5': {'addr': '1628', 'len': 4, 'unit': 'IUNON', 'set': False}, # getLZVerdSt5 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 5 (?) - 'VorlauftempSekMittel': {'addr': '16b2', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekVLMittel -- Statistik - Energiebilanz: mittlere sek. Vorlauftemperatur (0..95) - 'RuecklauftempSekMittel': {'addr': '16b3', 'len': 2, 'unit': 'IS10', 'set': False}, # getTempSekRLMittel -- Statistik - Energiebilanz: mittlere sek.Temperatur RL1 (0..95) - 'OAT_Temperature': {'addr': '1a5c', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempOAT -- OAT Temperature - 'ICT_Temperature': {'addr': '1a5d', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempICT -- OCT Temperature - 'CCT_Temperature': {'addr': '1a5e', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempCCT -- CCT Temperature - 'HST_Temperature': {'addr': '1a5f', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempHST -- HST Temperature - 'OMT_Temperature': {'addr': '1a60', 'len': 1, 'unit': 'IUNON', 'set': False}, # getTempOMT -- OMT Temperature - 'LZVerdichterWP': {'addr': '5005', 'len': 4, 'unit': 'IU3600', 'set': False}, # getLZWP -- Statistik - Betriebsstunden Anlage: Betriebsstunden Waermepumpe (0..1150000) - 'SollLeistungVerdichter': {'addr': '5030', 'len': 1, 'unit': 'IUNON', 'set': False}, # getPwrSollVerdichter -- Diagnose - Anlagenuebersicht: Soll-Leistung Verdichter 1 (0..100) - 'WaermeWW12M': {'addr': '1660', 'len': 4, 'unit': 'IU10', 'set': False}, # Wärmeenergie für WW-Bereitung der letzten 12 Monate (kWh) - 'ElektroWW12M': {'addr': '1670', 'len': 4, 'unit': 'IU10', 'set': False}, # elektr. 
Energie für WW-Bereitung der letzten 12 Monate (kWh) - }, -} - -unitset = { - 'P300': { - 'BA': {'unit_de': 'Betriebsart', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: BA - 'CT': {'unit_de': 'CycleTime', 'type': 'timer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: CT - 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: DT - 'ES': {'unit_de': 'ErrorState', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ES - 'IU2': {'unit_de': 'INT unsigned 2', 'type': 'integer', 'signed': False, 'read_value_transform': '2'}, # vito unit: UT1U, PR1 - 'IU10': {'unit_de': 'INT unsigned 10', 'type': 'integer', 'signed': False, 'read_value_transform': '10'}, # vito unit: - 'IU100': {'unit_de': 'INT unsigned 100', 'type': 'integer', 'signed': False, 'read_value_transform': '100'}, # vito unit: - 'IU3600': {'unit_de': 'INT unsigned 3600', 'type': 'integer', 'signed': False, 'read_value_transform': '3600'}, # vito unit: CS - 'IUBOOL': {'unit_de': 'INT unsigned bool', 'type': 'integer', 'signed': False, 'read_value_transform': 'bool'}, # vito unit: - 'IUINT': {'unit_de': 'INT unsigned int', 'type': 'integer', 'signed': False, 'read_value_transform': '1'}, # vito unit: - 'IUNON': {'unit_de': 'INT unsigned non', 'type': 'integer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: UTI, CO - 'IS2': {'unit_de': 'INT signed 2', 'type': 'integer', 'signed': True, 'read_value_transform': '2'}, # vito unit: UT1, PR - 'IS10': {'unit_de': 'INT signed 10', 'type': 'integer', 'signed': True, 'read_value_transform': '10'}, # vito unit: UT, UN - 'IS100': {'unit_de': 'INT signed 100', 'type': 'integer', 'signed': True, 'read_value_transform': '100'}, # vito unit: - 'IS1000': {'unit_de': 'INT signed 1000', 'type': 'integer', 'signed': True, 'read_value_transform': '1000'}, # vito unit: - 'ISNON': {'unit_de': 'INT signed non', 'type': 'integer', 'signed': True, 'read_value_transform': 'non'}, # vito unit: - 'RT': {'unit_de': 'ReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ST, RT - 'SC': {'unit_de': 'SystemScheme', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'SN': {'unit_de': 'Sachnummer', 'type': 'serial', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'SR': {'unit_de': 'SetReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'TI': {'unit_de': 'SystemTime', 'type': 'datetime', 'signed': False, 'read_value_transform': 'non'}, # vito unit: TI - 'DA': {'unit_de': 'Date', 'type': 'date', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'HEX': {'unit_de': 'HexString', 'type': 'string', 'signed': False, 'read_value_transform': 'hex'}, # vito unit: - }, - 'KW': { - 'BA': {'unit_de': 'Betriebsart', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: BA - 'CT': {'unit_de': 'CycleTime', 'type': 'timer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: CT - 'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: DT - 'ES': {'unit_de': 'ErrorState', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ES - 'IU2': {'unit_de': 'INT unsigned 2', 'type': 'integer', 'signed': False, 'read_value_transform': '2'}, # vito unit: UT1U, PR1 - 'IU10': {'unit_de': 'INT unsigned 10', 'type': 'integer', 'signed': False, 
'read_value_transform': '10'}, # vito unit: - 'IU100': {'unit_de': 'INT unsigned 100', 'type': 'integer', 'signed': False, 'read_value_transform': '100'}, # vito unit: - 'IU1000': {'unit_de': 'INT unsigned 1000', 'type': 'integer', 'signed': False, 'read_value_transform': '1000'}, # vito unit: - 'IU3600': {'unit_de': 'INT unsigned 3600', 'type': 'integer', 'signed': False, 'read_value_transform': '3600'}, # vito unit: CS - 'IUPR': {'unit_de': 'INT unsigned 2.55', 'type': 'integer', 'signed': False, 'read_value_transform': '2.55'}, # vito unit: PP - 'IUBOOL': {'unit_de': 'INT unsigned bool', 'type': 'integer', 'signed': False, 'read_value_transform': 'bool'}, # vito unit: - 'IUINT': {'unit_de': 'INT unsigned int', 'type': 'integer', 'signed': False, 'read_value_transform': '1'}, # vito unit: - 'IUNON': {'unit_de': 'INT unsigned non', 'type': 'integer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: UTI, CO - 'IS2': {'unit_de': 'INT signed 2', 'type': 'integer', 'signed': True, 'read_value_transform': '2'}, # vito unit: UT1, PR - 'IS10': {'unit_de': 'INT signed 10', 'type': 'integer', 'signed': True, 'read_value_transform': '10'}, # vito unit: UT, UN - 'IS100': {'unit_de': 'INT signed 100', 'type': 'integer', 'signed': True, 'read_value_transform': '100'}, # vito unit: - 'IS1000': {'unit_de': 'INT signed 1000', 'type': 'integer', 'signed': True, 'read_value_transform': '1000'}, # vito unit: - 'ISNON': {'unit_de': 'INT signed non', 'type': 'integer', 'signed': True, 'read_value_transform': 'non'}, # vito unit: - 'RT': {'unit_de': 'ReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ST, RT - 'BT': {'unit_de': 'Brennertyp', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'SC': {'unit_de': 'SystemScheme', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'SN': {'unit_de': 'Sachnummer', 'type': 'serial', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'SR': {'unit_de': 'SetReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'TI': {'unit_de': 'SystemTime', 'type': 'datetime', 'signed': False, 'read_value_transform': 'non'}, # vito unit: TI - 'DA': {'unit_de': 'Date', 'type': 'date', 'signed': False, 'read_value_transform': 'non'}, # vito unit: - 'HEX': {'unit_de': 'HexString', 'type': 'string', 'signed': False, 'read_value_transform': 'hex'}, # vito unit: + 'Allgemein': {'item_attrs': {'cycle': 45}, + 'Temperatur': { + 'Aussen': {'read': True, 'write': False, 'opcode': '0101', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempA -- Information - Allgemein: Aussentemperatur (-40..70) + }, + # Anlagenstatus + 'Betriebsart': {'read': True, 'write': True, 'opcode': 'b000', 'reply_pattern': '*', 'item_type': 'str', 'dev_datatype': 'H', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'operatingmodes', 'item_attrs': {'attributes': {'md_read_initial': True}, 'lookup_item': True}}, # getBetriebsart -- Bedienung HK1 - Heizkreis 1: Betriebsart (Textstring) + 'Manuell': {'read': True, 'write': True, 'opcode': 'b020', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'cmd_settings': {'force_min': 0, 'force_max': 2}}, # getManuell / setManuell -- 0 = normal, 1 = manueller Heizbetrieb, 2 = 1x Warmwasser auf Temp2 + # Allgemein + 'Outdoor_Fanspeed': {'read': True, 'write': False, 'opcode': '1a52', 
'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getSpdFanOut -- Outdoor Fanspeed + 'Status_Fanspeed': {'read': True, 'write': False, 'opcode': '1a53', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getSpdFan -- Geschwindigkeit Luefter + 'Kompressor_Freq': {'read': True, 'write': False, 'opcode': '1a54', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getSpdKomp -- Compressor Frequency + 'SollLeistungVerdichter': {'read': True, 'write': False, 'opcode': '5030', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getPwrSollVerdichter -- Diagnose - Anlagenuebersicht: Soll-Leistung Verdichter 1 (0..100) + }, + 'Pumpen': { + 'Sekundaer': {'read': True, 'write': False, 'opcode': '0484', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # getStatusSekP -- Diagnose - Anlagenuebersicht: Sekundaerpumpe 1 (0..1) + 'Heizkreis': {'read': True, 'write': False, 'opcode': '048d', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # getStatusPumpe -- Information - Heizkreis HK1: Heizkreispumpe (0..1) + 'Zirkulation': {'read': True, 'write': False, 'opcode': '0490', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # getStatusPumpeZirk -- Information - Warmwasser: Zirkulationspumpe (0..1) + }, + 'Heizkreis': { + 'Temperatur': { + 'Raum': { + 'Soll': {'read': True, 'write': False, 'opcode': '2000', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempRaumSollNormal -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur normal (10..30) + 'Soll_Reduziert': {'read': True, 'write': False, 'opcode': '2001', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempRaumSollRed -- Bedienung HK1 - Heizkreis 1: Raumsolltemperatur reduzierter Betrieb (10..30) + 'Soll_Party': {'read': True, 'write': False, 'opcode': '2022', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempRaumSollParty -- Bedienung HK1 - Heizkreis 1: Party Solltemperatur (10..30) + }, + 'Vorlauf': { + 'Ist': {'read': True, 'write': False, 'opcode': '0105', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempSekVL -- Information - Heizkreis HK1: Vorlauftemperatur Sekundaer 1 (0..95) + 'Soll': {'read': True, 'write': False, 'opcode': '1800', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempVLSoll -- Diagnose - Heizkreis HK1: Vorlaufsolltemperatur HK1 (0..95) + 'Mittel': {'read': True, 'write': False, 'opcode': '16b2', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempSekVLMittel -- Statistik - Energiebilanz: mittlere sek. 
Vorlauftemperatur (0..95) + }, + 'Ruecklauf': { + 'Ist': {'read': True, 'write': False, 'opcode': '0106', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempSekRL -- Diagnose - Anlagenuebersicht: Ruecklauftemperatur Sekundaer 1 (0..95) + 'Mittel': {'read': True, 'write': False, 'opcode': '16b3', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempSekRLMittel -- Statistik - Energiebilanz: mittlere sek.Temperatur RL1 (0..95) + }, + }, + 'Heizkennlinie': { + 'Niveau': {'read': True, 'write': False, 'opcode': '2006', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getHKLNiveau -- Bedienung HK1 - Heizkreis 1: Niveau der Heizkennlinie (-15..40) + 'Neigung': {'read': True, 'write': False, 'opcode': '2007', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getHKLNeigung -- Bedienung HK1 - Heizkreis 1: Neigung der Heizkennlinie (0..35) + }, + }, + 'Warmwasser': { + 'Ist': {'read': True, 'write': False, 'opcode': '010d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}}, # getTempWWIstOben -- Information - Warmwasser: Warmwassertemperatur oben (0..95) + 'Soll': {'read': True, 'write': True, 'opcode': '6000', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'signed': True, 'len': 2}, 'cmd_settings': {'force_min': 10, 'force_max': 60}}, # getTempWWSoll -- Bedienung WW - Betriebsdaten WW: Warmwassersolltemperatur (10..60 (95)) + 'Ventil': {'read': True, 'write': False, 'opcode': '0494', 'reply_pattern': '*', 'item_type': 'bool', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}, 'lookup': 'returnstatus'}, # getStatusVentilWW -- Diagnose - Waermepumpe: 3-W-Ventil Heizen WW1 (0 (Heizen)..1 (WW)) + }, + 'Statistik': { + # Statistiken / Laufzeiten + 'Einschaltungen': { + 'Sekundaer': {'read': True, 'write': False, 'opcode': '0504', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getAnzQuelleSek -- Statistik - Schaltzyklen Anlage: Einschaltungen Sekundaerquelle (?) + 'Heizstab1': {'read': True, 'write': False, 'opcode': '0508', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getAnzHeizstabSt1 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 1 (?) + 'Heizstab2': {'read': True, 'write': False, 'opcode': '0509', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getAnzHeizstabSt2 -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizstab Stufe 2 (?) + 'HK': {'read': True, 'write': False, 'opcode': '050d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getAnzHK -- Statistik - Schaltzyklen Anlage: Einschaltungen Heizkreis (?) + }, + 'Laufzeiten': { + 'Sekundaerpumpe': {'read': True, 'write': False, 'opcode': '0584', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZPumpeSek -- Statistik - Betriebsstunden Anlage: Betriebsstunden Sekundaerpumpe (?) 
+ 'Heizstab1': {'read': True, 'write': False, 'opcode': '0588', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZHeizstabSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 1 (?) + 'Heizstab2': {'read': True, 'write': False, 'opcode': '0589', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZHeizstabSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Heizstab Stufe 2 (?) + 'PumpeHK': {'read': True, 'write': False, 'opcode': '058d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZPumpe -- Statistik - Betriebsstunden Anlage: Betriebsstunden Pumpe HK1 (0..1150000) + 'WWVentil': {'read': True, 'write': False, 'opcode': '0594', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZVentilWW -- Statistik - Betriebsstunden Anlage: Betriebsstunden Warmwasserventil (?) + 'VerdichterStufe1': {'read': True, 'write': False, 'opcode': '1620', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getLZVerdSt1 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 1 (?) + 'VerdichterStufe2': {'read': True, 'write': False, 'opcode': '1622', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getLZVerdSt2 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 2 (?) + 'VerdichterStufe3': {'read': True, 'write': False, 'opcode': '1624', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getLZVerdSt3 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 3 (?) + 'VerdichterStufe4': {'read': True, 'write': False, 'opcode': '1626', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getLZVerdSt4 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 4 (?) + 'VerdichterStufe5': {'read': True, 'write': False, 'opcode': '1628', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 4}}, # getLZVerdSt5 -- Statistik - Betriebsstunden Anlage: Betriebsstunden Verdichter auf Stufe 5 (?) 
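The len, signed and mult parameters in the command table above describe how a raw reply payload becomes an item value. The following is a minimal sketch of that conversion, assuming the little-endian decoding done by bytes2int in the accompanying datatypes.py; decode_reply and the sample payloads are illustrative only and not part of the plugin (DT_V additionally casts the result to int when mult is given as an integer, which is omitted here).

# Illustrative sketch, not the plugin's actual call path:
def decode_reply(payload: bytes, signed: bool = False, mult: int = 0):
    # replies are decoded little-endian (see bytes2int in datatypes.py below)
    value = int.from_bytes(payload, byteorder='little', signed=signed)
    return round(value / mult, 2) if mult else value

# Aussentemperatur (opcode 0101, len 2, signed, mult 10): payload EF 00 -> 23.9 (degrees C)
print(decode_reply(bytes([0xEF, 0x00]), signed=True, mult=10))   # 23.9
# Betriebsstunden Sekundaerpumpe (opcode 0584, len 4, mult 3600): seconds -> hours
print(decode_reply((7200).to_bytes(4, 'little'), mult=3600))     # 2.0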
+ 'VerdichterWP': {'read': True, 'write': False, 'opcode': '5005', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 3600, 'len': 4}}, # getLZWP -- Statistik - Betriebsstunden Anlage: Betriebsstunden Waermepumpe (0..1150000) + }, + 'OAT_Temperature': {'read': True, 'write': False, 'opcode': '1a5c', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getTempOAT -- OAT Temperature + 'ICT_Temperature': {'read': True, 'write': False, 'opcode': '1a5d', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getTempICT -- OCT Temperature + 'CCT_Temperature': {'read': True, 'write': False, 'opcode': '1a5e', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getTempCCT -- CCT Temperature + 'HST_Temperature': {'read': True, 'write': False, 'opcode': '1a5f', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getTempHST -- HST Temperature + 'OMT_Temperature': {'read': True, 'write': False, 'opcode': '1a60', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 1}}, # getTempOMT -- OMT Temperature + 'WaermeWW12M': {'read': True, 'write': False, 'opcode': '1660', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 4}}, # Wärmeenergie für WW-Bereitung der letzten 12 Monate (kWh) + 'ElektroWW12M': {'read': True, 'write': False, 'opcode': '1670', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'mult': 10, 'len': 4}}, # elektr. Energie für WW-Bereitung der letzten 12 Monate (kWh) + } } } -errorset = { - 'P300': { - '00': 'Regelbetrieb (kein Fehler)', - '0F': 'Wartung (fuer Reset Codieradresse 24 auf 0 stellen)', - '10': 'Kurzschluss Aussentemperatursensor', - '18': 'Unterbrechung Aussentemperatursensor', - '19': 'Unterbrechung Kommunikation Außentemperatursensor RF', - '1D': 'Keine Kommunikation mit Sensor', - '1E': 'Strömungssensor defekt', - '1F': 'Strömungssensor defekt', - '20': 'Kurzschluss Vorlauftemperatursensor', - '21': 'Kurzschluss Ruecklauftemperatursensor', - '28': 'Unterbrechung Aussentemperatursensor / Vorlauftemperatursensor Anlage', - '29': 'Unterbrechung Ruecklauftemperatursensor', - '30': 'Kurzschluss Kesseltemperatursensor', - '38': 'Unterbrechung Kesseltemperatursensor', - '40': 'Kurzschluss Vorlauftemperatursensor M2', - '42': 'Unterbrechung Vorlauftemperatursensor M2', - '44': 'Kurzschluss Vorlauftemperatursensor Heizkreis 3', - '48': 'Unterbrechung Vorlauftemperatursensor Heizkreis 3', - '50': 'Kurzschluss Speichertemperatursensor', - '51': 'Kurzschluss Auslauftemperatursensor', - '58': 'Unterbrechung Speichertemperatursensor', - '59': 'Unterbrechung Auslauftemperatursensor', - '92': 'Solar: Kurzschluss Kollektortemperatursensor', - '93': 'Solar: Kurzschluss Sensor S3', - '94': 'Solar: Kurzschluss Speichertemperatursensor', - '9A': 'Solar: Unterbrechung Kollektortemperatursensor', - '9B': 'Solar: Unterbrechung Sensor S3', - '9C': 'Solar: Unterbrechung Speichertemperatursensor', - '9E': 'Solar: Zu geringer bzw. kein Volumenstrom oder Temperaturwächter ausgeloest', - '9F': 'Solar: Fehlermeldung Solarteil (siehe Solarregler)', - 'A4': 'Amx. 
Anlagendruck überschritten', - 'A7': 'Bedienteil defekt', - 'A8': 'Luft in der internen Umwaelzpumpe oder Mindest-Volumenstrom nicht erreicht', - 'B0': 'Kurzschluss Abgastemperatursensor', - 'B1': 'Kommunikationsfehler Bedieneinheit', - 'B4': 'Interner Fehler (Elektronik)', - 'B5': 'Interner Fehler (Elektronik)', - 'B6': 'Ungueltige Hardwarekennung (Elektronik)', - 'B7': 'Interner Fehler (Kesselkodierstecker)', - 'B8': 'Unterbrechung Abgastemperatursensor', - 'B9': 'Interner Fehler (Dateneingabe wiederholen)', - 'BA': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis M2', - 'BB': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis 3', - 'BC': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M1', - 'BD': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M2', - 'BE': 'Falsche Codierung Fernbedienung Vitorol', - 'BF': 'Falsches Kommunikationsmodul LON', - 'C1': 'Externe Sicherheitseinrichtung (Kessel kuehlt aus)', - 'C2': 'Kommunikationsfehler Solarregelung', - 'C3': 'Kommunikationsfehler Erweiterung AM1', - 'C4': 'Kommunikationsfehler Erweiterumg Open Therm', - 'C5': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M1', - 'C6': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M2', - 'C7': 'Falsche Codierung der Heizkreispumpe', - 'C8': 'Kommunikationsfehler drehzahlgeregelte, externe Heizkreispumpe 3', - 'C9': 'Stoermeldeeingang am Schaltmodul-V aktiv', - 'CD': 'Kommunikationsfehler Vitocom 100 (KM-BUS)', - 'CE': 'Kommunikationsfehler Schaltmodul-V', - 'CF': 'Kommunikationsfehler LON Modul', - 'D1': 'Brennerstoerung', - 'D4': 'Sicherheitstemperaturbegrenzer hat ausgeloest oder Stoermeldemodul nicht richtig gesteckt', - 'D6': 'Eingang DE1 an Erweiterung EA1 meldet eine Stoerung', - 'D7': 'Eingang DE2 an Erweiterung EA1 meldet eine Stoerung', - 'D8': 'Eingang DE3 an Erweiterung EA1 meldet eine Stoerung', - 'DA': 'Kurzschluss Raumtemperatursensor, Heizkreis M1', - 'DB': 'Kurzschluss Raumtemperatursensor, Heizkreis M2', - 'DC': 'Kurzschluss Raumtemperatursensor, Heizkreis 3', - 'DD': 'Unterbrechung Raumtemperatursensor, Heizkreis M1', - 'DE': 'Unterbrechung Raumtemperatursensor, Heizkreis M2', - 'DF': 'Unterbrechung Raumtemperatursensor, Heizkreis 3', - 'E0': 'Fehler externer LON Teilnehmer', - 'E1': 'Isolationsstrom waehrend des Kalibrierens zu hoch', - 'E3': 'Zu geringe Wärmeabnahme während des Kalibrierens, Temperaturwächter hat ausgeschaltet', - 'E4': 'Fehler Versorgungsspannung', - 'E5': 'Interner Fehler, Flammenverstärker(Ionisationselektrode)', - 'E6': 'Abgas- / Zuluftsystem verstopft, Anlagendruck zu niedrig', - 'E7': 'Ionisationsstrom waehrend des Kalibrierens zu gering', - 'E8': 'Ionisationsstrom nicht im gültigen Bereich', - 'EA': 'Ionisationsstrom waehrend des Kalibrierens nicht im gueltigen Bereich', - 'EB': 'Wiederholter Flammenverlust waehrend des Kalibrierens', - 'EC': 'Parameterfehler waehrend des Kalibrierens', - 'ED': 'Interner Fehler', - 'EE': 'Flammensignal ist bei Brennerstart nicht vorhanden oder zu gering', - 'EF': 'Flammenverlust direkt nach Flammenbildung (waehrend der Sicherheitszeit)', - 'F0': 'Interner Fehler (Regelung tauschen)', - 'F1': 'Abgastemperaturbegrenzer ausgeloest', - 'F2': 'Temperaturbegrenzer ausgeloest', - 'F3': 'Flammensigal beim Brennerstart bereits vorhanden', - 'F4': 'Flammensigal nicht vorhanden', - 'F7': 'Differenzdrucksensor defekt, Kurzschluss ider Wasserdrucksensor', - 'F8': 'Brennstoffventil schliesst zu spaet', - 'F9': 'Geblaesedrehzahl beim Brennerstart zu niedrig', - 'FA': 
'Geblaesestillstand nicht erreicht', - 'FC': 'Gaskombiregler defekt oder fehlerhafte Ansteuerung Modulationsventil oder Abgasweg versperrt', - 'FD': 'Fehler Gasfeuerungsautomat, Kesselkodierstecker fehlt(in Verbindung mit B7)', - 'FE': 'Starkes Stoerfeld (EMV) in der Naehe oder Elektronik defekt', - 'FF': 'Starkes Stoerfeld (EMV) in der Naehe oder interner Fehler' - }, - 'KW': { - '00': 'Regelbetrieb (kein Fehler)', - '0F': 'Wartung (fuer Reset Codieradresse 24 auf 0 stellen)', - '10': 'Kurzschluss Aussentemperatursensor', - '18': 'Unterbrechung Aussentemperatursensor', - '19': 'Unterbrechung Kommunikation Außentemperatursensor RF', - '1D': 'Keine Kommunikation mit Sensor', - '1E': 'Strömungssensor defekt', - '1F': 'Strömungssensor defekt', - '20': 'Kurzschluss Vorlauftemperatursensor', - '21': 'Kurzschluss Ruecklauftemperatursensor', - '28': 'Unterbrechung Aussentemperatursensor / Vorlauftemperatursensor Anlage', - '29': 'Unterbrechung Ruecklauftemperatursensor', - '30': 'Kurzschluss Kesseltemperatursensor', - '38': 'Unterbrechung Kesseltemperatursensor', - '40': 'Kurzschluss Vorlauftemperatursensor M2', - '42': 'Unterbrechung Vorlauftemperatursensor M2', - '44': 'Kurzschluss Vorlauftemperatursensor Heizkreis 3', - '48': 'Unterbrechung Vorlauftemperatursensor Heizkreis 3', - '50': 'Kurzschluss Speichertemperatursensor', - '51': 'Kurzschluss Auslauftemperatursensor', - '58': 'Unterbrechung Speichertemperatursensor', - '59': 'Unterbrechung Auslauftemperatursensor', - '92': 'Solar: Kurzschluss Kollektortemperatursensor', - '93': 'Solar: Kurzschluss Sensor S3', - '94': 'Solar: Kurzschluss Speichertemperatursensor', - '9A': 'Solar: Unterbrechung Kollektortemperatursensor', - '9B': 'Solar: Unterbrechung Sensor S3', - '9C': 'Solar: Unterbrechung Speichertemperatursensor', - '9E': 'Solar: Zu geringer bzw. kein Volumenstrom oder Temperaturwächter ausgeloest', - '9F': 'Solar: Fehlermeldung Solarteil (siehe Solarregler)', - 'A4': 'Amx. 
Anlagendruck überschritten', - 'A7': 'Bedienteil defekt', - 'A8': 'Luft in der internen Umwaelzpumpe oder Mindest-Volumenstrom nicht erreicht', - 'B0': 'Kurzschluss Abgastemperatursensor', - 'B1': 'Kommunikationsfehler Bedieneinheit', - 'B4': 'Interner Fehler (Elektronik)', - 'B5': 'Interner Fehler (Elektronik)', - 'B6': 'Ungueltige Hardwarekennung (Elektronik)', - 'B7': 'Interner Fehler (Kesselkodierstecker)', - 'B8': 'Unterbrechung Abgastemperatursensor', - 'B9': 'Interner Fehler (Dateneingabe wiederholen)', - 'BA': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis M2', - 'BB': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis 3', - 'BC': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M1', - 'BD': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M2', - 'BE': 'Falsche Codierung Fernbedienung Vitorol', - 'BF': 'Falsches Kommunikationsmodul LON', - 'C1': 'Externe Sicherheitseinrichtung (Kessel kuehlt aus)', - 'C2': 'Kommunikationsfehler Solarregelung', - 'C3': 'Kommunikationsfehler Erweiterung AM1', - 'C4': 'Kommunikationsfehler Erweiterumg Open Therm', - 'C5': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M1', - 'C6': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M2', - 'C7': 'Falsche Codierung der Heizkreispumpe', - 'C8': 'Kommunikationsfehler drehzahlgeregelte, externe Heizkreispumpe 3', - 'C9': 'Stoermeldeeingang am Schaltmodul-V aktiv', - 'CD': 'Kommunikationsfehler Vitocom 100 (KM-BUS)', - 'CE': 'Kommunikationsfehler Schaltmodul-V', - 'CF': 'Kommunikationsfehler LON Modul', - 'D1': 'Brennerstoerung', - 'D4': 'Sicherheitstemperaturbegrenzer hat ausgeloest oder Stoermeldemodul nicht richtig gesteckt', - 'D6': 'Eingang DE1 an Erweiterung EA1 meldet eine Stoerung', - 'D7': 'Eingang DE2 an Erweiterung EA1 meldet eine Stoerung', - 'D8': 'Eingang DE3 an Erweiterung EA1 meldet eine Stoerung', - 'DA': 'Kurzschluss Raumtemperatursensor, Heizkreis M1', - 'DB': 'Kurzschluss Raumtemperatursensor, Heizkreis M2', - 'DC': 'Kurzschluss Raumtemperatursensor, Heizkreis 3', - 'DD': 'Unterbrechung Raumtemperatursensor, Heizkreis M1', - 'DE': 'Unterbrechung Raumtemperatursensor, Heizkreis M2', - 'DF': 'Unterbrechung Raumtemperatursensor, Heizkreis 3', - 'E0': 'Fehler externer LON Teilnehmer', - 'E1': 'Isolationsstrom waehrend des Kalibrierens zu hoch', - 'E3': 'Zu geringe Wärmeabnahme während des Kalibrierens, Temperaturwächter hat ausgeschaltet', - 'E4': 'Fehler Versorgungsspannung', - 'E5': 'Interner Fehler, Flammenverstärker(Ionisationselektrode)', - 'E6': 'Abgas- / Zuluftsystem verstopft, Anlagendruck zu niedrig', - 'E7': 'Ionisationsstrom waehrend des Kalibrierens zu gering', - 'E8': 'Ionisationsstrom nicht im gültigen Bereich', - 'EA': 'Ionisationsstrom waehrend des Kalibrierens nicht im gueltigen Bereich', - 'EB': 'Wiederholter Flammenverlust waehrend des Kalibrierens', - 'EC': 'Parameterfehler waehrend des Kalibrierens', - 'ED': 'Interner Fehler', - 'EE': 'Flammensignal ist bei Brennerstart nicht vorhanden oder zu gering', - 'EF': 'Flammenverlust direkt nach Flammenbildung (waehrend der Sicherheitszeit)', - 'F0': 'Interner Fehler (Regelung tauschen)', - 'F1': 'Abgastemperaturbegrenzer ausgeloest', - 'F2': 'Temperaturbegrenzer ausgeloest', - 'F3': 'Flammensigal beim Brennerstart bereits vorhanden', - 'F4': 'Flammensigal nicht vorhanden', - 'F7': 'Differenzdrucksensor defekt, Kurzschluss ider Wasserdrucksensor', - 'F8': 'Brennstoffventil schliesst zu spaet', - 'F9': 'Geblaesedrehzahl beim Brennerstart zu niedrig', - 'FA': 
'Geblaesestillstand nicht erreicht', - 'FC': 'Gaskombiregler defekt oder fehlerhafte Ansteuerung Modulationsventil oder Abgasweg versperrt', - 'FD': 'Fehler Gasfeuerungsautomat, Kesselkodierstecker fehlt(in Verbindung mit B7)', - 'FE': 'Starkes Stoerfeld (EMV) in der Naehe oder Elektronik defekt', - 'FF': 'Starkes Stoerfeld (EMV) in der Naehe oder interner Fehler' +lookups = { + 'ALL': { + 'devicetypes': { + '2098': 'V200KW2', # Protokoll: KW + '2053': 'GWG_VBEM', # Protokoll: GWG + '20CB': 'VScotHO1', # Protokoll: P300 + '2094': 'V200KW1', # Protokoll: KW + '209F': 'V200KO1B', # Protokoll: P300 + '204D': 'V200WO1C', # Protokoll: P300 + '20B8': 'V333MW1', + '20A0': 'V100GC1', + '20C2': 'VDensHO1', + '20A4': 'V200GW1', + '20C8': 'VPlusHO1', + '2046': 'V200WO1', + '2047': 'V200WO1', + '2049': 'V200WO1', + '2032': 'VBC550', + '2033': 'VBC550' + }, + 'errors': { + '00': 'Regelbetrieb (kein Fehler)', + '0F': 'Wartung (fuer Reset Codieradresse 24 auf 0 stellen)', + '10': 'Kurzschluss Aussentemperatursensor', + '18': 'Unterbrechung Aussentemperatursensor', + '19': 'Unterbrechung Kommunikation Aussentemperatursensor RF', + '1D': 'Keine Kommunikation mit Sensor', + '1E': 'Strömungssensor defekt', + '1F': 'Strömungssensor defekt', + '20': 'Kurzschluss Vorlauftemperatursensor', + '21': 'Kurzschluss Ruecklauftemperatursensor', + '28': 'Unterbrechung Aussentemperatursensor / Vorlauftemperatursensor Anlage', + '29': 'Unterbrechung Ruecklauftemperatursensor', + '30': 'Kurzschluss Kesseltemperatursensor', + '38': 'Unterbrechung Kesseltemperatursensor', + '40': 'Kurzschluss Vorlauftemperatursensor M2', + '42': 'Unterbrechung Vorlauftemperatursensor M2', + '44': 'Kurzschluss Vorlauftemperatursensor Heizkreis 3', + '48': 'Unterbrechung Vorlauftemperatursensor Heizkreis 3', + '50': 'Kurzschluss Speichertemperatursensor', + '51': 'Kurzschluss Auslauftemperatursensor', + '58': 'Unterbrechung Speichertemperatursensor', + '59': 'Unterbrechung Auslauftemperatursensor', + '92': 'Solar: Kurzschluss Kollektortemperatursensor', + '93': 'Solar: Kurzschluss Sensor S3', + '94': 'Solar: Kurzschluss Speichertemperatursensor', + '9A': 'Solar: Unterbrechung Kollektortemperatursensor', + '9B': 'Solar: Unterbrechung Sensor S3', + '9C': 'Solar: Unterbrechung Speichertemperatursensor', + '9E': 'Solar: Zu geringer bzw. kein Volumenstrom oder Temperaturwächter ausgeloest', + '9F': 'Solar: Fehlermeldung Solarteil (siehe Solarregler)', + 'A4': 'Amx. 
Anlagendruck überschritten', + 'A7': 'Bedienteil defekt', + 'A8': 'Luft in der internen Umwaelzpumpe oder Mindest-Volumenstrom nicht erreicht', + 'B0': 'Kurzschluss Abgastemperatursensor', + 'B1': 'Kommunikationsfehler Bedieneinheit', + 'B4': 'Interner Fehler (Elektronik)', + 'B5': 'Interner Fehler (Elektronik)', + 'B6': 'Ungueltige Hardwarekennung (Elektronik)', + 'B7': 'Interner Fehler (Kesselkodierstecker)', + 'B8': 'Unterbrechung Abgastemperatursensor', + 'B9': 'Interner Fehler (Dateneingabe wiederholen)', + 'V': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis M2', + 'BB': 'Kommunikationsfehler Erweiterungssatz fuer Mischerkreis 3', + 'BC': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M1', + 'BD': 'Kommunikationsfehler Fernbedienung Vitorol, Heizkreis M2', + 'BE': 'Falsche Codierung Fernbedienung Vitorol', + 'BF': 'Falsches Kommunikationsmodul LON', + 'C1': 'Externe Sicherheitseinrichtung (Kessel kuehlt aus)', + 'C2': 'Kommunikationsfehler Solarregelung', + 'C3': 'Kommunikationsfehler Erweiterung AM1', + 'C4': 'Kommunikationsfehler Erweiterumg Open Therm', + 'C5': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M1', + 'C6': 'Kommunikationsfehler drehzahlgeregelte Heizkreispumpe, Heizkreis M2', + 'C7': 'Falsche Codierung der Heizkreispumpe', + 'C8': 'Kommunikationsfehler drehzahlgeregelte, externe Heizkreispumpe 3', + 'C9': 'Stoermeldeeingang am Schaltmodul-V aktiv', + 'CD': 'Kommunikationsfehler Vitocom 100 (KM-BUS)', + 'CE': 'Kommunikationsfehler Schaltmodul-V', + 'CF': 'Kommunikationsfehler LON Modul', + 'D1': 'Brennerstoerung', + 'D4': 'Sicherheitstemperaturbegrenzer hat ausgeloest oder Stoermeldemodul nicht richtig gesteckt', + 'D6': 'Eingang DE1 an Erweiterung EA1 meldet eine Stoerung', + 'D7': 'Eingang DE2 an Erweiterung EA1 meldet eine Stoerung', + 'D8': 'Eingang DE3 an Erweiterung EA1 meldet eine Stoerung', + 'D': 'Kurzschluss Raumtemperatursensor, Heizkreis M1', + 'DB': 'Kurzschluss Raumtemperatursensor, Heizkreis M2', + 'DC': 'Kurzschluss Raumtemperatursensor, Heizkreis 3', + 'DD': 'Unterbrechung Raumtemperatursensor, Heizkreis M1', + 'DE': 'Unterbrechung Raumtemperatursensor, Heizkreis M2', + 'DF': 'Unterbrechung Raumtemperatursensor, Heizkreis 3', + 'E0': 'Fehler externer LON Teilnehmer', + 'E1': 'Isolationsstrom waehrend des Kalibrierens zu hoch', + 'E3': 'Zu geringe Wärmeabnahme während des Kalibrierens, Temperaturwächter hat ausgeschaltet', + 'E4': 'Fehler Versorgungsspannung', + 'E5': 'Interner Fehler, Flammenverstärker(Ionisationselektrode)', + 'E6': 'Abgas- / Zuluftsystem verstopft, Anlagendruck zu niedrig', + 'E7': 'Ionisationsstrom waehrend des Kalibrierens zu gering', + 'E8': 'Ionisationsstrom nicht im gültigen Bereich', + 'EA': 'Ionisationsstrom waehrend des Kalibrierens nicht im gueltigen Bereich', + 'EB': 'Wiederholter Flammenverlust waehrend des Kalibrierens', + 'EC': 'Parameterfehler waehrend des Kalibrierens', + 'ED': 'Interner Fehler', + 'EE': 'Flammensignal ist bei Brennerstart nicht vorhanden oder zu gering', + 'EF': 'Flammenverlust direkt nach Flammenbildung (waehrend der Sicherheitszeit)', + 'F0': 'Interner Fehler (Regelung tauschen)', + 'F1': 'Abgastemperaturbegrenzer ausgeloest', + 'F2': 'Temperaturbegrenzer ausgeloest', + 'F3': 'Flammensigal beim Brennerstart bereits vorhanden', + 'F4': 'Flammensigal nicht vorhanden', + 'F7': 'Differenzdrucksensor defekt, Kurzschluss ider Wasserdrucksensor', + 'F8': 'Brennstoffventil schliesst zu spaet', + 'F9': 'Geblaesedrehzahl beim Brennerstart zu niedrig', + 'FA': 
'Geblaesestillstand nicht erreicht', + 'FC': 'Gaskombiregler defekt oder fehlerhafte Ansteuerung Modulationsventil oder Abgasweg versperrt', + 'FD': 'Fehler Gasfeuerungsautomat, Kesselkodierstecker fehlt(in Verbindung mit B7)', + 'FE': 'Starkes Stoerfeld (EMV) in der Naehe oder Elektronik defekt', + 'FF': 'Starkes Stoerfeld (EMV) in der Naehe oder interner Fehler' + }, + 'operatingmodes': { + '00': 'Abschaltbetrieb', + '01': 'Reduzierter Betrieb', + '02': 'Normalbetrieb', + '03': 'Dauernd Normalbetrieb' + }, + 'returnstatus': { + '00': '0', + '01': '1', + '03': '2', + 'AA': 'NOT OK', + }, + 'setreturnstatus': { + '00': 'OK', + '05': 'SYNC (NOT OK)', + } }, -} - -operatingmodes = { 'V200KW2': { - '00': 'Warmwasser (Schaltzeiten)', - '01': 'reduziert Heizen (dauernd)', - '02': 'normal Heizen (dauernd)', - '04': 'Heizen und Warmwasser (FS)', - '03': 'Heizen und Warmwasser (Schaltzeiten)', - '05': 'Standby', + 'operatingmodes': { + '00': 'Warmwasser (Schaltzeiten)', + '01': 'reduziert Heizen (dauernd)', + '02': 'normal Heizen (dauernd)', + '04': 'Heizen und Warmwasser (FS)', + '03': 'Heizen und Warmwasser (Schaltzeiten)', + '05': 'Standby' + }, + 'systemschemes': { + '00': '-', + '01': 'A1', + '02': 'A1 + WW', + '03': 'M2', + '04': 'M2 + WW', + '05': 'A1 + M2', + '06': 'A1 + M2 + WW', + '07': 'M2 + M3', + '08': 'M2 + M3 + WW', + '09': 'M2 + M3 + WW', + '10': 'A1 + M2 + M3 + WW' + }, }, 'V200KO1B': { - '00': 'Warmwasser (Schaltzeiten)', - '01': 'reduziert Heizen (dauernd)', - '02': 'normal Heizen (dauernd)', - '04': 'Heizen und Warmwasser (FS)', - '03': 'Heizen und Warmwasser (Schaltzeiten)', - '05': 'Standby', - }, - 'aktuelle_Betriebsart': { - '00': 'Abschaltbetrieb', - '01': 'Reduzierter Betrieb', - '02': 'Normalbetrieb', - '03': 'Dauernd Normalbetrieb', + 'operatingmodes': { + '00': 'Warmwasser (Schaltzeiten)', + '01': 'reduziert Heizen (dauernd)', + '02': 'normal Heizen (dauernd)', + '04': 'Heizen und Warmwasser (FS)', + '03': 'Heizen und Warmwasser (Schaltzeiten)', + '05': 'Standby' + }, + 'systemschemes': { + '01': 'A1', + '02': 'A1 + WW', + '04': 'M2', + '03': 'M2 + WW', + '05': 'A1 + M2', + '06': 'A1 + M2 + WW' + } }, 'V200WO1C': { - '00': 'Abschaltbetrieb', - '01': 'Warmwasser', - '02': 'Heizen und Warmwasser', - '03': 'undefiniert', - '04': 'dauernd reduziert', - '05': 'dauernd normal', - '06': 'normal Abschalt', - '07': 'nur kühlen', + 'operatingmodes': { + '00': 'Abschaltbetrieb', + '01': 'Warmwasser', + '02': 'Heizen und Warmwasser', + '03': 'undefiniert', + '04': 'dauernd reduziert', + '05': 'dauernd normal', + '06': 'normal Abschalt', + '07': 'nur kühlen' + }, + 'systemschemes': { + '01': 'WW', + '02': 'HK + WW', + '04': 'HK + WW', + '05': 'HK + WW' + }, }, 'V200HO1C': { - '00': 'Abschaltbetrieb', - '01': 'Warmwasser', - '02': 'Heizen und Warmwasser', - '03': 'Normal reduziert', - '04': 'Normal dauernd' + 'operatingmodes': { + '00': 'Abschaltbetrieb', + '01': 'Warmwasser', + '02': 'Heizen und Warmwasser', + '03': 'Normal reduziert', + '04': 'Normal dauernd' + }, + 'systemschemes': { + '01': 'WW', + '02': 'HK + WW', + '04': 'HK + WW', + '05': 'HK + WW' + } } } - -systemschemes = { - 'V200KW2': { - '00': '-', - '01': 'A1', - '02': 'A1 + WW', - '03': 'M2', - '04': 'M2 + WW', - '05': 'A1 + M2', - '06': 'A1 + M2 + WW', - '07': 'M2 + M3', - '08': 'M2 + M3 + WW', - '09': 'M2 + M3 + WW', - '10': 'A1 + M2 + M3 + WW' - }, - 'V200KO1B': { - '01': 'A1', - '02': 'A1 + WW', - '04': 'M2', - '03': 'M2 + WW', - '05': 'A1 + M2', - '06': 'A1 + M2 + WW' - }, - 'V200WO1C': { - '01': 'WW', - '02': 
'HK + WW', - '04': 'HK + WW', - '05': 'HK + WW' - }, - 'V200HO1C': { - '01': 'WW', - '02': 'HK + WW', - '04': 'HK + WW', - '05': 'HK + WW' - } -} - -devicetypes = { - '2098': 'V200KW2', # Protokoll: KW - '2053': 'GWG_VBEM', # Protokoll: GWG - '20CB': 'VScotHO1', # Protokoll: P300 - '2094': 'V200KW1', # Protokoll: KW - '209F': 'V200KO1B', # Protokoll: P300 - '204D': 'V200WO1C', # Protokoll: P300 - '20B8': 'V333MW1', - '20A0': 'V100GC1', - '20C2': 'VDensHO1', - '20A4': 'V200GW1', - '20C8': 'VPlusHO1', - '2046': 'V200WO1', - '2047': 'V200WO1', - '2049': 'V200WO1', - '2032': 'VBC550', - '2033': 'VBC550' -} - -returnstatus = { - 'P300': { - '00': '0', - '01': '1', - '03': '2', - 'AA': 'NOT OK', - # At least for device 20CB the heating circuit pump returns status 03 when it's on and the heating runs in in night mode - }, - 'KW': { - '00': '0', - '01': '1', - '03': '2', - 'AA': 'NOT OK', - }, -} - -setreturnstatus = { - 'P300': { - '00': 'OK', - '05': 'SYNC (NOT OK)', - }, - 'KW': { - '00': 'OK', - '05': 'SYNC (NOT OK)', - }, -} - - -# P300 Protokoll -# -# Beispiel -# -# Senden 41 5 0 1 55 25 2 82 -# Read Request -- - - - ----- - -- -# | | | | | | +------- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]5+0+1+55+25+2 = [dez]5+0+1+(5x16)+5+(2x16)+5+2 = 130dez = 82hex -# | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden -# | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur -# | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call -# | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler -# | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) -# +------------------------ Telegramm-Start-Byte -# -# Empfangen : 6 ----------------------- OK (Antwort auf 0x16 0x00 0x00 und auf korrekt empfangene Telegramme) -# 5 ----------------------- Schnittstelle ist aktiv und wartet auf eine Initialisierung -# 15 ----------------------- Schnittstelle meldet einen Fehler zurück -# -# 41 7 1 1 55 25 2 EF 0 74 -# -- - - - ----- - ---- -- -# | | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]7+1+1+55+25+2+EF+0 = [dez]7+1+1+(5x16)+5+(2x16)+5+2+(14*16)+(15*16)+0 = [dez]7+1+1+(80)+5+(32)+5+2+(224)+(15)+0 = 372dez = 1.74hex) -# | | | | | | +------ Wert -# | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden -# | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur -# | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call -# | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler -# | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) -# +------------------------ Telegramm-Start-Byte -# Kommunikationsbeispiele -# Information Kessel Außentemperatur read 2-Byte -60..60 0x5525 -# DATA TX: 41 5 0 1 55 25 2 82 -# DATA RX: 41 7 1 1 55 25 2 EF 0 74 --> 00EF = 239 --> 23.9°C (Faktor 0.1) -# --> Senden 41 5 0 1 55 25 2 82 -# -- - - - ----- - -- -# | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]5+0+1+55+25+2 = [dez]5+0+1+(5x16)+5+(2x16)+5+2 = 130dez = 82hex -# | | | | | +----- XX Anzahl der Bytes, die in der Antwort erwartet werden -# | | | | +--------- XX XX 2 byte Adresse der Daten oder Prozedur -# | | | +------------- XX 01 = ReadData, 02 = WriteData, 07 = Function Call -# | | +--------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler -# | +----------------- Länge der Nutzdaten (Anzahl der 
Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) -# +------------------- Telegramm-Start-Byte -# -# --> Empfangen 6 41 7 1 1 55 25 2 EF 0 74 -# - -- - - - ----- - ---- -- -# | | | | | | | | +-- Prüfsumme (Summe über alley Bytes ohne die 41; [hex]7+1+1+55+25+2+EF+0 = [dez]7+1+1+(5x16)+5+(2x16)+5+2+(14*16)+(15*16)+0 = [dez]7+1+1+(80)+5+(32)+5+2+(224)+(15)+0 = 372dez = 1.74hex) -# | | | | | | | +------ Wert -# | | | | | | +---------- XX Anzahl der Bytes, die in der Antwort erwartet werden -# | | | | | +-------------- XX XX 2 byte Adresse der Daten oder Prozedur -# | | | | +------------------ XX 01 = ReadData, 02 = WriteData, 07 = Function Call -# | | | +-------------------- XX 00 = Anfrage, 01 = Antwort, 03 = Fehler -# | | +---------------------- Länge der Nutzdaten (Anzahl der Bytes zwischen dem Telegramm-Start-Byte (0x41) und der Prüfsumme) -# | +------------------------ Telegramm-Start-Byte -# +--------------------------- OK (Antwort auf 0x16 0x00 0x00 und auf korrekt empfangene Telegramme) -# -# --> Antwort: 0x00EF = 239 = 23.9° diff --git a/viessmann/datatypes.py b/viessmann/datatypes.py new file mode 100644 index 000000000..18505599a --- /dev/null +++ b/viessmann/datatypes.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab + +import lib.model.sdp.datatypes as DT + +import re +import dateutil +import datetime + + +# V = Viessmann, generic numeric type +class DT_V(DT.Datatype): + def get_send_data(self, data, **kwargs): + if data is None: + return None + + len = kwargs.get('len', 1) + signed = kwargs.get('signed', False) + mult = kwargs.get('mult', 0) + if mult: + data = int(float(data) * float(mult)) + bytes = int2bytes(data, len, signed) + return bytes + + def get_shng_data(self, data, type=None, **kwargs): + signed = kwargs.get('signed', False) + mult = kwargs.get('mult', 0) + + val = bytes2int(data, signed) + if mult: + val = round(float(val) / float(mult), 2) + if isinstance(mult, int): + val = int(val) + + if type is None: + return val + else: + return type(val) + + +# S = serial +class DT_S(DT_V): + def get_send_data(self, data, **kwargs): + raise RuntimeError('write of serial number not possible') + + def get_shng_data(self, data, type=None, **kwargs): + b = data[:7] + sn = 0 + b.reverse() + for byte in range(0, len(b)): + sn += (b[byte] - 48) * 10 ** byte + return hex(sn).upper() + + +# T = time +class DT_T(DT_V): + def get_send_data(self, data, **kwargs): + try: + datestring = dateutil.parser.isoparse(data).strftime('%Y%m%d%w%H%M%S') + # Viessmann erwartet 2 digits für Wochentag, daher wird hier noch eine 0 eingefügt + datestring = datestring[:8] + '0' + datestring[8:] + valuebytes = bytes.fromhex(datestring) + self.logger.debug(f'created value bytes as bytes: {valuebytes}') + return valuebytes + except Exception as e: + raise ValueError(f'incorrect data format, YYYY-MM-DD expected. Error was: {e}') + + def get_shng_data(self, data, type=None, **kwargs): + return datetime.strptime(data.hex(), '%Y%m%d%W%H%M%S').isoformat() + + +# D = date +class DT_D(DT_T): + def get_shng_data(self, data, type=None, **kwargs): + return datetime.strptime(data.hex(), '%Y%m%d%W%H%M%S').date().isoformat() + + +# C = control timer (?) 
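DT_T packs a date/time value into BCD-like bytes for the device: the ISO input is rendered as YYYYMMDDwHHMMSS, the single-digit weekday is padded to two digits, and the resulting string is read as hex. Below is a standalone sketch of the write direction only; encode_system_time is an illustrative name, not a plugin function. Note that the read path of DT_T/DT_D parses with '%W' instead of the '%w' used when encoding and calls datetime.strptime although only the datetime module is imported, so it would likely need datetime.datetime.strptime.

# Illustrative sketch of the write-direction packing done by DT_T:
import dateutil.parser

def encode_system_time(isodate: str) -> bytes:
    s = dateutil.parser.isoparse(isodate).strftime('%Y%m%d%w%H%M%S')
    s = s[:8] + '0' + s[8:]          # weekday padded to two digits
    return bytes.fromhex(s)

# '2023-04-01T10:30:00' (a Saturday, so %w == 6) becomes
# bytes 20 23 04 01 06 10 30 00
print(encode_system_time('2023-04-01T10:30:00').hex())  # '2023040106103000'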
+class DT_C(DT_V): + def get_send_data(self, data, **kwargs): + try: + times = '' + for switching_time in data: + an = encode_timer(switching_time['An']) + aus = encode_timer(switching_time['Aus']) + times += f'{an:02x}{aus:02x}' + valuebytes = bytes.fromhex(times) + self.logger.debug(f'created value bytes as hexstring: {bytes2hexstring(valuebytes)} and as bytes: {valuebytes}') + except Exception as e: + raise ValueError(f'incorrect data format, (An: hh:mm Aus: hh:mm) expected. Error was: {e}') + + def get_shng_data(self, data, type=None, **kwargs): + timer = self._decode_timer(data.hex()) + return [{'An': on_time, 'Aus': off_time} for on_time, off_time in zip(timer, timer)] + + +# H = hex +class DT_H(DT_V): + def get_send_data(self, data, **kwargs): + if isinstance(data, str): + try: + data = int(data, 16) + except ValueError: + pass + + return super().get_send_data(data, **kwargs) + + def get_shng_data(self, data, type=None, **kwargs): + return data.hex() + # return ' '.join([hexstr[i:i + 2] for i in range(0, len(hexstr), 2)]) + + +""" +'BA': {'unit_de': 'Betriebsart', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: BA +'BT': {'unit_de': 'Brennertyp', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: +'CT': {'unit_de': 'CycleTime', 'type': 'timer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: CT +'DA': {'unit_de': 'Date', 'type': 'date', 'signed': False, 'read_value_transform': 'non'}, # vito unit: +'DT': {'unit_de': 'DeviceType', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: DT +'ES': {'unit_de': 'ErrorState', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ES +'HEX': {'unit_de': 'HexString', 'type': 'string', 'signed': False, 'read_value_transform': 'hex'}, # vito unit: +'IS10': {'unit_de': 'INT signed 10', 'type': 'integer', 'signed': True, 'read_value_transform': '10'}, # vito unit: UT, UN +'IS100': {'unit_de': 'INT signed 100', 'type': 'integer', 'signed': True, 'read_value_transform': '100'}, # vito unit: +'IS1000': {'unit_de': 'INT signed 1000', 'type': 'integer', 'signed': True, 'read_value_transform': '1000'}, # vito unit: +'IS2': {'unit_de': 'INT signed 2', 'type': 'integer', 'signed': True, 'read_value_transform': '2'}, # vito unit: UT1, PR +'ISNON': {'unit_de': 'INT signed non', 'type': 'integer', 'signed': True, 'read_value_transform': 'non'}, # vito unit: +'IU10': {'unit_de': 'INT unsigned 10', 'type': 'integer', 'signed': False, 'read_value_transform': '10'}, # vito unit: +'IU100': {'unit_de': 'INT unsigned 100', 'type': 'integer', 'signed': False, 'read_value_transform': '100'}, # vito unit: +'IU1000': {'unit_de': 'INT unsigned 1000', 'type': 'integer', 'signed': False, 'read_value_transform': '1000'}, # vito unit: +'IU2': {'unit_de': 'INT unsigned 2', 'type': 'integer', 'signed': False, 'read_value_transform': '2'}, # vito unit: UT1U, PR1 +'IU3600': {'unit_de': 'INT unsigned 3600', 'type': 'integer', 'signed': False, 'read_value_transform': '3600'}, # vito unit: CS +'IUBOOL': {'unit_de': 'INT unsigned bool', 'type': 'integer', 'signed': False, 'read_value_transform': 'bool'}, # vito unit: +'IUINT': {'unit_de': 'INT unsigned int', 'type': 'integer', 'signed': False, 'read_value_transform': '1'}, # vito unit: +'IUNON': {'unit_de': 'INT unsigned non', 'type': 'integer', 'signed': False, 'read_value_transform': 'non'}, # vito unit: UTI, CO +'IUPR': {'unit_de': 'INT unsigned 2.55', 'type': 'integer', 'signed': False, 'read_value_transform': 
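DT_C serializes a list of {'An': ..., 'Aus': ...} switching times into one byte per time via encode_timer/decode_timer (defined further down in this file): the hour is stored as value * 8 plus the minutes in 10-minute steps, and 0xff marks an unused slot ('00:00'). The sketch below uses illustrative helper names (to_byte/from_byte are not plugin functions). Note that get_send_data above builds valuebytes but never returns it, and get_shng_data references self._decode_timer although the helper is the module-level decode_timer; both look like leftovers from the pre-SDP plugin.

# Illustrative sketch of the switching-time byte format used by DT_C:
def to_byte(t: str) -> int:
    if t == '00:00':
        return 0xff                      # unused slot
    h, m = t.split(':')
    return int(h) * 8 + int(m) // 10     # hours * 8 + minutes in 10-minute steps

def from_byte(b: int) -> str:
    h, m = divmod(b, 8)
    return '00:00' if h >= 24 or m >= 6 else f'{h:02d}:{m * 10:02d}'

print(hex(to_byte('06:30')))   # 0x33
print(from_byte(0x33))         # '06:30'
print(from_byte(0xff))         # '00:00' (slot unused)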
'2.55'}, # vito unit: PP +'RT': {'unit_de': 'ReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: ST, RT +'SC': {'unit_de': 'SystemScheme', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: +'SN': {'unit_de': 'Sachnummer', 'type': 'serial', 'signed': False, 'read_value_transform': 'non'}, # vito unit: +'SR': {'unit_de': 'SetReturnStatus', 'type': 'list', 'signed': False, 'read_value_transform': 'non'}, # vito unit: +'TI': {'unit_de': 'SystemTime', 'type': 'datetime', 'signed': False, 'read_value_transform': 'non'}, # vito unit: TI +""" + + +def int2bytes(value, length=0, signed=False): + """ convert value to bytearray, see MD_Command.py """ + if not length: + length = len(value) + value = value % (2 ** (length * 8)) + return value.to_bytes(length, byteorder='big', signed=signed) + + +def bytes2int(rawbytes, signed): + """ convert bytearray to value, see MD_Command.py """ + return int.from_bytes(rawbytes, byteorder='little', signed=signed) + + +def bytes2hexstring(bytesvalue): + """ create hex-string from bytearray, see MD_Command.py """ + return ''.join(f'{c:02x}' for c in bytesvalue) + + +def decode_timer(rawdatabytes): + """ generator to convert byte sequence to a number of time strings """ + while rawdatabytes: + hours, minutes = divmod(int(rawdatabytes[:2], 16), 8) + if minutes >= 6 or hours >= 24: + # not a valid time + yield '00:00' + else: + yield f'{hours:02d}:{(minutes * 10):02d}' + rawdatabytes = rawdatabytes[2:] + return None + + +def encode_timer(switching_time): + """ convert time string to encoded time value """ + if switching_time == '00:00': + return 0xff + clocktime = re.compile(r'(\d\d):(\d\d)') + mo = clocktime.search(switching_time) + number = int(mo.group(1)) * 8 + int(mo.group(2)) // 10 + return number diff --git a/viessmann/plugin.yaml b/viessmann/plugin.yaml old mode 100755 new mode 100644 index a77d0a8ed..bcc2e307c --- a/viessmann/plugin.yaml +++ b/viessmann/plugin.yaml @@ -1,231 +1,4197 @@ %YAML 1.1 -# vim: set et ts=4 sts=4 sw=4 ai ff=unix nu wrap : --- -# Metadata for the Smart-Plugin plugin: - # Global plugin attributes - type: interface # plugin type (gateway, interface, protocol, system, web) + type: gateway + description: - de: 'Lesen und Schreiben von Werten einer Viessmann Heizung' - en: 'Read and write data of a Viessmann heating system' + de: Vde + en: Ven maintainer: Morg - tester: sisamiwe, tcr82 - keywords: viessmann heating optolink - state: ready # change to ready when done with development - version: 1.2.3 # Plugin version - sh_minversion: 1.6.0 # minimum shNG version to use this plugin + state: develop + keywords: iot device + version: 1.3.0 + sh_minversion: 1.9.3.1 py_minversion: 3.6 - multi_instance: false # plugin supports multi instance + multi_instance: false restartable: true - classname: Viessmann # class containing the plugin - support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1455991-viessmann-plugin-neuentwicklung-python-hilfe/ + classname: sdp_viessmann parameters: - # Definition of parameters to be configured in etc/plugin.yaml + serialport: type: str - default: '' - description: - de: 'Serieller Port, an dem der Lesekopf angeschlossen ist' - en: 'Serial port the device is connected to' + mandatory: true + description: Serieller Port + + model: + type: foo + default: None + description: Gerätemodell - heating_type: + viess_proto: type: str - default: '' - description: - de: 'Gerätetype der Heizung' - en: 'Device type of heating system' + default: P300 
+ valid_list: + - P300 + - KW + description: Kommunikationsprotokoll der Heizung - protocol: + command_class: type: str - default: 'P300' - valid list: - - 'P300' - - 'KW' - description: - de: 'Protokoll der Heizung' - en: 'Protocol of heating system' + default: SDPCommandViessmann + description: Name der Klasse für Commands - timeout: - type: num - default: 1.5 - description: - de: 'Zeitbegrenzung für das Lesen vom seriellen Port in Sekunden' - en: 'Timeout for serial read operations in seconds' + clean_structs: + type: bool + default: true + description: Entferne Struct-Elemente, die vom gewählten Modell nicht unterstützt werden item_attributes: - # Definition of item attributes defined by this plugin - viess_send: - type: str - description: - de: 'Änderung des Items wird mit konfiguriertem Kommando an die Heizung geschickt' - en: 'Changes to this item result in sending the configured command to the heating system' - viess_read: + md_command: type: str - description: - de: 'Liest Wert mit konfiguriertem Kommando aus der Heizung aus' - en: 'The item value should be read by using the configured command' - - viess_read_afterwrite: - type: num - description: - de: 'Konfiguriert eine Verzögerung in Sekunden nachdem ein Lesekommando nach einem Schreibkommando an die Heizung geschickt wird' - en: 'Configures delay in seconds to issue a read command after write command' - viess_read_cycle: - type: num description: - de: 'Konfiguriert ein Intervall in Sekunden für das Lesekommando' - en: 'Configures a interval in seconds for the read command' + de: Legt das angegebene Kommando für das Item fest + en: Assigns the given command to the item - viess_init: + md_read: type: bool + description: - de: 'Konfiguriert, ob der Wert aus der Heizung initialisiert werden soll' - en: 'Configures to initialize the item value with the value from the KWL system' + de: Liest/erhält Werte vom Gerät + en: Reads/receives data from the device - viess_trigger: + md_read_group: type: list(str) + description: - de: 'Konfiguriert Lesekommandos, die nach einem Schreibvorgang auf das Item aufgerufen werden' - en: 'Configures read commands after an update to the item' + de: Weist das Item der angegebenen Gruppe zum gesammelten Lesen zu. Gruppe kann int oder str sein, mehrere Gruppen können als Liste angegeben werden. + en: Assigns the item to the given group for collective reading. Groups can be int or str, multiple groups can be provided as a list. - viess_trigger_afterwrite: + md_read_cycle: type: num + description: - de: 'Konfiguriert eine Verzögerung in Sekunden, bis ein Trigger ausgeführt werden soll, nachdem ein Wert gesetzt wurde' - en: 'Configures delay in seconds to run trigger commands after item update' + de: Konfiguriert ein Intervall in Sekunden für regelmäßiges Lesen + en: Configures a interval in seconds for cyclic read actions - viess_update: + md_read_initial: type: bool - description: - de: 'Liest alle konfigurierten Items neu, wenn es auf True gesetzt wird' - en: 'Triggers reading of all configured items if set to True' - viess_timer: - type: str description: - de: 'Liest alle Timer zur übergebenen Anwendung (z.B. Heizkreis_A1M1) und stellt diese für die Nutzung mit UZSU zur Verfügung' - en: 'Provides an UZSU-compatible dict with all timers for the given application (e.g. 
Heizkreis_A1M1)' + de: Legt fest, dass der Wert beim Start vom Gerät gelesen wird + en: Sets item value to be read from the device on startup - viess_ba_list: + md_write: type: bool + description: - de: 'Gibt nach der Initialisierung eine Liste aller für die konfigurierte Heizung gültigen Betriebsarten zurück' - en: 'Returns a list of valid operating modes for the configured device type after initialization' + de: Änderung des Items werden an das Gerät gesendet + en: Changes to this item will be sent to the device -item_structs: - timer: - name: Schaltzeiten in Einzelzeiten fuer An und Aus - - an1: - name: erste Anschaltzeit - type: str - visu_acl: rw - - aus1: - name: erste Ausschaltzeit - type: str - visu_acl: rw - - an2: - name: zweite Anschaltzeit - type: str - visu_acl: rw - - aus2: - name: zweite Ausschaltzeit - type: str - visu_acl: rw - - an3: - name: dritte Anschaltzeit - type: str - visu_acl: rw - - aus3: - name: dritte Ausschaltzeit - type: str - visu_acl: rw - - an4: - name: vierte Anschaltzeit - type: str - visu_acl: rw - - aus4: - name: vierte Ausschaltzeit - type: str - visu_acl: rw - - betriebsart: - name: Betriebsart in string wandeln - - betriebsart_str: - type: str - eval: "'Neustart' if value == '' else ['Standby', 'Warmwasser (Schaltzeiten)', 'Heizen und Warmwasser (Schaltzeiten)', 'reduziert Heizen (dauernd)', 'normal Heizen (dauernd)'][int(value)]" - eval_trigger: .. + md_read_group_trigger: + type: str -logic_parameters: NONE -# Definition of logic parameters defined by this plugin + description: + de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit md_command belegt werden. + en: When set to any value, all items configured for reading for the given group will update their value from the device, if group is 0, all items configured for reading will update. The item cannot be used with md_command in parallel. + + md_lookup: + type: str -plugin_functions: - update_all_read_items: - type: NONE description: - de: 'Stößt das Lesen aller konfigurierten Items an' - en: 'Triggers reading of all configured items' - read_addr: - type: foo + de: Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim Start einmalig als dict oder list in das Item geschrieben. + en: The lookup table with the given name will be assigned to the item in dict or list format once on startup. 
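md_lookup can hand an item different representations of a lookup table, selected by appending '#' plus fwd, rev, rci or list to the table name (see the description_long that follows). As a rough illustration of the four forms, using two entries of the 'operatingmodes' table from commands.py; the variable names are assumptions, not the plugin's internals.

# Illustrative sketch of the four table forms described for md_lookup:
fwd = {'00': 'Abschaltbetrieb', '01': 'Reduzierter Betrieb'}   # device -> SmartHomeNG (default, fwd)
rev = {v: k for k, v in fwd.items()}                           # SmartHomeNG -> device (rev)
rci = {v.lower(): k for k, v in fwd.items()}                   # as rev, keys lower-cased (rci)
names = list(fwd.values())                                     # list of SmartHomeNG names (list)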
+ + description_long: + de: "Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim\nStart einmalig als dict oder list in das Item geschrieben.\n\n\nDurch Anhängen von \"#\" an den Namen der Tabelle kann die Art\nder Tabelle ausgewählt werden:\n- fwd liefert die Tabelle Gerät -> SmartHomeNG (Standard)\n- rev liefert die Tabelle SmartHomeNG -> Gerät\n- rci liefert die Tabelle SmarthomeNG -> Gerät in Kleinbuchstaben\n- list liefert die Liste der Namen für SmartHomeNG" + en: "The lookup table with the given name will be assigned to the item\nin dict or list format once on startup.\n\n\nBy appending \"#\" to the tables name the type of table can\nbe selected:\n- fwd returns the table device -> SmartHomeNG (default)\n- rev returns the table SmartHomeNG -> device\n- rci returns the table SmartHomeNG -> device in lower case\n- list return the list of names for SmartHomeNG" + + md_custom1: + type: str + description: - de: 'Stößt das Lesen des angegebenen Datenpunkts an, der nicht an ein Item gebunden sein muss. Es erfolgt keine Zuweisung an ein Item. Rückgabewert ist der gelesene Wert, oder NONE bei Fehler' - en: 'Triggers reading of the supplied data point, which doesn''t have to be bound to an item. Result will not be assigned to an item. Return value is the read value, or NONE if an error occurred' - parameters: - addr: - type: str - description: - de: 'Vierstellige Hex-Adresse des Datenpunktes' - en: 'Four-digit hex address of the data point' - read_temp_addr: - type: foo + de: Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. + en: For custom use of each device respectively. + + description_long: + de: 'Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. Durch den Parameter "recursive_custom: 1" in der Geräte-Konfiguration wird der Wert rekursiv für alle Unteritems gesetzt.' + en: 'For custom use of each device respectively. By setting "recursive_custom: 1" in the device configuration, the value of this attribute will be set for all sub-items.' + + md_custom2: + type: str + description: - de: 'Stößt das Lesen eines beliebigen Datenpunkts an, der nicht konfiguriert oder bekannt sein muss. Es erfolgt keine Zuweisung an ein Item. Rückgabewert ist der gelesene Wert, oder NONE bei Fehler' - en: 'Triggers reading of an arbitrary data point, which doesn''t have to be configured or known. Result will not be assigned to an item. Return value is the read value, or NONE if an error occurred' - parameters: - addr: - type: str - mandatory: yes - description: - de: 'Vierstellige Hex-Adresse des Datenpunktes' - en: 'Four-digit hex address of the data point' - length: - type: int - mandatory: yes - description: - de: 'Länge der Geräteantwort in Bytes (1-8)' - en: 'Lengh of device response in bytes (1-8)' - valid_min: 1 - valid_max: 8 - unit: - type: str - mandatory: yes - description: - de: 'Einheitencode für die Konvertierung der Antwort. Muss in der Protokollkonfiguration ``unitset`` in commands.py definiert sein' - en: 'Unit code for converting the response value. Needs to be defined in the protocol configuration ``unitset`` in commands.py' - write_addr: - type: foo + de: Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. + en: For custom use of each device respectively. + + description_long: + de: 'Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. 
Durch den Parameter "recursive_custom: 2" in der Geräte-Konfiguration wird der Wert rekursiv für alle Unteritems gesetzt.' + en: 'For custom use of each device respectively. By setting "recursive_custom: 2" in the device configuration, the value of this attribute will be set for all sub-items.' + + md_custom3: + type: str + description: - de: 'Stößt das Schreiben des angegebenen Datenpunkts an, der nicht an ein Item gebunden sein muss. Der übergebene Wert muss zum konfigurierten Datentyp passen' - en: 'Triggers writing of the supplied data point, which doesn''t have to be bound to an item. The submitted value must match the configured data type' - parameters: - addr: + de: Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. + en: For custom use of each device respectively. + + description_long: + de: 'Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. Durch den Parameter "recursive_custom: 3" in der Geräte-Konfiguration wird der Wert rekursiv für alle Unteritems gesetzt.' + en: 'For custom use of each device respectively. By setting "recursive_custom: 3" in the device configuration, the value of this attribute will be set for all sub-items.' + +item_structs: + + ALL: + + Anlagentyp: + type: num + md_command: Anlagentyp + md_read: true + md_write: false + md_read_group: + - ALL + md_read_initial: true + + V200KO1B: + + Anlagentyp: + type: num + md_command: Anlagentyp + md_read: true + md_write: false + md_read_group: + - V200KO1B + md_read_initial: true + + Allgemein: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein.Temperatur + + Aussen: + type: num + md_command: Allgemein.Temperatur.Aussen + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Aussen_TP: + type: num + md_command: Allgemein.Temperatur.Aussen_TP + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Aussen_Dp: + type: num + md_command: Allgemein.Temperatur.Aussen_Dp + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Speicher_Ladesensor: + type: num + md_command: Allgemein.Temperatur.Speicher_Ladesensor + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Auslauf: + type: num + md_command: Allgemein.Temperatur.Auslauf + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Abgas: + type: num + md_command: Allgemein.Temperatur.Abgas + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Gem_Vorlauf: + type: num + md_command: Allgemein.Temperatur.Gem_Vorlauf + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + - Allgemein.Temperatur + + Relais_K12: + type: bool + md_command: Allgemein.Relais_K12 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + Eingang_0-10_V: + type: bool + md_command: Allgemein.Eingang_0-10_V + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + EA1_Kontakt_0: + type: bool + md_command: Allgemein.EA1_Kontakt_0 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + EA1_Kontakt_1: + type: bool + md_command: Allgemein.EA1_Kontakt_1 + md_read: true + md_write: false + md_read_group: + - 
V200KO1B + - Allgemein + + EA1_Kontakt_2: + type: bool + md_command: Allgemein.EA1_Kontakt_2 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + EA1_Externer_Soll_0-10V: + type: bool + md_command: Allgemein.EA1_Externer_Soll_0-10V + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + EA1_Relais_0: + type: bool + md_command: Allgemein.EA1_Relais_0 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + AM1_Ausgang_1: + type: bool + md_command: Allgemein.AM1_Ausgang_1 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + AM1_Ausgang_2: + type: bool + md_command: Allgemein.AM1_Ausgang_2 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + TempKOffset: + type: num + md_command: Allgemein.TempKOffset + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Allgemein + + Systemtime: + type: bool + md_command: Allgemein.Systemtime + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Allgemein + + Anlagenschema: + type: num + md_command: Allgemein.Anlagenschema + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + Inventory: type: str - description: - de: 'Vierstellige Hex-Adresse des Datenpunktes' - en: 'Four-digit hex address of the data point' - value: - description: - de: 'Zu schreibender Wert' - en: 'Value to be written' + md_command: Allgemein.Inventory + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + CtrlId: + type: num + md_command: Allgemein.CtrlId + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Allgemein + + Kessel: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Kessel + + Ist: + type: num + md_command: Kessel.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Kessel + + TP: + type: num + md_command: Kessel.TP + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Kessel + + Soll: + type: num + md_command: Kessel.Soll + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Kessel + + Fehler: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Fehler + + Sammelstoerung: + type: num + md_command: Fehler.Sammelstoerung + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error0: + type: num + md_command: Fehler.Error0 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error1: + type: num + md_command: Fehler.Error1 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error2: + type: num + md_command: Fehler.Error2 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error3: + type: num + md_command: Fehler.Error3 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error4: + type: num + md_command: Fehler.Error4 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error5: + type: num + md_command: Fehler.Error5 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error6: + type: num + md_command: Fehler.Error6 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error7: + type: num + md_command: Fehler.Error7 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error8: + type: num + md_command: Fehler.Error8 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Error9: + type: num + md_command: 
Fehler.Error9 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Fehler + + Pumpen: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Pumpen + + Speicherlade: + type: bool + md_command: Pumpen.Speicherlade + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Zirkulation: + type: bool + md_command: Pumpen.Zirkulation + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Intern: + type: bool + md_command: Pumpen.Intern + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Heizkreis_A1M1: + type: bool + md_command: Pumpen.Heizkreis_A1M1 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Heizkreis_A1M1_RPM: + type: bool + md_command: Pumpen.Heizkreis_A1M1_RPM + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Heizkreis_M2: + type: bool + md_command: Pumpen.Heizkreis_M2 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Heizkreis_M2_RPM: + type: bool + md_command: Pumpen.Heizkreis_M2_RPM + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Relais_Status: + type: bool + md_command: Pumpen.Relais_Status + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Pumpen + + Brenner: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Brenner + + Starts: + type: num + md_command: Brenner.Starts + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Brenner + + Betriebsstunden: + type: num + md_command: Brenner.Betriebsstunden + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Brenner + + Status_1: + type: bool + md_command: Brenner.Status_1 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Brenner + + Status_2: + type: bool + md_command: Brenner.Status_2 + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Brenner + + Oeldurchsatz: + type: num + md_command: Brenner.Oeldurchsatz + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Brenner + + Oelverbrauch: + type: num + md_command: Brenner.Oelverbrauch + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Brenner + + Solar: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Solar + + Nachladeunterdrueckung: + type: bool + md_command: Solar.Nachladeunterdrueckung + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Pumpe: + type: bool + md_command: Solar.Pumpe + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Kollektortemperatur: + type: num + md_command: Solar.Kollektortemperatur + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Speichertemperatur: + type: num + md_command: Solar.Speichertemperatur + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Betriebsstunden: + type: num + md_command: Solar.Betriebsstunden + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Steuerung: + type: num + md_command: Solar.Steuerung + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Solar + + Heizkreis: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Temperatur + + Raum: + + read: + type: bool + enforce_updates: true + 
md_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum + + Ist: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Soll_Normalbetrieb: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Normalbetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Soll_Red_Betrieb: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Red_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Soll_Party_Betrieb: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Vorlauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf + + Ist: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Soll: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Min: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Min + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Max: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Max + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Erhoehung_Soll: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Soll + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Erhoehung_Zeit: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Zeit + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Grenze_red_Betrieb: + type: num + md_command: Heizkreis.A1M1.Temperatur.Grenze_red_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + + Grenze_red_Raumtemp: + type: num + md_command: Heizkreis.A1M1.Temperatur.Grenze_red_Raumtemp + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + + Status: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Status + + Aktuelle_Betriebsart: + type: num + md_command: Heizkreis.A1M1.Status.Aktuelle_Betriebsart + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Betriebsart: + type: num + md_command: Heizkreis.A1M1.Status.Betriebsart + md_read: true + md_write: true + md_read_group: + - 
V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Sparbetrieb: + type: num + md_command: Heizkreis.A1M1.Status.Sparbetrieb + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Zustand_Sparbetrieb: + type: num + md_command: Heizkreis.A1M1.Status.Zustand_Sparbetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Partybetrieb: + type: num + md_command: Heizkreis.A1M1.Status.Partybetrieb + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Zustand_Partybetrieb: + type: num + md_command: Heizkreis.A1M1.Status.Zustand_Partybetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + StatusFrost: + type: num + md_command: Heizkreis.A1M1.Status.StatusFrost + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Externe_Raumsolltemperatur_Normal: + type: num + md_command: Heizkreis.A1M1.Status.Externe_Raumsolltemperatur_Normal + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Externe_Betriebsartenumschaltung: + type: num + md_command: Heizkreis.A1M1.Status.Externe_Betriebsartenumschaltung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Speichervorrang: + type: num + md_command: Heizkreis.A1M1.Status.Speichervorrang + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Frostschutzgrenze: + type: num + md_command: Heizkreis.A1M1.Status.Frostschutzgrenze + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Frostschutz: + type: num + md_command: Heizkreis.A1M1.Status.Frostschutz + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Heizkreispumpenlogik: + type: num + md_command: Heizkreis.A1M1.Status.Heizkreispumpenlogik + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Sparschaltung: + type: num + md_command: Heizkreis.A1M1.Status.Sparschaltung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Mischersparfunktion: + type: num + md_command: Heizkreis.A1M1.Status.Mischersparfunktion + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Pumpenstillstandzeit: + type: num + md_command: Heizkreis.A1M1.Status.Pumpenstillstandzeit + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Status + + Heizkennlinie: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Heizkennlinie + + Neigung: + type: num + md_command: Heizkreis.A1M1.Heizkennlinie.Neigung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Heizkennlinie + + Niveau: + type: num + md_command: Heizkreis.A1M1.Heizkennlinie.Niveau + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + - 
Heizkreis.A1M1.Heizkennlinie + + Partybetrieb_Zeitbegrenzung: + type: num + md_command: Heizkreis.A1M1.Partybetrieb_Zeitbegrenzung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.A1M1 + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur + + Raum: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur.Raum + + Ist: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Soll_Normalbetrieb: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Normalbetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Soll_Red_Betrieb: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Red_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Soll_Party_Betrieb: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Party_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Vorlauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf + + Ist: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Soll: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Soll + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Min: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Min + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Max: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Max + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Erhoehung_Soll: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Soll + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Erhoehung_Zeit: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Zeit + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Grenze_red_Betrieb: + type: num + md_command: Heizkreis.M2.Temperatur.Grenze_red_Betrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + + Grenze_red_Raumtemp: + type: num + md_command: Heizkreis.M2.Temperatur.Grenze_red_Raumtemp + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + + Status: + + read: + type: bool + enforce_updates: true + 
md_read_group_trigger: Heizkreis.M2.Status + + Aktuelle_Betriebsart: + type: num + md_command: Heizkreis.M2.Status.Aktuelle_Betriebsart + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Betriebsart: + type: num + md_command: Heizkreis.M2.Status.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Sparbetrieb: + type: num + md_command: Heizkreis.M2.Status.Sparbetrieb + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Zustand_Sparbetrieb: + type: num + md_command: Heizkreis.M2.Status.Zustand_Sparbetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Partybetrieb: + type: num + md_command: Heizkreis.M2.Status.Partybetrieb + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Zustand_Partybetrieb: + type: num + md_command: Heizkreis.M2.Status.Zustand_Partybetrieb + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + StatusFrost: + type: num + md_command: Heizkreis.M2.Status.StatusFrost + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Externe_Raumsolltemperatur_Normal: + type: num + md_command: Heizkreis.M2.Status.Externe_Raumsolltemperatur_Normal + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Externe_Betriebsartenumschaltung: + type: num + md_command: Heizkreis.M2.Status.Externe_Betriebsartenumschaltung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Speichervorrang: + type: num + md_command: Heizkreis.M2.Status.Speichervorrang + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Frostschutzgrenze: + type: num + md_command: Heizkreis.M2.Status.Frostschutzgrenze + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Frostschutz: + type: num + md_command: Heizkreis.M2.Status.Frostschutz + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Heizkreispumpenlogik: + type: num + md_command: Heizkreis.M2.Status.Heizkreispumpenlogik + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Sparschaltung: + type: num + md_command: Heizkreis.M2.Status.Sparschaltung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Mischersparfunktion: + type: num + md_command: Heizkreis.M2.Status.Mischersparfunktion + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Pumpenstillstandzeit: + type: num + md_command: Heizkreis.M2.Status.Pumpenstillstandzeit + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Status + + Heizkennlinie: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Heizkennlinie + + Neigung: + type: num + md_command: Heizkreis.M2.Heizkennlinie.Neigung + md_read: 
true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Heizkennlinie + + Niveau: + type: num + md_command: Heizkreis.M2.Heizkennlinie.Niveau + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Heizkennlinie + + Partybetrieb_Zeitbegrenzung: + type: num + md_command: Heizkreis.M2.Partybetrieb_Zeitbegrenzung + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Heizkreis + - Heizkreis.M2 + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Warmwasser + + Ist: + type: num + md_command: Warmwasser.Ist + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Warmwasser + + Soll: + type: num + md_command: Warmwasser.Soll + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Warmwasser + + Status: + type: bool + md_command: Warmwasser.Status + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Warmwasser + + PumpenNachlauf: + type: num + md_command: Warmwasser.PumpenNachlauf + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Warmwasser + + Ferienprogramm: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm.A1M1 + + Status: + type: num + md_command: Ferienprogramm.A1M1.Status + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.A1M1 + + Abreisetag: + type: bool + md_command: Ferienprogramm.A1M1.Abreisetag + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.A1M1 + + Rückreisetag: + type: bool + md_command: Ferienprogramm.A1M1.Rückreisetag + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.A1M1 + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm.M2 + + Status: + type: num + md_command: Ferienprogramm.M2.Status + md_read: true + md_write: false + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.M2 + + Abreisetag: + type: bool + md_command: Ferienprogramm.M2.Abreisetag + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.M2 + + Rückreisetag: + type: bool + md_command: Ferienprogramm.M2.Rückreisetag + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Ferienprogramm + - Ferienprogramm.M2 + + Timer: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.Warmwasser + + Mo: + type: list + md_command: Timer.Warmwasser.Mo + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + Di: + type: list + md_command: Timer.Warmwasser.Di + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + Mi: + type: list + md_command: Timer.Warmwasser.Mi + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + Do: + type: list + md_command: Timer.Warmwasser.Do + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + Fr: + type: list + md_command: Timer.Warmwasser.Fr + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + Sa: + type: list + md_command: Timer.Warmwasser.Sa + md_read: true + md_write: true + 
md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + So: + type: list + md_command: Timer.Warmwasser.So + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Warmwasser + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.A1M1 + + Mo: + type: list + md_command: Timer.A1M1.Mo + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + Di: + type: list + md_command: Timer.A1M1.Di + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + Mi: + type: list + md_command: Timer.A1M1.Mi + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + Do: + type: list + md_command: Timer.A1M1.Do + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + Fr: + type: list + md_command: Timer.A1M1.Fr + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + Sa: + type: list + md_command: Timer.A1M1.Sa + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + So: + type: list + md_command: Timer.A1M1.So + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.A1M1 + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.M2 + + Mo: + type: list + md_command: Timer.M2.Mo + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Di: + type: list + md_command: Timer.M2.Di + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Mi: + type: list + md_command: Timer.M2.Mi + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Do: + type: list + md_command: Timer.M2.Do + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Fr: + type: list + md_command: Timer.M2.Fr + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Sa: + type: list + md_command: Timer.M2.Sa + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + So: + type: list + md_command: Timer.M2.So + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.M2 + + Zirkulation: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.Zirkulation + + Mo: + type: list + md_command: Timer.Zirkulation.Mo + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + Di: + type: list + md_command: Timer.Zirkulation.Di + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + Mi: + type: list + md_command: Timer.Zirkulation.Mi + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + Do: + type: list + md_command: Timer.Zirkulation.Do + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + Fr: + type: list + md_command: Timer.Zirkulation.Fr + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + Sa: + type: list + md_command: Timer.Zirkulation.Sa + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + So: + type: list + md_command: Timer.Zirkulation.So + md_read: true + md_write: true + md_read_group: + - V200KO1B + - Timer + - Timer.Zirkulation + + V200HO1C: + + Anlagentyp: + type: num + md_command: Anlagentyp + md_read: true + md_write: false + 
md_read_group: + - V200HO1C + md_read_initial: true + + Allgemein: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein + + Anlagenschema: + type: num + md_command: Allgemein.Anlagenschema + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Allgemein + + Frostgefahr: + type: num + md_command: Allgemein.Frostgefahr + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Allgemein + + Anlagenleistung: + type: num + md_command: Allgemein.Anlagenleistung + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Allgemein + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein.Temperatur + + Aussen_TP: + type: num + md_command: Allgemein.Temperatur.Aussen_TP + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Allgemein + - Allgemein.Temperatur + + Aussen_Dp: + type: num + md_command: Allgemein.Temperatur.Aussen_Dp + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Allgemein + - Allgemein.Temperatur + + Kessel: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Kessel + + TP: + type: num + md_command: Kessel.TP + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Kessel + + Soll: + type: num + md_command: Kessel.Soll + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Kessel + + Abgastemperatur: + type: num + md_command: Kessel.Abgastemperatur + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Kessel + + Fehler: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Fehler + + Sammelstoerung: + type: num + md_command: Fehler.Sammelstoerung + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error0: + type: num + md_command: Fehler.Error0 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error1: + type: num + md_command: Fehler.Error1 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error2: + type: num + md_command: Fehler.Error2 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error3: + type: num + md_command: Fehler.Error3 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error4: + type: num + md_command: Fehler.Error4 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error5: + type: num + md_command: Fehler.Error5 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error6: + type: num + md_command: Fehler.Error6 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error7: + type: num + md_command: Fehler.Error7 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error8: + type: num + md_command: Fehler.Error8 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Error9: + type: num + md_command: Fehler.Error9 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Fehler + + Pumpen: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Pumpen + + Speicherlade: + type: bool + md_command: Pumpen.Speicherlade + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Pumpen + + Zirkulation: + type: bool + md_command: Pumpen.Zirkulation + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Pumpen + + Intern: + type: bool + md_command: Pumpen.Intern + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Pumpen + + Heizkreis_1: + type: 
bool + md_command: Pumpen.Heizkreis_1 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Pumpen + + Heizkreis_2: + type: bool + md_command: Pumpen.Heizkreis_2 + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Pumpen + + Brenner: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Brenner + + Starts: + type: num + md_command: Brenner.Starts + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Brenner + + Leistung: + type: num + md_command: Brenner.Leistung + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Brenner + + Betriebsstunden: + type: num + md_command: Brenner.Betriebsstunden + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Brenner + + Solar: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Solar + + Pumpe: + type: bool + md_command: Solar.Pumpe + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Kollektortemperatur: + type: num + md_command: Solar.Kollektortemperatur + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Speichertemperatur: + type: num + md_command: Solar.Speichertemperatur + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Betriebsstunden: + type: num + md_command: Solar.Betriebsstunden + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Waermemenge: + type: num + md_command: Solar.Waermemenge + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Ausbeute: + type: num + md_command: Solar.Ausbeute + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Solar + + Heizkreis: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis + + '1': + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.1 + + Betriebsart: + type: num + md_command: Heizkreis.1.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.1 + + Heizart: + type: num + md_command: Heizkreis.1.Heizart + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.1 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.1.Temperatur + + Vorlauf_Soll: + type: num + md_command: Heizkreis.1.Temperatur.Vorlauf_Soll + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.1 + - Heizkreis.1.Temperatur + + Vorlauf_Ist: + type: num + md_command: Heizkreis.1.Temperatur.Vorlauf_Ist + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.1 + - Heizkreis.1.Temperatur + + '2': + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.2 + + Betriebsart: + type: num + md_command: Heizkreis.2.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.2 + + Heizart: + type: num + md_command: Heizkreis.2.Heizart + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.2 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.2.Temperatur + + Vorlauf_Soll: + type: num + md_command: Heizkreis.2.Temperatur.Vorlauf_Soll + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Heizkreis + - Heizkreis.2 + - Heizkreis.2.Temperatur + + Vorlauf_Ist: + type: num + md_command: Heizkreis.2.Temperatur.Vorlauf_Ist + md_read: true + md_write: false + md_read_group: + - V200HO1C + - 
Heizkreis + - Heizkreis.2 + - Heizkreis.2.Temperatur + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Warmwasser + + Ist: + type: num + md_command: Warmwasser.Ist + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Warmwasser + + Soll: + type: num + md_command: Warmwasser.Soll + md_read: true + md_write: true + md_read_group: + - V200HO1C + - Warmwasser + + Austritt: + type: num + md_command: Warmwasser.Austritt + md_read: true + md_write: false + md_read_group: + - V200HO1C + - Warmwasser + + V200KW2: + + Anlagentyp: + type: num + md_command: Anlagentyp + md_read: true + md_write: false + md_read_group: + - V200KW2 + md_read_initial: true + + Allgemein: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein.Temperatur + + Aussen: + type: num + md_command: Allgemein.Temperatur.Aussen + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Allgemein + - Allgemein.Temperatur + + Aussen_Dp: + type: num + md_command: Allgemein.Temperatur.Aussen_Dp + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Allgemein + - Allgemein.Temperatur + + Anlagenschema: + type: num + md_command: Allgemein.Anlagenschema + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Allgemein + + AnlagenSoftwareIndex: + type: num + md_command: Allgemein.AnlagenSoftwareIndex + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Allgemein + + Systemtime: + type: bool + md_command: Allgemein.Systemtime + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Allgemein + + Kessel: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Kessel + + TempKOffset: + type: num + md_command: Kessel.TempKOffset + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Kessel + + Ist: + type: num + md_command: Kessel.Ist + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Kessel + + Soll: + type: num + md_command: Kessel.Soll + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Kessel + + Fehler: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Fehler + + Sammelstoerung: + type: num + md_command: Fehler.Sammelstoerung + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Brennerstoerung: + type: num + md_command: Fehler.Brennerstoerung + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error0: + type: num + md_command: Fehler.Error0 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error1: + type: num + md_command: Fehler.Error1 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error2: + type: num + md_command: Fehler.Error2 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error3: + type: num + md_command: Fehler.Error3 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error4: + type: num + md_command: Fehler.Error4 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error5: + type: num + md_command: Fehler.Error5 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error6: + type: num + md_command: Fehler.Error6 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error7: + type: num + md_command: Fehler.Error7 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + 
Error8: + type: num + md_command: Fehler.Error8 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Error9: + type: num + md_command: Fehler.Error9 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Fehler + + Pumpen: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Pumpen + + Speicherlade: + type: bool + md_command: Pumpen.Speicherlade + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Pumpen + + Zirkulation: + type: bool + md_command: Pumpen.Zirkulation + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Pumpen + + Heizkreis_A1M1: + type: bool + md_command: Pumpen.Heizkreis_A1M1 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Pumpen + + Heizkreis_M2: + type: bool + md_command: Pumpen.Heizkreis_M2 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Pumpen + + Brenner: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Brenner + + Typ: + type: num + md_command: Brenner.Typ + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Brenner + + Stufe: + type: num + md_command: Brenner.Stufe + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Brenner + + Starts: + type: num + md_command: Brenner.Starts + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Brenner + + Status_1: + type: bool + md_command: Brenner.Status_1 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Brenner + + Status_2: + type: bool + md_command: Brenner.Status_2 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Brenner + + BetriebsstundenStufe1: + type: num + md_command: Brenner.BetriebsstundenStufe1 + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Brenner + + BetriebsstundenStufe2: + type: num + md_command: Brenner.BetriebsstundenStufe2 + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Brenner + + Heizkreis: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Temperatur + + Raum: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum + + Soll_Normal: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Normal + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Soll_Reduziert: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Reduziert + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Soll_Party: + type: num + md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Raum + + Vorlauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf + + Ist: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Soll: + type: num + md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll + md_read: true 
+ md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Temperatur + - Heizkreis.A1M1.Temperatur.Vorlauf + + Betriebsart: + type: num + md_command: Heizkreis.A1M1.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Aktuelle_Betriebsart: + type: num + md_command: Heizkreis.A1M1.Aktuelle_Betriebsart + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Sparbetrieb: + type: num + md_command: Heizkreis.A1M1.Sparbetrieb + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Partybetrieb_Zeit: + type: num + md_command: Heizkreis.A1M1.Partybetrieb_Zeit + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Partybetrieb: + type: num + md_command: Heizkreis.A1M1.Partybetrieb + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + MischerM1: + type: num + md_command: Heizkreis.A1M1.MischerM1 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Heizkreispumpenlogik: + type: num + md_command: Heizkreis.A1M1.Heizkreispumpenlogik + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Sparschaltung: + type: num + md_command: Heizkreis.A1M1.Sparschaltung + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + + Heizkennlinie: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.A1M1.Heizkennlinie + + Neigung: + type: num + md_command: Heizkreis.A1M1.Heizkennlinie.Neigung + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Heizkennlinie + + Niveau: + type: num + md_command: Heizkreis.A1M1.Heizkennlinie.Niveau + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.A1M1 + - Heizkreis.A1M1.Heizkennlinie + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur + + Raum: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur.Raum + + Soll_Normal: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Normal + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Soll_Reduziert: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Reduziert + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Soll_Party: + type: num + md_command: Heizkreis.M2.Temperatur.Raum.Soll_Party + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Raum + + Vorlauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf + + Soll: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Soll + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Ist: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Ist + md_read: true + md_write: false + md_read_group: + - 
V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Min: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Min + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Max: + type: num + md_command: Heizkreis.M2.Temperatur.Vorlauf.Max + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Temperatur + - Heizkreis.M2.Temperatur.Vorlauf + + Betriebsart: + type: num + md_command: Heizkreis.M2.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Aktuelle_Betriebsart: + type: num + md_command: Heizkreis.M2.Aktuelle_Betriebsart + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Sparbetrieb: + type: num + md_command: Heizkreis.M2.Sparbetrieb + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Partybetrieb: + type: num + md_command: Heizkreis.M2.Partybetrieb + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Partybetrieb_Zeit: + type: num + md_command: Heizkreis.M2.Partybetrieb_Zeit + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + MischerM2: + type: num + md_command: Heizkreis.M2.MischerM2 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + MischerM2Auf: + type: bool + md_command: Heizkreis.M2.MischerM2Auf + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + MischerM2Zu: + type: bool + md_command: Heizkreis.M2.MischerM2Zu + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Heizkreispumpenlogik: + type: num + md_command: Heizkreis.M2.Heizkreispumpenlogik + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Sparschaltung: + type: num + md_command: Heizkreis.M2.Sparschaltung + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + StatusKlemme2: + type: bool + md_command: Heizkreis.M2.StatusKlemme2 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + StatusKlemme17: + type: bool + md_command: Heizkreis.M2.StatusKlemme17 + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + + Heizkennlinie: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.M2.Heizkennlinie + + Neigung: + type: num + md_command: Heizkreis.M2.Heizkennlinie.Neigung + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Heizkennlinie + + Niveau: + type: num + md_command: Heizkreis.M2.Heizkennlinie.Niveau + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Heizkreis + - Heizkreis.M2 + - Heizkreis.M2.Heizkennlinie + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Warmwasser + + Status: + type: bool + md_command: Warmwasser.Status + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Warmwasser + + KesselOffset: + type: num + md_command: Warmwasser.KesselOffset + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Warmwasser + + BeiPartyDNormal: + type: num + md_command: Warmwasser.BeiPartyDNormal + md_read: true + md_write: 
true + md_read_group: + - V200KW2 + - Warmwasser + + Ist: + type: num + md_command: Warmwasser.Ist + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Warmwasser + + Soll: + type: num + md_command: Warmwasser.Soll + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Warmwasser + + SollAktuell: + type: num + md_command: Warmwasser.SollAktuell + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Warmwasser + + SollMax: + type: num + md_command: Warmwasser.SollMax + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Warmwasser + + Ferienprogramm: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm.A1M1 + + Status: + type: num + md_command: Ferienprogramm.A1M1.Status + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.A1M1 + + Abreisetag: + type: bool + md_command: Ferienprogramm.A1M1.Abreisetag + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.A1M1 + + Rückreisetag: + type: bool + md_command: Ferienprogramm.A1M1.Rückreisetag + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.A1M1 + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Ferienprogramm.M2 + + Status: + type: num + md_command: Ferienprogramm.M2.Status + md_read: true + md_write: false + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.M2 + + Abreisetag: + type: bool + md_command: Ferienprogramm.M2.Abreisetag + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.M2 + + Rückreisetag: + type: bool + md_command: Ferienprogramm.M2.Rückreisetag + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Ferienprogramm + - Ferienprogramm.M2 + + Timer: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.Warmwasser + + Mo: + type: list + md_command: Timer.Warmwasser.Mo + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + Di: + type: list + md_command: Timer.Warmwasser.Di + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + Mi: + type: list + md_command: Timer.Warmwasser.Mi + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + Do: + type: list + md_command: Timer.Warmwasser.Do + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + Fr: + type: list + md_command: Timer.Warmwasser.Fr + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + Sa: + type: list + md_command: Timer.Warmwasser.Sa + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + So: + type: list + md_command: Timer.Warmwasser.So + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.Warmwasser + + A1M1: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.A1M1 + + Mo: + type: list + md_command: Timer.A1M1.Mo + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + Di: + type: list + md_command: Timer.A1M1.Di + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + Mi: + 
type: list + md_command: Timer.A1M1.Mi + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + Do: + type: list + md_command: Timer.A1M1.Do + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + Fr: + type: list + md_command: Timer.A1M1.Fr + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + Sa: + type: list + md_command: Timer.A1M1.Sa + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + So: + type: list + md_command: Timer.A1M1.So + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.A1M1 + + M2: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Timer.M2 + + Mo: + type: list + md_command: Timer.M2.Mo + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + Di: + type: list + md_command: Timer.M2.Di + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + Mi: + type: list + md_command: Timer.M2.Mi + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + Do: + type: list + md_command: Timer.M2.Do + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + Fr: + type: list + md_command: Timer.M2.Fr + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + Sa: + type: list + md_command: Timer.M2.Sa + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + So: + type: list + md_command: Timer.M2.So + md_read: true + md_write: true + md_read_group: + - V200KW2 + - Timer + - Timer.M2 + + V200WO1C: + + Anlagentyp: + type: num + md_command: Anlagentyp + md_read: true + md_write: false + md_read_group: + - V200WO1C + md_read_initial: true + + Allgemein: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein + md_read_cycle: 45 + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Allgemein.Temperatur + + Aussen: + type: num + md_command: Allgemein.Temperatur.Aussen + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Allgemein + - Allgemein.Temperatur + + Betriebsart: + type: str + md_command: Allgemein.Betriebsart + md_read: true + md_write: true + md_read_group: + - V200WO1C + - Allgemein + md_read_initial: true + + lookup: + type: list + md_lookup: operatingmodes#list + + Manuell: + type: num + md_command: Allgemein.Manuell + md_read: true + md_write: true + md_read_group: + - V200WO1C + - Allgemein + + Outdoor_Fanspeed: + type: num + md_command: Allgemein.Outdoor_Fanspeed + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Allgemein + + Status_Fanspeed: + type: num + md_command: Allgemein.Status_Fanspeed + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Allgemein + + Kompressor_Freq: + type: num + md_command: Allgemein.Kompressor_Freq + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Allgemein + + SollLeistungVerdichter: + type: num + md_command: Allgemein.SollLeistungVerdichter + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Allgemein + + Pumpen: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Pumpen + + Sekundaer: + type: bool + md_command: Pumpen.Sekundaer + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Pumpen + + Heizkreis: + type: bool + md_command: Pumpen.Heizkreis + md_read: true + md_write: false + md_read_group: + - 
V200WO1C + - Pumpen + + Zirkulation: + type: bool + md_command: Pumpen.Zirkulation + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Pumpen + + Heizkreis: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis + + Temperatur: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.Temperatur + + Raum: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.Temperatur.Raum + + Soll: + type: num + md_command: Heizkreis.Temperatur.Raum.Soll + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Raum + + Soll_Reduziert: + type: num + md_command: Heizkreis.Temperatur.Raum.Soll_Reduziert + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Raum + + Soll_Party: + type: num + md_command: Heizkreis.Temperatur.Raum.Soll_Party + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Raum + + Vorlauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.Temperatur.Vorlauf + + Ist: + type: num + md_command: Heizkreis.Temperatur.Vorlauf.Ist + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Vorlauf + + Soll: + type: num + md_command: Heizkreis.Temperatur.Vorlauf.Soll + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Vorlauf + + Mittel: + type: num + md_command: Heizkreis.Temperatur.Vorlauf.Mittel + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Vorlauf + + Ruecklauf: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.Temperatur.Ruecklauf + + Ist: + type: num + md_command: Heizkreis.Temperatur.Ruecklauf.Ist + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Ruecklauf + + Mittel: + type: num + md_command: Heizkreis.Temperatur.Ruecklauf.Mittel + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Temperatur + - Heizkreis.Temperatur.Ruecklauf + + Heizkennlinie: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Heizkreis.Heizkennlinie + + Niveau: + type: num + md_command: Heizkreis.Heizkennlinie.Niveau + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Heizkennlinie + + Neigung: + type: num + md_command: Heizkreis.Heizkennlinie.Neigung + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Heizkreis + - Heizkreis.Heizkennlinie + + Warmwasser: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Warmwasser + + Ist: + type: num + md_command: Warmwasser.Ist + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Warmwasser + + Soll: + type: num + md_command: Warmwasser.Soll + md_read: true + md_write: true + md_read_group: + - V200WO1C + - Warmwasser + + Ventil: + type: bool + md_command: Warmwasser.Ventil + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Warmwasser + + Statistik: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Statistik + + Einschaltungen: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Statistik.Einschaltungen + 
+ Sekundaer: + type: num + md_command: Statistik.Einschaltungen.Sekundaer + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Einschaltungen + + Heizstab1: + type: num + md_command: Statistik.Einschaltungen.Heizstab1 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Einschaltungen + + Heizstab2: + type: num + md_command: Statistik.Einschaltungen.Heizstab2 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Einschaltungen + + HK: + type: num + md_command: Statistik.Einschaltungen.HK + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Einschaltungen + + Laufzeiten: + + read: + type: bool + enforce_updates: true + md_read_group_trigger: Statistik.Laufzeiten + + Sekundaerpumpe: + type: num + md_command: Statistik.Laufzeiten.Sekundaerpumpe + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + Heizstab1: + type: num + md_command: Statistik.Laufzeiten.Heizstab1 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + Heizstab2: + type: num + md_command: Statistik.Laufzeiten.Heizstab2 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + PumpeHK: + type: num + md_command: Statistik.Laufzeiten.PumpeHK + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + WWVentil: + type: num + md_command: Statistik.Laufzeiten.WWVentil + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterStufe1: + type: num + md_command: Statistik.Laufzeiten.VerdichterStufe1 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterStufe2: + type: num + md_command: Statistik.Laufzeiten.VerdichterStufe2 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterStufe3: + type: num + md_command: Statistik.Laufzeiten.VerdichterStufe3 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterStufe4: + type: num + md_command: Statistik.Laufzeiten.VerdichterStufe4 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterStufe5: + type: num + md_command: Statistik.Laufzeiten.VerdichterStufe5 + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + VerdichterWP: + type: num + md_command: Statistik.Laufzeiten.VerdichterWP + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + - Statistik.Laufzeiten + + OAT_Temperature: + type: num + md_command: Statistik.OAT_Temperature + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + + ICT_Temperature: + type: num + md_command: Statistik.ICT_Temperature + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + + CCT_Temperature: + type: num + md_command: Statistik.CCT_Temperature + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + + HST_Temperature: + type: num + md_command: Statistik.HST_Temperature + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + + OMT_Temperature: + type: num + md_command: Statistik.OMT_Temperature + md_read: true + md_write: false + 
md_read_group: + - V200WO1C + - Statistik + + WaermeWW12M: + type: num + md_command: Statistik.WaermeWW12M + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik + + ElektroWW12M: + type: num + md_command: Statistik.ElektroWW12M + md_read: true + md_write: false + md_read_group: + - V200WO1C + - Statistik +plugin_functions: NONE +logic_parameters: NONE diff --git a/viessmann/protocol.py b/viessmann/protocol.py new file mode 100644 index 000000000..aa630def3 --- /dev/null +++ b/viessmann/protocol.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2020- Sebastian Helms Morg @ knx-user-forum +######################################################################### +# This file aims to become part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# SDPProtocolViessmann for sdp_viessmann plugin +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +import logging + +from lib.model.sdp.globals import (CONN_SER_DIR, PLUGIN_ATTR_CB_ON_CONNECT, PLUGIN_ATTR_CB_ON_DISCONNECT, PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_CONN_AUTO_CONN, PLUGIN_ATTR_CONN_BINARY, PLUGIN_ATTR_CONN_CYCLE, PLUGIN_ATTR_CONN_RETRIES, PLUGIN_ATTR_CONN_TIMEOUT, PLUGIN_ATTR_SERIAL_BAUD, PLUGIN_ATTR_SERIAL_BSIZE, PLUGIN_ATTR_SERIAL_PARITY, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_SERIAL_STOP) +from lib.model.sdp.protocol import SDPProtocol + +from time import sleep +import threading + + +############################################################################################################################################################################################################################################# +# +# class SDPProtocol and subclasses +# +############################################################################################################################################################################################################################################# + +class SDPProtocolViessmann(SDPProtocol): + """ Protocol support for Viessmann heating systems + + This class implements a Viessmann protocol layer. By default, this uses + the P300 protocol. By supplying the 'viess_proto' attribute, the older 'KW' + protocol can be selected. + + At the moment, this is oriented towards serial connections. By supplying + your own connection type, you could try to use it over networked connections. + Be advised that the necessary "reply" client and the methods needed are not + implemented for network access as of this time... 
+ """ + + def __init__(self, data_received_callback, name=None, **kwargs): + + self.logger = logging.getLogger(__name__) + + if SDP_standalone: + self.logger = logging.getLogger('__main__') + + self.logger.debug(f'protocol initializing from {self.__class__.__name__} with arguments {kwargs}') + + # set class properties + self._is_connected = False + self._error_count = 0 + self._lock = threading.Lock() + self._is_initialized = False + self._data_received_callback = data_received_callback + + self._controlsets = { + 'P300': { + 'baudrate': 4800, + 'bytesize': 8, + 'parity': 'E', + 'stopbits': 2, + 'timeout': 0.5, + 'startbyte': 0x41, + 'request': 0x00, + 'response': 0x01, + 'error': 0x03, + 'read': 0x01, + 'write': 0x02, + 'functioncall': 0x7, + 'acknowledge': 0x06, + 'not_initiated': 0x05, + 'init_error': 0x15, + 'reset_command': 0x04, + 'reset_command_response': 0x05, + 'sync_command': 0x160000, + 'sync_command_response': 0x06, + 'command_bytes_read': 5, + 'command_bytes_write': 5, + # init: send'Reset_Command' receive'Reset_Command_Response' send'Sync_Command' + # request: send('StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Request' 'Read' 'addr' 'checksum') + # request_response: receive('Acknowledge' 'StartByte' 'Länge der Nutzdaten als Anzahl der Bytes zwischen diesem Byte und der Prüfsumme' 'Response' 'Read' 'addr' 'Anzahl der Bytes des Wertes' 'Wert' 'checksum') + }, + 'KW': { + 'baudrate': 4800, + 'bytesize': 8, # 'EIGHTBITS' + 'parity': 'E', # 'PARITY_EVEN', + 'stopbits': 2, # 'STOPBITS_TWO', + 'timeout': 1, + 'startbyte': 0x01, + 'read': 0xF7, + 'write': 0xF4, + 'acknowledge': 0x01, + 'reset_command': 0x04, + 'not_initiated': 0x05, + 'write_ack': 0x00, + }, + } + + # get protocol or default to P300 + self._viess_proto = kwargs.get('viess_proto', 'P300') + if self._viess_proto not in self._controlsets: + self._viess_proto = 'P300' + # select controlset for viess_proto + self._controlset = self._controlsets[self._viess_proto] + + # make sure we have a basic set of parameters for the TCP connection + self._params = {PLUGIN_ATTR_SERIAL_PORT: '', + PLUGIN_ATTR_SERIAL_BAUD: self._controlset[PLUGIN_ATTR_SERIAL_BAUD], + PLUGIN_ATTR_SERIAL_BSIZE: self._controlset[PLUGIN_ATTR_SERIAL_BSIZE], + PLUGIN_ATTR_SERIAL_PARITY: self._controlset[PLUGIN_ATTR_SERIAL_PARITY], + PLUGIN_ATTR_SERIAL_STOP: self._controlset[PLUGIN_ATTR_SERIAL_STOP], + PLUGIN_ATTR_CONN_TIMEOUT: self._controlset[PLUGIN_ATTR_CONN_TIMEOUT], + PLUGIN_ATTR_CONN_AUTO_CONN: True, + PLUGIN_ATTR_CONN_BINARY: True, + PLUGIN_ATTR_CONN_RETRIES: 0, + PLUGIN_ATTR_CONN_CYCLE: 3, + PLUGIN_ATTR_CB_ON_CONNECT: None, + PLUGIN_ATTR_CB_ON_DISCONNECT: None, + PLUGIN_ATTR_CONNECTION: CONN_SER_DIR} + self._params.update(kwargs) + + # check if some of the arguments are usable + self._set_connection_params() + + # initialize connection + self._get_connection(name=name) + + # set "method pointers" + self._send_bytes = self._connection._send_bytes + self._read_bytes = self._connection._read_bytes + + # tell someone about our actual class + self.logger.debug(f'protocol initialized from {self.__class__.__name__}') + + def _close(self): + self._is_initialized = False + super()._close() + + def _send_init_on_send(self): + """ + setup the communication protocol prior to sending + + :return: Returns True, if communication was established successfully, False otherwise + :rtype: bool + """ + if self._viess_proto == 'P300' and not self._is_initialized: + + # init procedure is + # interface: 0x04 (reset) + # device: 
0x05 (repeated) + # interface: 0x160000 (sync) + # device: 0x06 (sync ok) + # interface: resume communication, periodically send 0x160000 as keepalive if necessary + + RESET = self._int2bytes(self._controlset['reset_command'], 1) + NOTINIT = self._int2bytes(self._controlset["not_initiated"], 1) + ACK = self._int2bytes(self._controlset['acknowledge'], 1) + SYNC = self._int2bytes(self._controlset['sync_command'], 3) + ERR = self._int2bytes(self._controlset['init_error'], 1) + + self.logger.debug('init communication....') + syncsent = False + + self.logger.debug(f'send_bytes: send reset command {RESET}') + self._send_bytes(RESET) + + readbyte = self._read_bytes(1) + self.logger.debug(f'read_bytes: read {readbyte}') + + for i in range(10): + if syncsent and readbyte == ACK: + self.logger.debug('device acknowledged initialization') + self._is_initialized = True + break + elif readbyte == NOTINIT: + self.logger.debug(f'send_bytes: send sync command {SYNC}') + self._send_bytes(SYNC) + syncsent = True + elif readbyte == ERR: + self.logger.error(f'interface reported an error, loop increment {i}') + self.logger.debug(f'send_bytes: send reset command {RESET}') + self._send_bytes(RESET) + syncsent = False + else: # elif readbyte != b'': + self.logger.debug(f'send_bytes: send reset command {RESET}') + self._send_bytes(RESET) + syncsent = False + readbyte = self._read_bytes(1) + self.logger.debug(f'read_bytes: read {readbyte}') + + self.logger.debug(f'communication initialized: {self._is_initialized}') + return self._is_initialized + + elif self._viess_proto == 'KW': + + retries = 5 + RESET = self._int2bytes(self._controlset['reset_command'], 1) + NOINIT = self._int2bytes(self._controlset['not_initiated'], 1, signed=False) + + # try to reset communication, especially if previous P300 comms is still open + self._send_bytes(RESET) + + attempt = 0 + while attempt < retries: + self.logger.debug(f'starting sync loop - attempt {attempt + 1}/{retries}') + + self._connection.reset_input_buffer() + chunk = self._read_bytes(1) + # enable for 'raw' debugging + # self.logger.debug(f'sync loop - got {self._bytes2hexstring(chunk)}') + if chunk == NOINIT: + self.logger.debug('got sync, commencing command send') + self._is_initialized = True + return True + sleep(.8) + attempt = attempt + 1 + self.logger.error(f'sync not acquired after {attempt} attempts') + self._close() + return False + + return True + + def _send(self, data_dict): + """ + send data. 
data_dict needs to contain the following information: + + data_dict['payload']: address from/to which to read/write (hex, str) + data_dict['data']['len']: length of command to send + data_dict['data']['value']: value bytes to write, None if reading + + :param data_dict: send data + :param read_response: KW only: read response value (True) or only return status byte + :type data_dict: dict + :type read_response: bool + :return: Response packet (bytearray) if no error occured, None otherwise + """ + (packet, responselen) = self._build_payload(data_dict) + + # send payload + self._lock.acquire() + try: + self._send_bytes(packet) + self.logger.debug(f'successfully sent packet {self._bytes2hexstring(packet)}') + + # receive response + response_packet = bytearray() + self.logger.debug(f'trying to receive {responselen} bytes of the response') + chunk = self._read_bytes(responselen) + if self._viess_proto == 'P300': + self.logger.debug(f'received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') + if len(chunk) != 0: + if chunk[:1] == self._int2bytes(self._controlset['error'], 1): + self.logger.error(f'interface returned error, response was {chunk}') + elif len(chunk) == 1 and chunk[:1] == self._int2bytes(self._controlset['not_initiated'], 1): + self.logger.error('received invalid chunk, connection not initialized, forcing re-initialize...') + self._initialized = False + elif chunk[:1] != self._int2bytes(self._controlset['acknowledge'], 1): + self.logger.error(f'received invalid chunk, not starting with ACK, response was {chunk}') + self._error_count += 1 + if self._error_count >= 5: + self.logger.warning('encountered 5 invalid chunks in sequence, maybe communication was lost, forcing re-initialize') + self._initialized = False + else: + response_packet.extend(chunk) + self._error_count = 0 + return self._parse_response(response_packet) + else: + self.logger.error(f'received 0 bytes chunk - ignoring response_packet, chunk was {chunk}') + elif self._protocol == 'KW': + self.logger.debug(f'received {len(chunk)} bytes chunk of response as hexstring {self._bytes2hexstring(chunk)} and as bytes {chunk}') + if len(chunk) != 0: + response_packet.extend(chunk) + return self._parse_response(response_packet, data_dict['data']['value'] is None) + else: + self.logger.error('received 0 bytes chunk - this probably is a communication error, possibly a wrong datapoint address?') + except IOError as e: + self.logger.error(f'send_command_packet failed with IO error, trying to reconnect. Error was: {e}') + self._close() + except Exception as e: + self.logger.error(f'send_command_packet failed with error: {e}') + finally: + try: + self._lock.release() + except RuntimeError: + pass + + # if we didn't return with data earlier, we hit an error. 
Act accordingly + return None + + def _parse_response(self, response, read_response=True): + """ + Process device response data, try to parse type and value + + :param response: Data received from device + :type response: bytearray + :param read_response: True if command was read command and value is expected, False if only status byte is expected (only needed for KW protocol) + :type read_response: bool + :return: tuple of (parsed response value, commandcode) or None if error + """ + if self._viess_proto == 'P300': + + # A read_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of valuebytes (1 byte), value (bytes as per last byte), checksum (1 byte) + # A write_response telegram looks like this: ACK (1 byte), startbyte (1 byte), data length in bytes (1 byte), request/response (1 byte), read/write (1 byte), addr (2 byte), amount of bytes written (1 byte), checksum (1 byte) + + # Validate checksum + checksum = self._calc_checksum(response[1:len(response) - 1]) # first, cut first byte (ACK) and last byte (checksum) and then calculate checksum + received_checksum = response[len(response) - 1] + if received_checksum != checksum: + self.logger.error(f'calculated checksum {checksum} does not match received checksum of {received_checksum}! Ignoring reponse') + return None + + # Extract command/address, valuebytes and valuebytecount out of response + responsetypecode = response[3] # 0x00 = query, 0x01 = reply, 0x03 = error + responsedatacode = response[4] # 0x01 = ReadData, 0x02 = WriteData, 0x07 = Function Call + valuebytecount = response[7] + + # Extract databytes out of response + rawdatabytes = bytearray() + rawdatabytes.extend(response[8:8 + (valuebytecount)]) + elif self._protocol == 'KW': + + # imitate P300 response code data for easier combined handling afterwards + # a read_response telegram consists only of the value bytes + # a write_response telegram is 0x00 for OK, 0xXX for error + responsetypecode = 1 + valuebytecount = len(response) + rawdatabytes = response + + if read_response: + # value response to read request, error detection by empty = no response + responsedatacode = 1 + if len(rawdatabytes) == 0: + # error, no answer means wrong address (?) + responsetypecode = 3 + else: + # status response to write request + responsedatacode = 2 + if (len(rawdatabytes) == 1 and rawdatabytes[0] != 0) or len(rawdatabytes) == 0: + # error if status reply is not 0x00 + responsetypecode = 3 + + self.logger.debug(f'Response decoded to: responsedatacode: {responsedatacode}, valuebytecount: {valuebytecount}, responsetypecode: {responsetypecode}') + + if responsetypecode == 3: + raise ValueError(f'error on reading reply {rawdatabytes}') + + if responsedatacode == 2: + self.logger.debug('write request successful') + return None + + self.logger.debug(f'read request successful, read bytes {rawdatabytes}') + return rawdatabytes + + def _build_payload(self, data_dict): + """ + create payload from data_dict. 
Necessary data: + + data_dict['payload']: address from/to which to read/write (hex, str) + data_dict['data']['len']: length of command to send + data_dict['data']['value']: value bytes to write, None if reading + data_dict['data']['kwseq']: packet is follow-up packet in KW + + :param data_dict: data to convert + :type data_dict: dict + :return: (packet, responselen) + :rtype: tuple + """ + try: + addr = data_dict['payload'].lower() + cmdlen = data_dict['data']['len'] + valuebytes = data_dict['data']['value'] + KWFollowUp = data_dict['data'].get('kwseq', False) + except Exception as e: + raise ValueError(f'data_dict {data_dict} not usable, data not sent. Error was: {e}') + + write = valuebytes is not None + + # build payload + if write: + payloadlength = int(self._controlset.get('command_bytes_write', 0)) + int(valuebytes) + self.logger.debug(f'Payload length is: {payloadlength} bytes') + + packet = bytearray() + if not KWFollowUp: + packet.extend(self._int2bytes(self._controlset['startbyte'], 1)) + if self._viess_proto == 'P300': + if write: + packet.extend(self._int2bytes(payloadlength, 1)) + else: + packet.extend(self._int2bytes(self._controlset['command_bytes_read'], 1)) + packet.extend(self._int2bytes(self._controlset['request'], 1)) + + if write: + packet.extend(self._int2bytes(self._controlset['write'], 1)) + else: + packet.extend(self._int2bytes(self._controlset['read'], 1)) + packet.extend(bytes.fromhex(addr)) + packet.extend(self._int2bytes(cmdlen, 1)) + if write: + packet.extend(valuebytes) + if self._viess_proto == 'P300': + packet.extend(self._int2bytes(self._calc_checksum(packet), 1)) + + if self._viess_proto == 'P300': + responselen = int(self._controlset['command_bytes_read']) + 4 + (0 if write else int(cmdlen)) + else: + responselen = 1 if write else int(cmdlen) + + if write: + self.logger.debug(f'created payload to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet} with value {self._bytes2hexstring(valuebytes)})') + else: + self.logger.debug(f'created payload to be sent as hexstring: {self._bytes2hexstring(packet)} and as bytes: {packet}') + + return (packet, responselen) + + @staticmethod + def _calc_checksum(packet): + """ + Calculate checksum for P300 protocol packets + + :parameter packet: Data packet for which to calculate checksum + :type packet: bytearray + :return: Calculated checksum + :rtype: int + """ + checksum = 0 + if len(packet) > 0: + if packet[:1] == b'\x41': + packet = packet[1:] + checksum = sum(packet) + checksum = checksum - int(checksum / 256) * 256 + return checksum + + @staticmethod + def _int2bytes(value, length, signed=False): + """ + Convert value to bytearray with respect to defined length and sign format. 
+ Value exceeding limit set by length and sign will be truncated + + :parameter value: Value to convert + :type value: int + :parameter length: number of bytes to create + :type length: int + :parameter signed: True if result should be a signed int, False for unsigned + :type signed: bool + :return: Converted value + :rtype: bytearray + """ + value = value % (2 ** (length * 8)) + return value.to_bytes(length, byteorder='big', signed=signed) + + @staticmethod + def _bytes2int(rawbytes, signed): + """ + Convert bytearray to value with respect to sign format + + :parameter rawbytes: Bytes to convert + :type value: bytearray + :parameter signed: True if result should be a signed int, False for unsigned + :type signed: bool + :return: Converted value + :rtype: int + """ + return int.from_bytes(rawbytes, byteorder='little', signed=signed) + + @staticmethod + def _bytes2hexstring(bytesvalue): + """ + Create hex-formatted string from bytearray + :param bytesvalue: Bytes to convert + :type bytesvalue: bytearray + :return: Converted hex string + :rtype: str + """ + return ''.join(f'{c:02x}' for c in bytesvalue) diff --git a/viessmann/webif/__init__.py b/viessmann/webif/__init__.py new file mode 100644 index 000000000..0292f0c0e --- /dev/null +++ b/viessmann/webif/__init__.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2020- Sebastian Helms Morg @ knx-user-forum +######################################################################### +# This file aims to become part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# MultiDevice plugin for handling arbitrary devices via network or serial +# connection. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +import json + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf +from lib.model.sdp.globals import * +import cherrypy + + +############################################################################################################################################################################################################################################# +# +# class WebInterface +# +############################################################################################################################################################################################################################################# + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after beeing rendered + """ + tmpl = self.tplenv.get_template('index.html') + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) + + plgitems = [] + for item in self.items.return_items(): + if any(elem in item.property.attributes for elem in ITEM_ATTRS): + plgitems.append(item) + + return tmpl.render(p=self.plugin, + items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), + item_count=0, + plgitems=plgitems, + running=self.plugin.alive, + lookups=self.plugin._commands._lookups) + + @cherrypy.expose + def submit(self, button=None, param=None): + """ + Submit handler for Ajax + """ + if button is not None: + + notify = None + + if '#' in button: + + # run/stop command + cmd, __, dev = button.partition('#') + device = self.plugin.get_device(dev) + if device: + if cmd == 'run': + self.logger.info(f'Webinterface starting device {dev}') + device.start() + elif cmd == 'stop': + self.logger.info(f'Webinterface stopping device {dev}') + device.stop() + elif '.' 
in button: + + # set device arg - but only when stopped + dev, __, arg = button.partition('.') + if param is not None: + param = sanitize_param(param) + try: + self.logger.info(f'Webinterface setting param {arg} of device {dev} to {param}') + self.plugin._devices[dev]['params'][arg] = param + self.plugin._update_device_params(dev) + notify = dev + '-' + arg + '-notify' + except Exception as e: + self.logger.info(f'Webinterface failed to set param {arg} of device {dev} to {param} with error {e}') + + # # possibly prepare data for returning + # read_cmd = self.plugin._commandname_by_commandcode(button) + # if read_cmd is not None: + # self._last_read[button] = {'addr': button, 'cmd': read_cmd, 'val': read_val} + # self._last_read['last'] = self._last_read[button] + + data = {'running': {dev: self.plugin._devices[dev]['device'].alive for dev in self.plugin._devices}, 'notify': notify} + + # # possibly return data to WebIf + cherrypy.response.headers['Content-Type'] = 'application/json' + return json.dumps(data).encode('utf-8') + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. + """ + if dataSet is None: + # get the new data + # data = {} + pass + + # data['item'] = {} + # for i in self.plugin.items: + # data['item'][i]['value'] = self.plugin.getitemvalue(i) + # + # return it as json the the web page + # try: + # return json.dumps(data) + # except Exception as e: + # self.logger.error('get_data_html exception: {}'.format(e)) + return {} diff --git a/viessmann/webif/static/img/plugin_logo.svg b/viessmann/webif/static/img/plugin_logo.svg old mode 100755 new mode 100644 index 16c50e23d..e1c8a9993 --- a/viessmann/webif/static/img/plugin_logo.svg +++ b/viessmann/webif/static/img/plugin_logo.svg @@ -1 +1,88 @@ -Element 1 \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +Multidevice + diff --git a/viessmann/webif/templates/index.html b/viessmann/webif/templates/index.html old mode 100755 new mode 100644 index d522eeefb..f6aa9dfbe --- a/viessmann/webif/templates/index.html +++ b/viessmann/webif/templates/index.html @@ -1,136 +1,75 @@ - {% extends "base_plugin.html" %} -{% set tabcount = 2 %} -{% set tab1title = _('Viessmann Items') %} -{% set tab2title = _('Alle Datenpunkte') %} -{% set language = p.get_sh().get_defaultlanguage() %} -{% if last_read_cmd != "" %} -{% set start_tab = 3 %} -{% endif %} -{% if language not in ['en','de'] %} -{% set language = 'en' %} -{% endif %} - -{% block pluginscripts %} -/* - * The combined file was created by the DataTables downloader builder: - * https://datatables.net/download - * - * To rebuild or modify this file with the latest versions of the included - * software please visit: - * https://datatables.net/download/#dt/dt-1.10.21/fh-3.1.7/r-2.2.5 - * - * Included libraries: - * DataTables 1.10.21, FixedHeader 3.1.7, Responsive 2.2.5 - */ - - - - {% endblock pluginscripts %} @@ -139,67 +78,139 @@ - - + + - - - - - - - - - - + + - - - - - - - - - - - + + + + +
    {{ _('Serieller Port') }}{{ p._serialport }}{{ _('Aktiv') }}{{ p.alive }} {{ _('Anzahl Items') }}{{ p._params|length }}
    {{ _('Heizungstyp') }}{{ p._heating_type }}{{ _('Verbunden') }}{{ p._connected }}{{ _('Anzahl Geräte') }}{{ p._devices|length }}
    {{ _('Protokoll') }}{{ p._protocol }}{{ _('Verbindung aktiv') }}{{ p._initialized }}
    {{ _('Letzter manuell gelesener Wert') }}{{ last_read_cmd + ": " if last_read_cmd else '---' }} {{ last_read_value }}{{ _('') }} {{ _('Anzahl Items') }}{{ plgitems|length }}
    {% endblock headtable %} + + {% block buttons %} - +{% if 1==2 %} +
    + +
    +{% endif %} {% endblock %} + +{% set tabcount = 2 %} + +{% set tab1title = "" ~ _('Geräte') ~ "" %} {% block bodytab1 %}
    - {% if p._params|length %} +
    + + + {% if devices|length %} + + {% for dev in devices %} + + + + + + + + + + + + + + + {% if devices[dev]['params']|length %} + {% for arg in devices[dev]['params'] %} + {% if not loop.first -%} + + + + {%- endif %} + + + + + {% endfor %} + {% else %} + + + + {% endif %} + + {% endfor %} +
    {{ _('Geräte-ID') }}:{{ dev }} + + +
     {{ _('Geräte-Typ') }}:{{ devices[dev]['device_type'] }} 
     {{ _('Parameter') }}:
      {{ arg }} + ({{ _('Typ') }}: {% if devices[dev]['params'][arg] is sameas true or devices[dev]['params'][arg] is sameas false %}{{ _('Bool') }}{% elif devices[dev]['params'][arg] is number %}{{ _('Zahl') }}{% else %}{{ _('String') }}{% endif %}) +
    {{ _('keine') }} 
     
    + {% endif %} +
    +
    +
    +{% endblock bodytab1 %} + + + + +{% set tab2title = " Items (" ~ plgitems|length ~ ")" %} +{% block bodytab2 %} +
    +
    + {% if plgitems|length %} - + + + + + - {% for commandcode in p._params %} + {% for item in plgitems %} - - - - - - + + + + + + + + + + {% endfor %} @@ -207,58 +218,31 @@ {% endif %} -{% endblock bodytab1 %} +{% endblock bodytab2 %} -{% block bodytab2 %} -
    -
    - {% if cmds|length %} -
    - -
    {{ _('Item') }}{{ _('Datenpunkt') }}{{ _('Gerät') }} {{ _('Befehlsname') }}{{ _('Lesen') }}{{ _('Schreiben') }}{{ _('Init') }}{{ _('Cycle') }} {{ _('Typ') }} {{ _('Wert') }} {{ _('Letzte Aktualisierung') }}
    {{ p._params[commandcode]['item'].path() }}{{ commandcode }}{{ p._params[commandcode]['commandname'] }}{{ p._params[commandcode]['item'].type() }}{{ p._params[commandcode]['item']() }}{{ p._params[commandcode]['item'].last_update() }}{{ item }}{% if 'md_device' in item.conf %}{{ item.conf['md_device'] }}{% endif %}{% if 'md_command' in item.conf %} + {{ item.conf['md_command'] }} + {% elif 'md_read_all' in item.conf %} + {{ _('update_all') }} + {% endif %}{% if 'md_read' in item.conf %}{{ item.conf['md_read'] }}{% endif %}{% if 'md_write' in item.conf %}{{ item.conf['md_write'] }}{% endif %}{% if 'md_read_initial' in item.conf %}{{ item.conf['md_read_initial'] }}{% endif %}{% if 'md_cycle' in item.conf %}{{ item.conf['md_cycle'] }}{% endif %}{{ item.type() }}{{ item() }}{{ item.last_update().strftime('%Y-%m-%d %H:%M:%S') }}
    - - - - - - - - - - - - - - - - - - - - - - {% for cmd in cmds.keys() %} - - - - - - - - - - {% endfor %} - -
    {{ _('Befehlsname') }}{{ _('Datenpunkt') }}{{ _('Länge') }}{{ _('Einheit') }}{{ _('Lesen/Schreiben') }}{{ _('Datenpunkt lesen') }}{{ _('gelesener Wert') }}
    {{ _('_Custom') }} - - False 
    {{ cmd }}{{ cmds[cmd]['addr'] }}{{ cmds[cmd]['len'] }}{{ cmds[cmd]['unit'] }}{{ cmds[cmd]['set'] }} 
    - - {% endif %} + +{% set tab3title = " Items (Baum) (" ~ plgitems|length ~ ")" %} +{% block bodytab3 %} +
    +
    -{% endblock bodytab2 %} +{% endblock bodytab3 %} + + + +{% block bodytab4 %} +{% endblock bodytab4 %} From e7091f94248765ff921d41e622197dfbb59af96e Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 27 Jul 2023 22:35:02 +0200 Subject: [PATCH 212/775] oppo plugin: remove initial read for pureaudio and eject --- oppo/commands.py | 4 ++-- oppo/plugin.yaml | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/oppo/commands.py b/oppo/commands.py index d402f54c9..1c303d97f 100755 --- a/oppo/commands.py +++ b/oppo/commands.py @@ -52,10 +52,10 @@ }, 'control': { 'power': {'read': True, 'write': True, 'read_cmd': '#QPW', 'write_cmd': '#P{VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': ['@POFF OK (OFF)', '@PON OK (ON)', '@QPW OK (ON|OFF)', '@UPW (0|1)'], 'item_attrs': {'initial': True}}, - 'pureaudio': {'read': True, 'write': True, 'write_cmd': '#PUR', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': '@PUR OK (ON|OFF)', 'item_attrs': {'initial': True}}, + 'pureaudio': {'read': True, 'write': True, 'write_cmd': '#PUR', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': '@PUR OK (ON|OFF)'}, 'playpause': {'read': True, 'write': True, 'read_cmd': '#QPL', 'write_cmd': '{VALUE}', 'item_type': 'bool', 'dev_datatype': 'playpause', 'reply_pattern': ['@PLA OK {LOOKUP}$', '@PAU OK {LOOKUP}$'], 'lookup': 'PLAY'}, 'stop': {'read': True, 'write': True, 'read_cmd': '#QPL', 'write_cmd': '#STP', 'item_type': 'bool', 'dev_datatype': 'raw', 'reply_pattern': ['@STP OK (?:(FULL\s)?){LOOKUP}$'], 'lookup': 'STOP'}, - 'eject': {'read': True, 'write': True, 'write_cmd': '#EJT', 'item_type': 'bool', 'dev_datatype': 'openclose', 'reply_pattern': ['@UPL (OPEN|CLOS)', '@EJT OK (OPEN|CLOSE)'], 'item_attrs': {'initial': True, 'enforce': True}}, + 'eject': {'read': True, 'write': True, 'write_cmd': '#EJT', 'item_type': 'bool', 'dev_datatype': 'openclose', 'reply_pattern': ['@UPL (OPEN|CLOS)', '@EJT OK (OPEN|CLOSE)'], 'item_attrs': {'enforce': True}}, 'chapter': {'read': True, 'write': True, 'read_cmd': '#QCH', 'write_cmd': '#SRH C{RAW_VALUE:03}', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': ['@SRH (OK|ER INVALID)', r'@QCH OK (\d{2})/(?:\d{2})']}, 'title': {'read': True, 'write': True, 'read_cmd': '#QTK', 'write_cmd': '#SRH T{RAW_VALUE:03}', 'item_type': 'num', 'dev_datatype': 'raw', 'reply_pattern': [r'@QTK OK (\d{2})/(?:\d{2})', '@SRH (OK|ER INVALID)', r'@UAT (?:[A-Z]{2}) (\d{2})/(?:\d{2}) (?:[A-Z]{3}) (?:[0-7.]{3})']}, 'next': {'read': True, 'write': True, 'write_cmd': '#NXT', 'item_type': 'bool', 'dev_datatype': 'ok', 'reply_pattern': ['@NXT (.*)'], 'item_attrs': {'enforce': True}}, diff --git a/oppo/plugin.yaml b/oppo/plugin.yaml index b6fb35fa4..b4b4da9b8 100755 --- a/oppo/plugin.yaml +++ b/oppo/plugin.yaml @@ -456,7 +456,6 @@ item_structs: oppo_command: control.pureaudio oppo_read: true oppo_write: true - oppo_read_initial: true playpause: type: bool @@ -480,7 +479,6 @@ item_structs: oppo_read: true oppo_write: true enforce_updates: true - oppo_read_initial: true chapter: type: num @@ -971,7 +969,6 @@ item_structs: oppo_command: control.pureaudio oppo_read: true oppo_write: true - oppo_read_initial: true playpause: type: bool @@ -997,7 +994,6 @@ item_structs: oppo_read: true oppo_write: true enforce_updates: true - oppo_read_initial: true chapter: type: num @@ -1490,7 +1486,6 @@ item_structs: oppo_command: control.pureaudio oppo_read: true oppo_write: true - oppo_read_initial: true playpause: type: bool @@ -1516,7 +1511,6 @@ 
item_structs: oppo_read: true oppo_write: true enforce_updates: true - oppo_read_initial: true chapter: type: num From 3368d866ae3cf15ae18f3579f6bbc4a660fb3821 Mon Sep 17 00:00:00 2001 From: aschwith Date: Fri, 28 Jul 2023 22:29:26 +0200 Subject: [PATCH 213/775] solarforecast: Fallback to smarthomeNG lat/long default values if no plugin specific latitude and longitude values are specified. --- solarforecast/__init__.py | 10 ++++++++-- solarforecast/plugin.yaml | 6 ++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/solarforecast/__init__.py b/solarforecast/__init__.py index 7350f3523..34dc5b0d4 100755 --- a/solarforecast/__init__.py +++ b/solarforecast/__init__.py @@ -48,8 +48,14 @@ def __init__(self, sh, *args, **kwargs): self.session = requests.Session() # get the parameters for the plugin (as defined in metadata plugin.yaml): - self.latitude = self.get_parameter_value('latitude') - self.longitude = self.get_parameter_value('longitude') + if self.get_parameter_value('latitude') != '' and self.get_parameter_value('longitude') != '': + self.latitude = self.get_parameter_value('latitude') + self.longitude = self.get_parameter_value('longitude') + else: + self.logger.debug("__init__: latitude and longitude not provided, using shng system values instead.") + self.latitude = self.get_sh()._lat + self.longitude = self.get_sh()._lon + self.declination = self.get_parameter_value('declination') self.azimuth = self.get_parameter_value('azimuth') self.kwp = self.get_parameter_value('kwp') diff --git a/solarforecast/plugin.yaml b/solarforecast/plugin.yaml index 3658ecac8..9df9e2168 100755 --- a/solarforecast/plugin.yaml +++ b/solarforecast/plugin.yaml @@ -23,13 +23,15 @@ plugin: parameters: latitude: type: num - mandatory: True + mandatory: False + default: '' description: de: 'Breitengrad der Solaranlage in dezimalen Grad' en: 'Latitude of solar system in decimal degree' longitude: type: num - mandatory: True + mandatory: False + default: '' description: de: 'Laengengrad der Solaranlage in dezimalen Grad' en: 'Longitude of solar system in decimal degree' From 4b03c15ef1a8a5a68ecfbb7b51fe3e29cd7cd0ec Mon Sep 17 00:00:00 2001 From: aschwith Date: Thu, 3 Aug 2023 08:08:39 +0200 Subject: [PATCH 214/775] solarforecast: fix for plugin.yaml, lat, lon are now interpreted as strings not numbers (backward compatible) --- solarforecast/plugin.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/solarforecast/plugin.yaml b/solarforecast/plugin.yaml index 9df9e2168..ca28a40ea 100755 --- a/solarforecast/plugin.yaml +++ b/solarforecast/plugin.yaml @@ -22,14 +22,14 @@ plugin: parameters: latitude: - type: num + type: str mandatory: False default: '' description: de: 'Breitengrad der Solaranlage in dezimalen Grad' en: 'Latitude of solar system in decimal degree' longitude: - type: num + type: str mandatory: False default: '' description: From 2052d55ff4ae95416bd41897cc330477d6e068f8 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 3 Aug 2023 14:05:26 +0200 Subject: [PATCH 215/775] stateengine plugin: introduce new status function (if e.g. 
lamella items for sending and receiving values are separate) --- stateengine/StateEngineAction.py | 40 ++++++++++---- stateengine/StateEngineCondition.py | 70 ++++++++++++++++++------- stateengine/StateEngineConditionSet.py | 2 +- stateengine/StateEngineWebif.py | 55 ++++++++++++------- stateengine/__init__.py | 4 ++ stateengine/plugin.yaml | 6 +++ stateengine/user_doc/03_regelwerk.rst | 23 ++++---- stateengine/user_doc/05_bedingungen.rst | 11 ++-- stateengine/user_doc/06_aktionen.rst | 7 +++ 9 files changed, 154 insertions(+), 64 deletions(-) diff --git a/stateengine/StateEngineAction.py b/stateengine/StateEngineAction.py index 4f429e431..2cc9e279f 100755 --- a/stateengine/StateEngineAction.py +++ b/stateengine/StateEngineAction.py @@ -369,6 +369,7 @@ class SeActionSetItem(SeActionBase): def __init__(self, abitem, name: str): super().__init__(abitem, name) self.__item = None + self.__status = None self.__value = StateEngineValue.SeValue(self._abitem, "value") self.__mindelta = StateEngineValue.SeValue(self._abitem, "mindelta") self.__function = "set" @@ -427,19 +428,33 @@ def complete(self, item_state, evals_items=None): item = StateEngineTools.find_attribute(self._sh, item_state, "se_eval_" + self._name) self.__item = str(item) + # missing status in action: Try to find it. + if self.__status is None: + status = StateEngineTools.find_attribute(self._sh, item_state, "se_status_" + self._name) + + if status is not None: + self.__status = self._abitem.return_item(status) + if self.__mindelta.is_empty(): mindelta = StateEngineTools.find_attribute(self._sh, item_state, "se_mindelta_" + self._name) if mindelta is not None: self.__mindelta.set(mindelta) - if isinstance(self.__item, str): - pass - elif self.__item is not None: - self.__value.set_cast(self.__item.cast) - self.__mindelta.set_cast(self.__item.cast) - self._scheduler_name = "{}-SeItemDelayTimer".format(self.__item.property.path) - if self._abitem.id == self.__item.property.path: + if self.__status is not None: + self.__value.set_cast(self.__status.cast) + self.__mindelta.set_cast(self.__status.cast) + self._scheduler_name = "{}-SeItemDelayTimer".format(self.__status.property.path) + if self._abitem.id == self.__status.property.path: self._caller += '_self' + elif self.__status is None: + if isinstance(self.__item, str): + pass + elif self.__item is not None: + self.__value.set_cast(self.__item.cast) + self.__mindelta.set_cast(self.__item.cast) + self._scheduler_name = "{}-SeItemDelayTimer".format(self.__item.property.path) + if self._abitem.id == self.__item.property.path: + self._caller += '_self' # Write action to logger def write_to_logger(self): @@ -480,8 +495,13 @@ def real_execute(self, actionname: str, namevar: str = "", repeat_text: str = "" if not self.__mindelta.is_empty(): mindelta = self.__mindelta.get() - # noinspection PyCallingNonCallable - delta = float(abs(self.__item() - value)) + if self.__status is not None: + # noinspection PyCallingNonCallable + delta = float(abs(self.__status() - value)) + additionaltext = "of statusitem " + else: + delta = float(abs(self.__item() - value)) + additionaltext = "" if delta < mindelta: text = "{0}: Not setting '{1}' to '{2}' because delta '{3:.2}' is lower than mindelta '{4}'" self._log_debug(text, actionname, self.__item.property.path, value, delta, mindelta) @@ -752,6 +772,8 @@ def write_to_logger(self): self._log_debug("item from eval: {0}", self.__item) elif self.__item is not None: self._log_debug("item: {0}", self.__item.property.path) + if self.__status is not None: + 
self._log_debug("status: {0}", self.__status.property.path) self.__mindelta.write_to_logger() self.__value.write_to_logger() self._log_debug("force update: yes") diff --git a/stateengine/StateEngineCondition.py b/stateengine/StateEngineCondition.py index 30b25b7df..b4f1908ec 100755 --- a/stateengine/StateEngineCondition.py +++ b/stateengine/StateEngineCondition.py @@ -40,6 +40,7 @@ def __init__(self, abitem, name: str): super().__init__(abitem) self.__name = name self.__item = None + self.__status = None self.__eval = None self.__value = StateEngineValue.SeValue(self._abitem, "value", True) self.__min = StateEngineValue.SeValue(self._abitem, "min") @@ -55,7 +56,7 @@ def __init__(self, abitem, name: str): self.__error = None def __repr__(self): - return "SeCondition 'item': {}, 'eval': {}, 'value': {}".format(self.__item, self.__eval, self.__value) + return "SeCondition 'item': {}, 'status': {}, 'eval': {}, 'value': {}".format(self.__item, self.__status, self.__eval, self.__value) # set a certain function to a given value # func: Function to set ('item', 'eval', 'value', 'min', 'max', 'negate', 'changedby', 'updatedby', @@ -68,6 +69,12 @@ def set(self, func, value): "item without item: at the beginning!", value) _, _, value = value.partition(":") self.__item = self._abitem.return_item(value) + elif func == "se_status": + if ":" in value: + self._log_warning("Your status configuration '{0}' is wrong! Define a plain (relative) " + "item without item: at the beginning!", value) + _, _, value = value.partition(":") + self.__status = self._abitem.return_item(value) elif func == "se_eval": if ":" in value: self._log_warning("Your eval configuration '{0}' is wrong! Define a plain eval " @@ -96,21 +103,25 @@ def set(self, func, value): self.__negate = value elif func == "se_agenegate": self.__agenegate = value - elif func != "se_item" and func != "se_eval": + elif func != "se_item" and func != "se_eval" and func != "se_status": self._log_warning("Function '{0}' is no valid function! 
Please check item attribute.", func) def get(self): - eval_result = str(self.__eval) - if 'SeItem' in eval_result: - eval_result = eval_result.split('SeItem.')[1].split(' ')[0] - if 'SeCurrent' in eval_result: - eval_result = eval_result.split('SeCurrent.')[1].split(' ')[0] + _eval_result = str(self.__eval) + if 'SeItem' in _eval_result: + _eval_result = _eval_result.split('SeItem.')[1].split(' ')[0] + if 'SeCurrent' in _eval_result: + _eval_result = _eval_result.split('SeCurrent.')[1].split(' ')[0] _value_result = str(self.__value.get_for_webif()) try: _item = self.__item.property.path except Exception: _item = self.__item - result = {'item': _item, 'eval': eval_result, 'value': _value_result, + try: + _status = self.__status.property.path + except Exception: + _status = self.__status + result = {'item': _item, 'status': _status, 'eval': _eval_result, 'value': _value_result, 'min': str(self.__min), 'max': str(self.__max), 'agemin': str(self.__agemin), 'agemax': str(self.__agemax), 'negate': str(self.__negate), 'agenegate': str(self.__agenegate), @@ -130,7 +141,7 @@ def complete(self, item_state): return False # set 'eval' for some known conditions if item and eval are not set, yet - if self.__item is None and self.__eval is None: + if self.__item is None and self.__status is None and self.__eval is None: if self.__name == "weekday": self.__eval = StateEngineCurrent.values.get_weekday elif self.__name == "sun_azimut": @@ -188,6 +199,12 @@ def complete(self, item_state): if result is not None: self.__item = self._abitem.return_item(result) + # missing status in condition: Try to find it + if self.__status is None: + result = StateEngineTools.find_attribute(self._sh, item_state, "se_status_" + self.__name) + if result is not None: + self.__status = self._abitem.return_item(result) + # missing eval in condition: Try to find it if self.__eval is None: result = StateEngineTools.find_attribute(self._sh, item_state, "se_eval_" + self.__name) @@ -195,13 +212,13 @@ def complete(self, item_state): self.__eval = result # now we should have either 'item' or 'eval' set. 
If not, raise ValueError - if self.__item is None and self.__eval is None: - raise ValueError("Condition {}: Neither 'item' nor 'eval' given!".format(self.__name)) + if self.__item is None and self.__status is None and self.__eval is None: + raise ValueError("Condition {}: Neither 'item' nor 'status' nor 'eval' given!".format(self.__name)) - if (self.__item is not None or self.__eval is not None)\ + if (self.__item is not None or self.__status is not None or self.__eval is not None)\ and not self.__changedby.is_empty() and self.__changedbynegate is None: self.__changedbynegate = False - if (self.__item is not None or self.__eval is not None)\ + if (self.__item is not None or self.__status is not None or self.__eval is not None)\ and not self.__updatedby.is_empty() and self.__updatedbynegate is None: self.__updatedbynegate = False @@ -209,6 +226,8 @@ def complete(self, item_state): try: if self.__item is not None: self.__cast_all(self.__item.cast) + elif self.__status is not None: + self.__cast_all(self.__status.cast) elif self.__name in ("weekday", "sun_azimut", "sun_altitude", "age", "delay", "random", "month"): self.__cast_all(StateEngineTools.cast_num) elif self.__name in ( @@ -229,15 +248,15 @@ def complete(self, item_state): cond_evalitem = self.__eval and ("get_relative_item(" in self.__eval or "return_item(" in self.__eval) except Exception: cond_evalitem = False - if self.__item is None and not cond_min_max and not cond_evalitem: + if self.__item is None and self.__status is None and not cond_min_max and not cond_evalitem: raise ValueError("Condition {}: 'agemin'/'agemax' can not be used for eval!".format(self.__name)) return True # Check if condition is matching def check(self): # Ignore if no current value can be determined (should not happen as we check this earlier, but to be sure ...) - if self.__item is None and self.__eval is None: - self._log_info("Condition '{0}': No item or eval found! Considering condition as matching!", self.__name) + if self.__item is None and self.__status is None and self.__eval is None: + self._log_info("Condition '{0}': No item, status or eval found! Considering condition as matching!", self.__name) return True self._log_debug("Condition '{0}': Checking all relevant stuff", self.__name) self._log_increase_indent() @@ -266,6 +285,12 @@ def write_to_logger(self): self._log_info("item: {0} ({1})", self.__name, i.property.path) else: self._log_info("item: {0} ({1})", self.__name, self.__item.property.path) + if self.__status is not None: + if isinstance(self.__status, list): + for i in self.__status: + self._log_info("status item: {0} ({1})", self.__name, i.property.path) + else: + self._log_info("status item: {0} ({1})", self.__name, self.__status.property.path) if self.__eval is not None: if isinstance(self.__item, list): for e in self.__item: @@ -541,7 +566,7 @@ def __check_age(self): return True # Ignore if no current value can be determined - if self.__item is None and self.__eval is None: + if self.__item is None and self.__status is None and self.__eval is None: self._log_warning("Age of '{0}': No item/eval found! 
Considering condition as matching!", self.__name) return True @@ -616,7 +641,16 @@ def __check_age(self): # Current value of condition (based on item or eval) def __get_current(self, eval_type='value'): - if self.__item is not None: + if self.__status is not None: + # noinspection PyUnusedLocal + self._log_debug("Trying to get {} of status item {}", eval_type, self.__status) + return self.__status.property.last_change_age if eval_type == 'age' else\ + self.__status.property.last_change_by if eval_type == 'changedby' else\ + self.__status.property.last_update_by if eval_type == 'updatedby' else\ + self.__status.property.value + elif self.__item is not None: + # noinspection PyUnusedLocal + self._log_debug("Trying to get {} of item {}", eval_type, self.__item) return self.__item.property.last_change_age if eval_type == 'age' else\ self.__item.property.last_change_by if eval_type == 'changedby' else\ self.__item.property.last_update_by if eval_type == 'updatedby' else\ diff --git a/stateengine/StateEngineConditionSet.py b/stateengine/StateEngineConditionSet.py index a3a29aaca..56d950512 100755 --- a/stateengine/StateEngineConditionSet.py +++ b/stateengine/StateEngineConditionSet.py @@ -96,7 +96,7 @@ def update(self, item, grandparent_item): continue # update item/eval in this condition - if func == "se_item" or func == "se_eval": + if func == "se_item" or func == "se_eval" or func == "se_status": if name not in self.__conditions: self.__conditions[name] = StateEngineCondition.SeCondition(self._abitem, name) try: diff --git a/stateengine/StateEngineWebif.py b/stateengine/StateEngineWebif.py index 42b127ac1..b88959e24 100755 --- a/stateengine/StateEngineWebif.py +++ b/stateengine/StateEngineWebif.py @@ -154,7 +154,9 @@ def _conditionlabel(self, state, conditionset): for k, condition in enumerate(self.__states[state]['conditionsets'].get(conditionset)): condition_dict = self.__states[state]['conditionsets'][conditionset].get(condition) - item_none = condition_dict.get('item') == 'None' + + item_none = str(condition_dict.get('item')) == 'None' + status_none = str(condition_dict.get('status')) == 'None' eval_none = condition_dict.get('eval') == 'None' value_none = condition_dict.get('value') == 'None' min_none = condition_dict.get('min') == 'None' @@ -172,23 +174,37 @@ def _conditionlabel(self, state, conditionset): cond5 = not compare == 'agenegate' cond6 = not compare == 'changedbynegate' cond7 = not compare == 'updatedbynegate' - if cond1 and cond2 and cond3 and cond4 and cond5 and cond6 and cond7: - conditionlist += '' - textlength = len(str(condition_dict.get('item'))) - condition_tooltip += '{} '.format(condition_dict.get('item')) \ - if textlength > self.__textlimit else '' - info_item = str(condition_dict.get('item'))[:self.__textlimit] + '..  ' * (textlength > self.__textlimit) - info_eval = str(condition_dict.get('eval'))[:self.__textlimit] + '..  
' * (textlength > self.__textlimit) + cond8 = not compare == 'status' + if cond1 and cond2 and cond3 and cond4 and cond5 and cond6 and cond7 and cond8: + conditionlist += ''.format(compare, condition_dict.get(compare)) + if not status_none: + textlength = len(str(condition_dict.get('status'))) + condition_tooltip += '{} '.format(condition_dict.get('status')) \ + if textlength > self.__textlimit else '' + elif not item_none: + textlength = len(str(condition_dict.get('item'))) + condition_tooltip += '{} '.format(condition_dict.get('item')) \ + if textlength > self.__textlimit else '' + elif not eval_none: + textlength = len(str(condition_dict.get('eval'))) + condition_tooltip += '{} '.format(condition_dict.get('eval')) \ + if textlength > self.__textlimit else '' + else: + textlength = 0 + info_item = str(condition_dict.get('item'))[:self.__textlimit] + '..  ' * int(textlength > self.__textlimit) + info_status = str(condition_dict.get('status'))[:self.__textlimit] + '..  ' * int(textlength > self.__textlimit) + info_eval = str(condition_dict.get('eval'))[:self.__textlimit] + '..  ' * int(textlength > self.__textlimit) info_value = str(condition_dict.get(compare))[:self.__textlimit] + '..  ' * \ - (len(str(condition_dict.get(compare))) > self.__textlimit) - info = info_eval if info_item == "None" and info_eval != "None" else info_item - conditionlist += '{}'.format(info) if not item_none else '' - textlength = len(str(condition_dict.get('eval'))) - condition_tooltip += '{} '.format(condition_dict.get('eval')) \ - if textlength > self.__textlimit else '' - info = info_value if info_item == "None" and info_eval != "None" else info_eval - conditionlist += '{}'.format(info) if not eval_none and item_none else '' - conditionlist += '' + int(len(str(condition_dict.get(compare))) > self.__textlimit) + if not status_none: + info = info_status + elif not item_none: + info = info_item + elif not eval_none: + info = info_eval + else: + info = "" + conditionlist += '{}'.format(info) comparison = ">=" if not min_none and compare == "min"\ else "<=" if not max_none and compare == "max"\ else "older" if not agemin_none and compare == "agemin"\ @@ -203,13 +219,14 @@ def _conditionlabel(self, state, conditionset): and condition_dict.get('negate') == 'True')\ else "==" conditionlist += '{}'.format(comparison) - conditionlist += '"{}"'.format(info) if not item_none and not eval_none else '' + conditionlist += '"{}"'.format(info) if not item_none and not status_none and not eval_none else '' textlength = len(str(condition_dict.get(compare))) condition_tooltip += '{} '.format(condition_dict.get(compare)) \ if textlength > self.__textlimit else '' info = info_value conditionlist += '{}'.format(info) if not condition_dict.get(compare) == 'None' and ( - (eval_none and not item_none) or (not eval_none and item_none)) else '' + (eval_none and not item_none) or (eval_none and not status_none) or \ + (not eval_none and item_none) or (not eval_none and status_none)) else '' conditionlist += ' (negate)' if condition_dict.get('negate') == 'True' and "age" \ not in compare and not compare == "value" else '' conditionlist += ' (negate)' if condition_dict.get('agenegate') == 'True' and "age" in compare else '' diff --git a/stateengine/__init__.py b/stateengine/__init__.py index ce2382c9d..0455cf18a 100755 --- a/stateengine/__init__.py +++ b/stateengine/__init__.py @@ -96,6 +96,10 @@ def parse_item(self, item): item.expand_relativepathes('se_item_*', '', '') except Exception: pass + try: + 
item.expand_relativepathes('se_status_*', '', '') + except Exception: + pass if self.has_iattr(item.conf, "se_manual_include") or self.has_iattr(item.conf, "se_manual_exclude"): item._eval = "sh.stateengine_plugin_functions.manual_item_update_eval('" + item.id() + "', caller, source)" elif self.has_iattr(item.conf, "se_manual_invert"): diff --git a/stateengine/plugin.yaml b/stateengine/plugin.yaml index 5c2cae2ba..ec6da280d 100755 --- a/stateengine/plugin.yaml +++ b/stateengine/plugin.yaml @@ -1359,6 +1359,12 @@ item_attribute_prefixes: de: 'Definiert das Item, das in einem konkreten Zustand evaluiert oder geändert werden soll' en: 'Definition of an item that should be evaluated or changed in a specific state' + se_status_: + type: foo + description: + de: 'Definiert das Item, das in einem konkreten Zustand evaluiert werden soll' + en: 'Definition of an item that should be evaluated in a specific state' + se_eval_: type: foo description: diff --git a/stateengine/user_doc/03_regelwerk.rst b/stateengine/user_doc/03_regelwerk.rst index 8718ac663..ab841d13c 100755 --- a/stateengine/user_doc/03_regelwerk.rst +++ b/stateengine/user_doc/03_regelwerk.rst @@ -43,29 +43,28 @@ das Attribute ``se_plugin`` auf inactive zu setzen: Item-Definitionen ----------------- -Bedingungen und Aktionen beziehen sich überlicherweise auf Items wie beispielsweise +Bedingungen und Aktionen beziehen sich üblicherweise auf Items wie beispielsweise die Höhe einer Jalousie oder die Außenhelligkeit. Diese Items müssen auf Ebene des Regelwerk-Items über das Attribut -``se_item_`` bekannt gemacht werden. +``se_item_`` bekannt gemacht werden. Um einfacher zwischen Items, +die für Bedingungen und solchen, die für Aktionen genutzt werden, unterscheiden zu können, +können Items, die nur für Bedingungen gebraucht werden, mittels ``se_status_`` +deklariert werden. Diese Variante ist auch besonders dann relevant, wenn es zwei separate Items +für "Senden" und "Empfangen" gibt, also z.B. Senden der Jalousiehöhe und Empfangen des aktuellen +Werts vom KNX-Aktor. Anstatt direkt das Item in Form des absoluten oder relativen Pfades mittels ``se_item_`` zu setzen, kann auch die Angabe ``se_eval_`` genutzt werden. In diesem Fall wird eine beliebige -Funktion anstelle des Itemnamen angegeben. Dies ist sowohl für Bedingungsabfragen, -als auch für das Setzen von "dynamischen" Items möglich. +Funktion anstelle des Itemnamen angegeben. Dies ist primär für das Setzen von "dynamischen" Items +gedacht, allerdings ist es auch möglich, hier einen beliebigen Eval-Ausdruck als Bedingung festzulegen. -An dieser Stelle ist es auch möglich, über ``se_mindelta_`` zu definieren, um welchen Wert -sich ein Item mindestens geändert haben muss, um neu gesetzt zu werden. Siehe auch :ref:`Aktionen`. - -Außerdem ist es möglich, über ``se_repeat_actions`` generell zu definieren, -ob Aktionen für die Stateengine wiederholt ausgeführt werden sollen oder nicht. Diese Konfiguration -kann für einzelne Aktionen individuell über die Angabe ``repeat`` überschrieben werden. Siehe auch :ref:`Aktionen`. Beispiel se_item ================ Im Beispiel wird durch ``se_item_height`` das Item ``beispiel.raffstore1.hoehe`` dem Plugin unter dem Namen "height" bekannt gemacht. Das Item ``beispiel.wetterstation.helligkeit`` -wird durch ``se_item_brightness`` als "brightness" referenziert. +wird durch ``se_item_brightness`` (alternativ via ``se_status_brightness``) als "brightness" referenziert. Auf diese Namen beziehen sich nun in weiterer Folge Bedingungen und Aktionen. 
Im Beispiel wird im Zustand Nacht das Item ``beispiel.raffstore1.hoehe`` auf den Wert 100 gesetzt, sobald @@ -95,7 +94,7 @@ und Aktionen folgen auf den nächsten Seiten. Beispiel se_eval ================ -se_eval ist für Sonderfälle und etwas komplexere Konfiurationen sinnvoll, kann aber +se_eval ist für Sonderfälle und etwas komplexere Konfigurationen sinnvoll, kann aber im ersten Durchlauf ignoriert werden. Es wird daher empfohlen, als Beginner dieses Beispiel einfach zu überspringen ;) diff --git a/stateengine/user_doc/05_bedingungen.rst b/stateengine/user_doc/05_bedingungen.rst index 031914e3a..bcc4bae13 100755 --- a/stateengine/user_doc/05_bedingungen.rst +++ b/stateengine/user_doc/05_bedingungen.rst @@ -10,7 +10,7 @@ Beispiel -------- Im folgenden Beispiel wird der Zustand "Daemmerung" eingenommen, sobald -die Helligkeit (über se_item_brightness definiert) über 500 Lux liegt. +die Helligkeit (über se_item_brightness oder se_status_brightness definiert) über 500 Lux liegt. .. code-block:: yaml @@ -19,7 +19,7 @@ die Helligkeit (über se_item_brightness definiert) über 500 Lux liegt. automatik: struct: stateengine.general rules: - se_item_brightness: beispiel.wetterstation.helligkeit + se_status_brightness: beispiel.wetterstation.helligkeit Daemmerung: name: Dämmerung remark: @@ -62,7 +62,7 @@ Der zu vergleichende Wert einer Bedingung kann auf folgende Arten definiert werd - statischer Wert (also z.B. 500 Lux). Wird angegegeben mit ``value:500``, wobei das value: auch weggelassen werden kann. - Item (beispielsweise ein Item namens settings.helligkeitsschwellwert). Wird angegeben mit ``item:settings.helligkeitsschwellwert`` - Eval-Funktion (siehe auch `eval Ausdrücke `_). Wird angegeben mit ``eval:1*2*se_eval.get_relative_itemvalue('..bla')`` -- Regular Expression (siehe auch ` RegEx Howto `_) - Vergleich mittels re.fullmatch, wobei Groß/Kleinschreibung ignoriert wird. Wird angegeben mit ``regex:StateEngine Plugin:(.*)`` +- Regular Expression (siehe auch ` RegEx Howto `_) - Vergleich mittels re.fullmatch, wobei Groß/Kleinschreibung ignoriert wird. Wird angegeben mit ``regex:StateEngine Plugin:(.*)`` - Template: eine Vorlage, z.B. eine eval Funktion, die immer wieder innerhalb des StateEngine Items eingesetzt werden kann. Angegeben durch ``template:`` @@ -75,7 +75,8 @@ die jeweils mit einem Unterstrich "_" getrennt werden: - ``se_``: eindeutiger Prefix, um dem Plugin zugeordnet zu werden - ````: siehe unten. Beispiel: min = der Wert des muss mindestens dem beim Attribut angegebenen Wert entsprechen. -- ````: Hier wird entweder das im Regelwerk-Item mittels ``se_item_`` deklarierte Item oder eine besondere Bedingung (siehe unten) referenziert. +- ````: Hier wird entweder das im Regelwerk-Item mittels ``se_item_`` +oder ``se_status_`` deklarierte Item oder eine besondere Bedingung (siehe unten) referenziert. Templates für Bedingungsabfragen @@ -83,7 +84,7 @@ Templates für Bedingungsabfragen Setzt man für mehrere Bedingungsabfragen (z.B. Helligkeit, Temperatur, etc.) immer die gleichen Ausdrücke ein (z.B. eine eval-Funktion), so kann Letzteres als Template -definiert und referenziert werden. Dadurch wird die Handhabung +definiert und referenziert werden. Dadurch wird die Handhabung komplexerer Abfragen deutlich vereinfacht. Diese Templates müssen wie se_item/se_eval auf höchster Ebene des StateEngine Items (also z.B. rules) deklariert werden. 
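
A minimal sketch of the attribute scheme described above, assuming the condition set is defined in an ``enter`` child item of the state: ``se_min_brightness: 500`` requires the value of the item declared as ``brightness`` (here via ``se_status_brightness``) to be at least 500 Lux before the ``Daemmerung`` state can be entered.

.. code-block:: yaml

    automatik:
        struct: stateengine.general
        rules:
            se_status_brightness: beispiel.wetterstation.helligkeit
            Daemmerung:
                name: Dämmerung
                enter:
                    # brightness must be at least 500 (Lux) to enter this state
                    se_min_brightness: 500
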
diff --git a/stateengine/user_doc/06_aktionen.rst b/stateengine/user_doc/06_aktionen.rst index a663f0889..95b75479e 100755 --- a/stateengine/user_doc/06_aktionen.rst +++ b/stateengine/user_doc/06_aktionen.rst @@ -24,6 +24,12 @@ stehenden Beispiel wird der Lamellenwert abhängig vom Sonnenstand berechnet. Oh würden sich die Lamellen ständig um wenige Grad(bruchteile) ändern. Wird jedoch mindelta beispielsweise auf den Wert 10 gesetzt, findet eine Änderung erst statt, wenn sich der errechnete Wert um mindestens 10 Grad vom aktuellen Lamellenwert unterscheidet. +Im Beispiel wird auch mittels ``se_status_`` ein gesondertes Item definiert, +das den Wert vom KNX-Aktor empfängt. + +Außerdem ist es möglich, über ``se_repeat_actions`` generell zu definieren, +ob Aktionen für die Stateengine wiederholt ausgeführt werden sollen oder nicht. Diese Konfiguration +kann für einzelne Aktionen individuell über die Angabe ``repeat`` überschrieben werden. Siehe auch :ref:`Aktionen`. Beispiel zu Aktionen -------------------- @@ -43,6 +49,7 @@ Das folgende Beispiel führt je nach Zustand folgende Aktionen aus: rules: se_item_height: raffstore1.hoehe # Definition des zu ändernden Höhe-Items se_item_lamella: raffstore1.lamelle # Definition des zu ändernden Lamellen-Items + se_status_lamella: raffstore1.lamelle.status # Definition des Lamellen Statusitems se_mindelta_lamella: 10 # Mindeständerung von 10 Grad, sonst werden die Lamellen nicht aktualisiert. Daemmerung: <...> From 58dd8e8f0fd3e3d4bccfd5dbf52685462392ceff Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 3 Aug 2023 14:06:50 +0200 Subject: [PATCH 216/775] stateengine plugin: add status log message --- stateengine/StateEngineAction.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stateengine/StateEngineAction.py b/stateengine/StateEngineAction.py index 2cc9e279f..3962336d7 100755 --- a/stateengine/StateEngineAction.py +++ b/stateengine/StateEngineAction.py @@ -463,6 +463,8 @@ def write_to_logger(self): self._log_debug("item from eval: {0}", self.__item) elif self.__item is not None: self._log_debug("item: {0}", self.__item.property.path) + if self.__status is not None: + self._log_debug("status: {0}", self.__status.property.path) self.__mindelta.write_to_logger() self.__value.write_to_logger() From f297c2433e5238a569ba6f9e660113091bd054c9 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 3 Aug 2023 14:10:17 +0200 Subject: [PATCH 217/775] stateengine plugin: some bug fixes and minor adjustments --- stateengine/StateEngineAction.py | 2 +- stateengine/StateEngineActions.py | 2 +- stateengine/StateEngineCondition.py | 10 +++++++++- stateengine/StateEngineValue.py | 10 +++++----- stateengine/StateEngineWebif.py | 2 +- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/stateengine/StateEngineAction.py b/stateengine/StateEngineAction.py index 3962336d7..a5ba925dd 100755 --- a/stateengine/StateEngineAction.py +++ b/stateengine/StateEngineAction.py @@ -295,7 +295,7 @@ def execute(self, is_repeat: bool, allow_item_repeat: bool, state): try: state.update_name(state.state_item) _key_name = ['{}'.format(state.id), 'name'] - self.update_webif(_key_name, state.name) + self._abitem.update_webif(_key_name, state.name) _key = ['{}'.format(state.id), 'actions_leave', '{}'.format(self._name), 'delay'] self._abitem.update_webif(_key, _delay_info) except Exception: diff --git a/stateengine/StateEngineActions.py b/stateengine/StateEngineActions.py index 229c1464f..e6da5db49 100755 --- a/stateengine/StateEngineActions.py +++ 
b/stateengine/StateEngineActions.py @@ -390,7 +390,7 @@ def set(self, value): # item_allow_repeat: Is repeating actions generally allowed for the item? # state: state item triggering the action # additional_actions: SeActions-Instance containing actions which should be executed, too - def execute(self, is_repeat: bool, allow_item_repeat: bool, state: str, additional_actions=None): + def execute(self, is_repeat: bool, allow_item_repeat: bool, state, additional_actions=None): actions = [] for name in self.__actions: actions.append((self.__actions[name].get_order(), self.__actions[name])) diff --git a/stateengine/StateEngineCondition.py b/stateengine/StateEngineCondition.py index b4f1908ec..dd280d120 100755 --- a/stateengine/StateEngineCondition.py +++ b/stateengine/StateEngineCondition.py @@ -334,6 +334,9 @@ def __cast_all(self, cast_func): def __change_update_value(self, value, valuetype): def __convert(convert_value, convert_current): + if convert_value is None: + self._log_develop("Ignoring value None for conversion") + return convert_value, convert_current _oldvalue = convert_value try: if isinstance(convert_value, re._pattern_type): @@ -342,14 +345,19 @@ def __convert(convert_value, convert_current): if isinstance(convert_value, re.Pattern): return convert_value, convert_current if isinstance(convert_current, bool): + self.__value.set_cast(StateEngineTools.cast_bool) convert_value = StateEngineTools.cast_bool(convert_value) elif isinstance(convert_current, int): + self.__value.set_cast(StateEngineTools.cast_num) convert_value = int(StateEngineTools.cast_num(convert_value)) elif isinstance(convert_current, float): + self.__value.set_cast(StateEngineTools.cast_num) convert_value = StateEngineTools.cast_num(convert_value) * 1.0 elif isinstance(convert_current, list): + self.__value.set_cast(StateEngineTools.cast_list) convert_value = StateEngineTools.cast_list(convert_value) else: + self.__value = str(convert_value) convert_value = str(convert_value) convert_current = str(convert_current) if not type(_oldvalue) == type(convert_value): @@ -470,7 +478,7 @@ def __check_value(self): for i, _ in enumerate(min_value): min = None if min_value[i] == 'novalue' else min_value[i] max = None if max_value[i] == 'novalue' else max_value[i] - self._log_debug("Checking minvalue {} and maxvalue {}", min, max) + self._log_debug("Checking minvalue {} ({}) and maxvalue {}({}) against current {}({})", min, type(min), max, type(max), current, type(current)) if min is not None and max is not None and min > max: min, max = max, min self._log_warning("Condition {}: min must not be greater than max! 
" diff --git a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py index 0dfe64255..931037566 100755 --- a/stateengine/StateEngineValue.py +++ b/stateengine/StateEngineValue.py @@ -259,8 +259,8 @@ def set(self, value, name="", reset=True, item=None): self.__template) s = None try: - cond1 = s.isdigit() - cond2 = field_value[i].isdigit() + cond1 = s.lstrip('-').replace('.','',1).isdigit() + cond2 = field_value[i].lstrip('-').replace('.','',1).isdigit() except Exception: cond1 = False cond2 = False @@ -381,9 +381,9 @@ def write_to_logger(self): if isinstance(self.__value, list): for i in self.__value: if i is not None: - self._log_debug("{0}: {1}", self.__name, i) + self._log_debug("{0}: {1} ({2})", self.__name, i, type(i)) else: - self._log_debug("{0}: {1}", self.__name, self.__value) + self._log_debug("{0}: {1} ({2})", self.__name, self.__value, type(self.__value)) if self.__regex is not None: if isinstance(self.__regex, list): for i in self.__regex: @@ -619,7 +619,7 @@ def __get_eval(self): self.__listorder[self.__listorder.index('eval:{}'.format(self.__eval))] = _newvalue values = _newvalue self._log_decrease_indent() - self._log_debug("Eval result: {0}.", values) + self._log_debug("Eval result: {0} ({1}).", values, type(values)) self._log_increase_indent() except Exception as ex: self._log_decrease_indent() diff --git a/stateengine/StateEngineWebif.py b/stateengine/StateEngineWebif.py index b88959e24..3f5afcd6e 100755 --- a/stateengine/StateEngineWebif.py +++ b/stateengine/StateEngineWebif.py @@ -52,7 +52,7 @@ def __init__(self, smarthome, abitem): fontname='Helvetica', fontsize='10') self.__nodes = {} self.__scalefactor = 0.1 - self.__textlimit = 145 + self.__textlimit = 105 self.__conditionset_count = 0 def __repr__(self): From 3945a6b365fbc25512c350b2a1c46817e8ded85d Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 3 Aug 2023 14:11:32 +0200 Subject: [PATCH 218/775] stateengine plugin: improve handling of min_delta and it's integration in the webif visu --- stateengine/StateEngineAction.py | 27 +++++++++++++++++++-------- stateengine/StateEngineWebif.py | 14 +++++++++++--- 2 files changed, 30 insertions(+), 11 deletions(-) diff --git a/stateengine/StateEngineAction.py b/stateengine/StateEngineAction.py index a5ba925dd..6f95fe98c 100755 --- a/stateengine/StateEngineAction.py +++ b/stateengine/StateEngineAction.py @@ -370,6 +370,7 @@ def __init__(self, abitem, name: str): super().__init__(abitem, name) self.__item = None self.__status = None + self.__delta = 0 self.__value = StateEngineValue.SeValue(self._abitem, "value") self.__mindelta = StateEngineValue.SeValue(self._abitem, "mindelta") self.__function = "set" @@ -504,9 +505,11 @@ def real_execute(self, actionname: str, namevar: str = "", repeat_text: str = "" else: delta = float(abs(self.__item() - value)) additionaltext = "" + + self.__delta = delta if delta < mindelta: - text = "{0}: Not setting '{1}' to '{2}' because delta '{3:.2}' is lower than mindelta '{4}'" - self._log_debug(text, actionname, self.__item.property.path, value, delta, mindelta) + text = "{0}: Not setting '{1}' to '{2}' because delta {3}'{4:.2}' is lower than mindelta '{5}'" + self._log_debug(text, actionname, self.__item.property.path, value, additionaltext, delta, mindelta) return self._execute_set_add_remove(actionname, namevar, repeat_text, self.__item, value, current_condition, previous_condition, previousstate_condition) @@ -520,13 +523,21 @@ def _execute_set_add_remove(self, actionname, namevar, repeat_text, item, value, def get(self): 
try: - item = str(self.__item.property.path) + _item = str(self.__item.property.path) except Exception: - item = str(self.__item) - return {'function': str(self.__function), 'item': item, - 'value': str(self.__value.get()), 'conditionset': str(self.conditionset.get()), - 'previousconditionset': str(self.previousconditionset.get()), - 'previousstate_conditionset': str(self.previousstate_conditionset.get())} + _item = str(self.__item) + _mindelta = self.__mindelta.get() + if _mindelta is None: + return {'function': str(self.__function), 'item': _item, + 'value': str(self.__value.get()), 'conditionset': str(self.conditionset.get()), + 'previousconditionset': str(self.previousconditionset.get()), + 'previousstate_conditionset': str(self.previousstate_conditionset.get())} + else: + return {'function': str(self.__function), 'item': _item, + 'value': str(self.__value.get()), 'conditionset': str(self.conditionset.get()), + 'previousconditionset': str(self.previousconditionset.get()), + 'previousstate_conditionset': str(self.previousstate_conditionset.get()), + 'delta': str(self.__delta), 'mindelta': str(_mindelta)} # Class representing a single "se_setbyattr" action diff --git a/stateengine/StateEngineWebif.py b/stateengine/StateEngineWebif.py index 3f5afcd6e..75b6dcd06 100755 --- a/stateengine/StateEngineWebif.py +++ b/stateengine/StateEngineWebif.py @@ -66,6 +66,8 @@ def _actionlabel(self, state, label_type, conditionset, previousconditionset, pr for action in self.__states[state].get(label_type): _repeat = self.__states[state][label_type][action].get('repeat') _delay = self.__states[state][label_type][action].get('delay') or 0 + _delta = self.__states[state][label_type][action].get('delta') or 0 + _mindelta = self.__states[state][label_type][action].get('mindelta') or 0 condition_necessary = 0 condition_met = True condition_count = 0 @@ -112,8 +114,9 @@ def _actionlabel(self, state, label_type, conditionset, previousconditionset, pr condition_met = False cond1 = conditionset in ['', self.__active_conditionset] and state == self.__active_state cond2 = self.__states[state]['conditionsets'].get(conditionset) is not None - fontcolor = "white" if cond1 and cond2 and (not condition_met or (_repeat is False and originaltype == 'actions_stay'))\ - else "#5c5646" if _delay > 0 else "darkred" if _delay < 0 else "black" + cond_delta = float(_delta) < float(_mindelta) + fontcolor = "white" if cond1 and cond2 and (cond_delta or (not condition_met or (_repeat is False and originaltype == 'actions_stay')))\ + else "#5c5646" if _delay > 0 else "darkred" if _delay < 0 else "#303030" if not condition_met else "black" condition_info = condition_to_meet if condition1 is False\ else previouscondition_to_meet if condition2 is False\ else previousstate_condition_to_meet if condition3 is False\ @@ -121,12 +124,17 @@ def _actionlabel(self, state, label_type, conditionset, previousconditionset, pr additionaltext = " ({} not met)".format(condition_info) if not condition_met\ else " (no repeat)" if _repeat is False and originaltype == 'actions_stay'\ else " (delay: {})".format(_delay) if _delay > 0\ - else " (wrong delay!)" if _delay < 0 else "" + else " (wrong delay!)" if _delay < 0\ + else " (delta {} < {})".format(_delta, _mindelta) if cond_delta and cond1 and cond2\ + else "" action1 = self.__states[state][label_type][action].get('function') if action1 == 'set': action2 = self.__states[state][label_type][action].get('item') value_check = self.__states[state][label_type][action].get('value') value_check = '""' if 
value_check == "" else value_check + is_number = value_check.lstrip('-').replace('.','',1).isdigit() + if is_number and "." in value_check: + value_check = round(float(value_check), 2) action3 = 'to {}'.format(value_check) elif action1 == 'special': action2 = self.__states[state][label_type][action].get('special') From b1440090b0ec7043d43e98faab192a6f1fca7e9b Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 3 Aug 2023 14:12:10 +0200 Subject: [PATCH 219/775] stateengine plugin: remove unneccessary code in web interface index.html --- stateengine/webif/templates/index.html | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/stateengine/webif/templates/index.html b/stateengine/webif/templates/index.html index b6b70a42f..6c5fb3be1 100755 --- a/stateengine/webif/templates/index.html +++ b/stateengine/webif/templates/index.html @@ -35,19 +35,8 @@ @@ -277,7 +295,6 @@ {% block headtable %} - @@ -397,7 +414,7 @@ class="btn-sm btn-secondary" type="button" title="{{ 'TestButton' }}" - onclick="$('#button').val('{{ item.property.path }}');$('#button_pressed').submit();" + onclick="$('#button').val('{{ item }}');$('#button_pressed').submit();" > T From 94d592735ec4d7decc2eac1e44c85c2ceef5eaf8 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Mon, 7 Aug 2023 11:52:11 +0200 Subject: [PATCH 233/775] DB_ADDON: - add item_attributes_master.py --- db_addon/item_attributes_master.py | 299 +++++++++++++++++++++++++++++ 1 file changed, 299 insertions(+) create mode 100644 db_addon/item_attributes_master.py diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py new file mode 100644 index 000000000..00b54a8cf --- /dev/null +++ b/db_addon/item_attributes_master.py @@ -0,0 +1,299 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# AVM for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +import ruamel.yaml + +FILENAME_ATTRIBUTES = 'item_attributes.py' + +FILENAME_PLUGIN = 'plugin.yaml' + +ITEM_ATTRIBUTES = { + 'db_addon_fct': { + 'verbrauch_heute': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, + 'verbrauch_woche': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, + 'verbrauch_monat': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, + 'verbrauch_jahr': {'cat': 'verbrauch', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, + 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages davor)'}, + 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, + 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, + 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, + 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, + 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, + 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, + 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, + 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, + 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, + 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, + 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, + 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, + 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 
'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -3 Monate'}, + 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, + 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, + 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, + 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, + 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, + 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, + 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'sub_cat': 'rolling', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, + 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, + 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren'}, + 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'sub_cat': 'jahrzeit', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren'}, + 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, + 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, + 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, + 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, + 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, + 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, + 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, + 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, + 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, + 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, + 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, + 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, + 'minmax_last_24h_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, + 'minmax_last_24h_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, + 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, + 'minmax_last_7d_min': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_max': {'cat': 'wertehistorie', 'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_avg': {'cat': 'wertehistorie', 
'sub_cat': 'last', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, + 'minmax_heute_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, + 'minmax_heute_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_heute_avg': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Durschnittswert seit Tagesbeginn'}, + 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, + 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, + 'minmax_woche_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, + 'minmax_woche_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, + 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 
'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, + 'minmax_monat_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, + 'minmax_monat_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, + 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, + 'minmax_jahr_min': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, + 'minmax_jahr_max': {'cat': 'wertehistorie', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, + 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'sub_cat': 'onchange', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, + 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, + 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, + 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'sub_cat': 'timeframe', 'item_type': 'num', 'calc': 'daily', 'params': False, 
'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, + 'serie_minmax_monat_min_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_max_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Mittelwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_woche_min_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_max_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_tag_min_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_max_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'sub_cat': 'minmax', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, + 'serie_verbrauch_tag_30d': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, + 'serie_verbrauch_woche_30w': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, + 'serie_verbrauch_monat_18m': {'cat': 'serie', 'sub_cat': 'verbrauch', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, + 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, + 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, + 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 'sub_cat': 'zaehler', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, + 'serie_waermesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, + 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'sub_cat': 'summe', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, + 'serie_tagesmittelwert_0d': {'cat': 'serie', 'sub_cat': 'mittel_d', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 
'Tagesmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'sub_cat': 'mittel_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_stunde_30_0d': {'cat': 'serie', 'sub_cat': 'mittel_h1', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'sub_cat': 'mittel_d_h', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, + 'general_oldest_value': {'cat': 'gen', 'sub_cat': None, 'item_type': 'num', 'calc': 'no', 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_log': {'cat': 'gen', 'sub_cat': None, 'item_type': 'list', 'calc': 'no', 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, + 'kaeltesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'waermesumme': {'cat': 'complex', 'sub_cat': 'summe', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'gruenlandtempsumme': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, + 'tagesmitteltemperatur': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, + 'wachstumsgradtage': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, + 'db_request': {'cat': 'complex', 'sub_cat': None, 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, + 'minmax': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für einen bestimmen Zeitraum: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory)'}, + 'minmax_last': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen min/max/avg Wert für ein bestimmtes Zeitfenster von jetzt zurück: db_addon_params: (func=mandatory, timeframe=mandatory, start=mandatory, end=mandatory)'}, + 'verbrauch': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Verbrauchswert für einen bestimmen Zeitraum: db_addon_params: (timeframe=mandatory, start=mandatory 
end=mandatory)'}, + 'zaehlerstand': {'cat': 'complex', 'sub_cat': None, 'item_type': 'num', 'calc': 'timeframe', 'params': True, 'description': 'Berechnet einen Zählerstand für einen bestimmen Zeitpunkt: db_addon_params: (timeframe=mandatory, start=mandatory)'}, + }, + 'db_addon_info': { + 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': 'no', 'params': False, 'description': 'Version der verbundenen Datenbank'}, + }, + 'db_addon_admin': { + 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, + 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, + 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': 'no', 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, + }, +} + +FILE_HEADER = """\ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# DatabaseAddOn for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# +# +# THIS FILE IS AUTOMATICALLY CREATED BY USING item_attributes_master.py +# +# +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +""" + +def get_attrs(sub_dict: dict = {}) -> list: + attributes = [] + for entry in ITEM_ATTRIBUTES: + for db_addon_fct in ITEM_ATTRIBUTES[entry]: + if sub_dict.items() <= ITEM_ATTRIBUTES[entry][db_addon_fct].items(): + attributes.append(db_addon_fct) + return attributes + +def export_item_attributes_py(): + ATTRS = dict() + ATTRS['ALL_ONCHANGE_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'onchange'}) + ATTRS['ALL_DAILY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'daily'}) + ATTRS['ALL_WEEKLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'weekly'}) + ATTRS['ALL_MONTHLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'monthly'}) + ATTRS['ALL_YEARLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'yearly'}) + ATTRS['ALL_NEED_PARAMS_ATTRIBUTES'] = get_attrs(sub_dict={'params': True}) + ATTRS['ALL_VERBRAUCH_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'verbrauch'}) + ATTRS['VERBRAUCH_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'onchange'}) + ATTRS['VERBRAUCH_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'timeframe'}) + ATTRS['VERBRAUCH_ATTRIBUTES_ROLLING'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'rolling'}) + ATTRS['VERBRAUCH_ATTRIBUTES_JAHRESZEITRAUM'] = get_attrs(sub_dict={'cat': 'verbrauch', 'sub_cat': 'jahrzeit'}) + ATTRS['ALL_ZAEHLERSTAND_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'zaehler'}) + ATTRS['ZAEHLERSTAND_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'zaehler', 'sub_cat': 'timeframe'}) + ATTRS['ALL_HISTORIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'wertehistorie'}) + ATTRS['HISTORIE_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'onchange'}) + ATTRS['HISTORIE_ATTRIBUTES_LAST'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'last'}) + ATTRS['HISTORIE_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'wertehistorie', 'sub_cat': 'timeframe'}) + ATTRS['ALL_TAGESMITTEL_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'tagesmittel'}) + ATTRS['TAGESMITTEL_ATTRIBUTES_ONCHANGE'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'onchange'}) + ATTRS['TAGESMITTEL_ATTRIBUTES_TIMEFRAME'] = get_attrs(sub_dict={'cat': 'tagesmittel', 'sub_cat': 'timeframe'}) + ATTRS['ALL_SERIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'serie'}) + ATTRS['SERIE_ATTRIBUTES_MINMAX'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'minmax'}) + ATTRS['SERIE_ATTRIBUTES_ZAEHLERSTAND'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'zaehler'}) + ATTRS['SERIE_ATTRIBUTES_VERBRAUCH'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'verbrauch'}) + ATTRS['SERIE_ATTRIBUTES_SUMME'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'summe'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_D'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_d'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_H'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_H1'] = get_attrs(sub_dict={'cat': 'serie', 'sub_cat': 'mittel_h1'}) + ATTRS['SERIE_ATTRIBUTES_MITTEL_D_H'] = get_attrs(sub_dict={'cat': 
'serie', 'sub_cat': 'mittel_d_h'}) + ATTRS['ALL_GEN_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'gen'}) + ATTRS['ALL_COMPLEX_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'complex'}) + + # create file and write header + f = open(FILENAME_ATTRIBUTES, "w") + f.write(FILE_HEADER) + f.close() + + # write avm_data_types + for attr, alist in ATTRS.items(): + with open(FILENAME_ATTRIBUTES, "a") as f: + print(f'{attr} = {alist!r}', file=f) + + print('item_attributes.py successfully created!') + +def create_plugin_yaml_item_attribute_valids(): + """Create valid_list of db_addon_fct based on master dict""" + + valid_list_str = """ # NOTE: valid_list is automatically created by using item_attributes_master.py""" + valid_list_desc_str = """ # NOTE: valid_list_description is automatically created by using item_attributes_master.py""" + valid_list_item_type = """ # NOTE: valid_list_item_type is automatically created by using item_attributes_master.py""" + valid_list_calculation = """ # NOTE: valid_list_calculation is automatically created by using item_attributes_master.py""" + + for db_addon_fct in ITEM_ATTRIBUTES[attribute]: + valid_list_str = f"""{valid_list_str}\n\ + - {db_addon_fct!r:<40}""" + + valid_list_desc_str = f"""{valid_list_desc_str}\n\ + - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['description']:<}'""" + + valid_list_item_type = f"""{valid_list_item_type}\n\ + - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['item_type']:<}'""" + + valid_list_calculation = f"""{valid_list_calculation}\n\ + - '{ITEM_ATTRIBUTES[attribute][db_addon_fct]['calc']:<}'""" + + valid_list_calculation = f"""{valid_list_calculation}\n\r""" + + return valid_list_str, valid_list_desc_str, valid_list_item_type, valid_list_calculation + +def update_plugin_yaml_item_attributes(): + """Update 'valid_list', 'valid_list_description', 'valid_list_item_type' and 'valid_list_calculation' of item attributes in plugin.yaml""" + + yaml = ruamel.yaml.YAML() + yaml.indent(mapping=4, sequence=4, offset=4) + yaml.width = 200 + yaml.allow_unicode = True + yaml.preserve_quotes = False + + valid_list_str, valid_list_desc_str, valid_list_item_type_str, valid_list_calc_str = create_plugin_yaml_item_attribute_valids() + + with open(FILENAME_PLUGIN, 'r', encoding="utf-8") as f: + data = yaml.load(f) + + if data.get('item_attributes', {}).get(attribute): + data['item_attributes'][attribute]['valid_list'] = yaml.load(valid_list_str) + data['item_attributes'][attribute]['valid_list_description'] = yaml.load(valid_list_desc_str) + data['item_attributes'][attribute]['valid_list_item_type'] = yaml.load(valid_list_item_type_str) + data['item_attributes'][attribute]['valid_list_calculation'] = yaml.load(valid_list_calc_str) + + with open(FILENAME_PLUGIN, 'w', encoding="utf-8") as f: + yaml.dump(data, f) + print(f"Successfully updated Attribute '{attribute}' in plugin.yaml!") + else: + print(f"Attribute '{attribute}' not defined in plugin.yaml") + +if __name__ == '__main__': + export_item_attributes_py() + for attribute in ITEM_ATTRIBUTES: + update_plugin_yaml_item_attributes() From 1aa9afb0c02b0de75e90822ea2c905bf953fa82e Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Mon, 7 Aug 2023 12:39:38 +0200 Subject: [PATCH 234/775] AVM: - FritzHome: Bugfix renewing SID in case SID has expired - CallMonitor: bugfix join listing thread - CallMonitor: reconnect if connection is lost - bump to 2.0.9 --- avm/__init__.py | 92 ++++++++++++++++++++++++++----------------------- avm/plugin.yaml | 2 +- 2 files changed, 50 insertions(+), 44 deletions(-) diff --git 
a/avm/__init__.py b/avm/__init__.py index abbbc6bd2..c1027b9ba 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -113,7 +113,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. Does all plugin specific stuff """ - PLUGIN_VERSION = '2.0.8' + PLUGIN_VERSION = '2.0.9' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -2132,37 +2132,38 @@ def get_sid(): for sid in get_sid(): params['sid'] = sid - try: - response = self._session.get(url=url, params=params, verify=self.verify) - except requests.exceptions.Timeout: - if self._timeout < 31: - self._timeout += 5 - msg = f"HTTP request timed out. Timeout extended by 5s to {self._timeout}" - else: - msg = "HTTP request timed out." - self.logger.info(msg) - raise FritzHttpTimeoutError(msg) - - if response.status_code == 200: - content_type = response.headers.get('content-type') - if 'json' in content_type: - return content_type, response.json() - return content_type, response.text - - elif response.status_code == 403: - msg = f"{response.status_code!r} Forbidden: 'Session-ID ungültig oder Benutzer nicht autorisiert'" - self.logger.info(msg) - raise FritzHttpInterfaceError(msg) - - elif response.status_code == 400: - msg = f"{response.status_code!r} HTTP Request fehlerhaft, Parameter sind ungültig, nicht vorhanden oder Wertebereich überschritten" - self.logger.info(f"Error {msg}, params: {params}") - raise FritzHttpRequestError(msg) + with self._session as session: + try: + response = session.get(url=url, params=params, verify=self.verify) + except requests.exceptions.Timeout: + if self._timeout < 31: + self._timeout += 5 + msg = f"HTTP request timed out. Timeout extended by 5s to {self._timeout}" + else: + msg = "HTTP request timed out." + self.logger.info(msg) + raise FritzHttpTimeoutError(msg) + + if response.status_code == 200: + content_type = response.headers.get('content-type') + if 'json' in content_type: + return content_type, response.json() + return content_type, response.text + + if response.status_code == 403: + msg = f"{response.status_code!r} Forbidden: 'Session-ID ungültig oder Benutzer nicht autorisiert'" + self.logger.info(msg) + raise FritzHttpInterfaceError(msg) + + elif response.status_code == 400: + msg = f"{response.status_code!r} HTTP Request fehlerhaft, Parameter sind ungültig, nicht vorhanden oder Wertebereich überschritten" + self.logger.info(f"Error {msg}, params: {params}") + raise FritzHttpRequestError(msg) - else: - msg = f"Error {response.status_code!r} Internal Server Error: 'Interner Fehler'" - self.logger.info(f"{msg}, params: {params}") - raise FritzAuthorizationError(msg) + else: + msg = f"Error {response.status_code!r} Internal Server Error: 'Interner Fehler'" + self.logger.info(f"{msg}, params: {params}") + raise FritzAuthorizationError(msg) def aha_request(self, cmd: str, ain: str = None, param: dict = None, result_type: str = None): """Send an AHA request. 
@@ -3892,15 +3893,19 @@ def disconnect(self): self._stop_counter('incoming') self._stop_counter('outgoing') - try: - self._listen_thread.join(1) - except Exception: - pass + if self._listen_thread: + try: + self._listen_thread.join(1) + except Exception as e: # AttributeError + self.logger.debug(f"Error {e!r} occurred during disconnecting of Callmonitor.") + pass - try: - self.conn.shutdown(2) - except Exception: - pass + if self.conn: + try: + self.conn.shutdown(2) + except Exception as e: + self.logger.debug(f"Error {e!r} occurred during shutdown of Callmonitor.") + pass def reconnect(self): """ @@ -4071,12 +4076,13 @@ def _listen(self, recv_buffer: int = 4096): buffer = "" while self._listen_active: data = self.conn.recv(recv_buffer) - if data == "": - self.logger.error("CallMonitor connection not open anymore.") + if data.decode('utf-8') == "" and self._listen_active: + self.logger.warning("CallMonitor connection not open anymore. Try to reconnect") + self.reconnect() else: if self.debug_log: - self.logger.debug(f"Data Received from CallMonitor: {data.decode('utf-8').strip()}") - buffer += data.decode("utf-8") + self.logger.debug(f"Data Received from CallMonitor: '{data.decode('utf-8')}'") + buffer += data.decode('utf-8') while buffer.find("\n") != -1: line, buffer = buffer.split("\n", 1) if line: diff --git a/avm/plugin.yaml b/avm/plugin.yaml index 03fcf6d65..d3f08f689 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.8 # Plugin version (must match the version specified in __init__.py) + version: 2.0.9 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin From 92a05ee3d7e6ab3c77ea8149f705fdeb310a4df9 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Tue, 8 Aug 2023 15:32:04 +0200 Subject: [PATCH 235/775] updated viessmann to latest sdp --- viessmann/__init__.py | 2 +- viessmann/commands.py | 2 +- viessmann/datatypes.py | 6 +- viessmann/plugin.yaml | 3018 ++++++++++++++++++++-------------------- viessmann/protocol.py | 45 +- 5 files changed, 1551 insertions(+), 1522 deletions(-) diff --git a/viessmann/__init__.py b/viessmann/__init__.py index 1120b51c6..ff1792a04 100644 --- a/viessmann/__init__.py +++ b/viessmann/__init__.py @@ -49,7 +49,7 @@ class SmartPluginWebIf(): from .webif import WebInterface -class sdp_viessmann(SmartDevicePlugin): +class viessmann(SmartDevicePlugin): """ Device class for Viessmann heating systems. 
Standalone mode is automatic device type discovery diff --git a/viessmann/commands.py b/viessmann/commands.py index e71a33e89..237acfe99 100644 --- a/viessmann/commands.py +++ b/viessmann/commands.py @@ -13,7 +13,7 @@ commands = { 'ALL': { - 'Anlagentyp': {'read': True, 'write': False, 'opcode': '00f8', 'reply_pattern': '*', 'item_type': 'num', 'dev_datatype': 'V', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'devicetypes', 'item_attrs': {'no_read_groups': True, 'attributes': {'md_read_initial': True}}}, # getAnlTyp -- Information - Allgemein: Anlagentyp (204D) + 'Anlagentyp': {'read': True, 'write': False, 'opcode': '00f8', 'reply_pattern': '*', 'item_type': 'str', 'dev_datatype': 'H', 'params': {'value': 'VAL', 'len': 2}, 'lookup': 'devicetypes', 'item_attrs': {'no_read_groups': True, 'attributes': {'md_read_initial': True}}}, # getAnlTyp -- Information - Allgemein: Anlagentyp (204D) }, 'V200KO1B': { 'Allgemein': { diff --git a/viessmann/datatypes.py b/viessmann/datatypes.py index 18505599a..f3b827cf6 100644 --- a/viessmann/datatypes.py +++ b/viessmann/datatypes.py @@ -106,7 +106,11 @@ def get_send_data(self, data, **kwargs): return super().get_send_data(data, **kwargs) def get_shng_data(self, data, type=None, **kwargs): - return data.hex() + try: + return data.hex().upper() + except AttributeError: + return + # return ' '.join([hexstr[i:i + 2] for i in range(0, len(hexstr), 2)]) diff --git a/viessmann/plugin.yaml b/viessmann/plugin.yaml index bcc2e307c..5458ce165 100644 --- a/viessmann/plugin.yaml +++ b/viessmann/plugin.yaml @@ -15,7 +15,7 @@ plugin: py_minversion: 3.6 multi_instance: false restartable: true - classname: sdp_viessmann + classname: viessmann parameters: @@ -37,6 +37,13 @@ parameters: - KW description: Kommunikationsprotokoll der Heizung + suspend_item: + type: str + default: '' + description: + de: 'Item-Pfad für das Suspend-Item' + en: 'item path for suspend switch item' + command_class: type: str default: SDPCommandViessmann @@ -47,58 +54,59 @@ parameters: default: true description: Entferne Struct-Elemente, die vom gewählten Modell nicht unterstützt werden + item_attributes: - md_command: + viess_command: type: str description: de: Legt das angegebene Kommando für das Item fest en: Assigns the given command to the item - md_read: + viess_read: type: bool description: de: Liest/erhält Werte vom Gerät en: Reads/receives data from the device - md_read_group: + viess_read_group: type: list(str) description: de: Weist das Item der angegebenen Gruppe zum gesammelten Lesen zu. Gruppe kann int oder str sein, mehrere Gruppen können als Liste angegeben werden. en: Assigns the item to the given group for collective reading. Groups can be int or str, multiple groups can be provided as a list. 
- md_read_cycle: + viess_read_cycle: type: num description: de: Konfiguriert ein Intervall in Sekunden für regelmäßiges Lesen en: Configures a interval in seconds for cyclic read actions - md_read_initial: + viess_read_initial: type: bool description: de: Legt fest, dass der Wert beim Start vom Gerät gelesen wird en: Sets item value to be read from the device on startup - md_write: + viess_write: type: bool description: de: Änderung des Items werden an das Gerät gesendet en: Changes to this item will be sent to the device - md_read_group_trigger: + viess_read_group_trigger: type: str description: de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit md_command belegt werden. en: When set to any value, all items configured for reading for the given group will update their value from the device, if group is 0, all items configured for reading will update. The item cannot be used with md_command in parallel. - md_lookup: + viess_lookup: type: str description: @@ -109,7 +117,7 @@ item_attributes: de: "Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim\nStart einmalig als dict oder list in das Item geschrieben.\n\n\nDurch Anhängen von \"#\" an den Namen der Tabelle kann die Art\nder Tabelle ausgewählt werden:\n- fwd liefert die Tabelle Gerät -> SmartHomeNG (Standard)\n- rev liefert die Tabelle SmartHomeNG -> Gerät\n- rci liefert die Tabelle SmarthomeNG -> Gerät in Kleinbuchstaben\n- list liefert die Liste der Namen für SmartHomeNG" en: "The lookup table with the given name will be assigned to the item\nin dict or list format once on startup.\n\n\nBy appending \"#\" to the tables name the type of table can\nbe selected:\n- fwd returns the table device -> SmartHomeNG (default)\n- rev returns the table SmartHomeNG -> device\n- rci returns the table SmartHomeNG -> device in lower case\n- list return the list of names for SmartHomeNG" - md_custom1: + viess_custom1: type: str description: @@ -120,7 +128,7 @@ item_attributes: de: 'Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. Durch den Parameter "recursive_custom: 1" in der Geräte-Konfiguration wird der Wert rekursiv für alle Unteritems gesetzt.' en: 'For custom use of each device respectively. By setting "recursive_custom: 1" in the device configuration, the value of this attribute will be set for all sub-items.' - md_custom2: + viess_custom2: type: str description: @@ -131,7 +139,7 @@ item_attributes: de: 'Der Inhalt dieses Items kann vom jeweiligen Gerät für spezielle Zwecke genutzt werden. Durch den Parameter "recursive_custom: 2" in der Geräte-Konfiguration wird der Wert rekursiv für alle Unteritems gesetzt.' en: 'For custom use of each device respectively. By setting "recursive_custom: 2" in the device configuration, the value of this attribute will be set for all sub-items.' 
- md_custom3: + viess_custom3: type: str description: @@ -147,232 +155,232 @@ item_structs: ALL: Anlagentyp: - type: num - md_command: Anlagentyp - md_read: true - md_write: false - md_read_group: + type: str + viess_command: Anlagentyp + viess_read: true + viess_write: false + viess_read_group: - ALL - md_read_initial: true + viess_read_initial: true V200KO1B: Anlagentyp: - type: num - md_command: Anlagentyp - md_read: true - md_write: false - md_read_group: + type: str + viess_command: Anlagentyp + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - md_read_initial: true + viess_read_initial: true Allgemein: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein + viess_read_group_trigger: Allgemein Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein.Temperatur + viess_read_group_trigger: Allgemein.Temperatur Aussen: type: num - md_command: Allgemein.Temperatur.Aussen - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Aussen_TP: type: num - md_command: Allgemein.Temperatur.Aussen_TP - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen_TP + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Aussen_Dp: type: num - md_command: Allgemein.Temperatur.Aussen_Dp - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen_Dp + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Speicher_Ladesensor: type: num - md_command: Allgemein.Temperatur.Speicher_Ladesensor - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Speicher_Ladesensor + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Auslauf: type: num - md_command: Allgemein.Temperatur.Auslauf - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Auslauf + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Abgas: type: num - md_command: Allgemein.Temperatur.Abgas - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Abgas + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Gem_Vorlauf: type: num - md_command: Allgemein.Temperatur.Gem_Vorlauf - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Gem_Vorlauf + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein - Allgemein.Temperatur Relais_K12: type: bool - md_command: Allgemein.Relais_K12 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Relais_K12 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein Eingang_0-10_V: type: bool - md_command: Allgemein.Eingang_0-10_V - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Eingang_0-10_V + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein EA1_Kontakt_0: type: bool - md_command: Allgemein.EA1_Kontakt_0 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.EA1_Kontakt_0 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein EA1_Kontakt_1: type: bool - md_command: 
Allgemein.EA1_Kontakt_1 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.EA1_Kontakt_1 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein EA1_Kontakt_2: type: bool - md_command: Allgemein.EA1_Kontakt_2 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.EA1_Kontakt_2 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein EA1_Externer_Soll_0-10V: type: bool - md_command: Allgemein.EA1_Externer_Soll_0-10V - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.EA1_Externer_Soll_0-10V + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein EA1_Relais_0: type: bool - md_command: Allgemein.EA1_Relais_0 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.EA1_Relais_0 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein AM1_Ausgang_1: type: bool - md_command: Allgemein.AM1_Ausgang_1 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.AM1_Ausgang_1 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein AM1_Ausgang_2: type: bool - md_command: Allgemein.AM1_Ausgang_2 - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.AM1_Ausgang_2 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein TempKOffset: type: num - md_command: Allgemein.TempKOffset - md_read: true - md_write: true - md_read_group: + viess_command: Allgemein.TempKOffset + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Allgemein Systemtime: type: bool - md_command: Allgemein.Systemtime - md_read: true - md_write: true - md_read_group: + viess_command: Allgemein.Systemtime + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Allgemein Anlagenschema: type: num - md_command: Allgemein.Anlagenschema - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Anlagenschema + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein Inventory: type: str - md_command: Allgemein.Inventory - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Inventory + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein CtrlId: type: num - md_command: Allgemein.CtrlId - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.CtrlId + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Allgemein @@ -381,32 +389,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Kessel + viess_read_group_trigger: Kessel Ist: type: num - md_command: Kessel.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Kessel TP: type: num - md_command: Kessel.TP - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.TP + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Kessel Soll: type: num - md_command: Kessel.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.Soll + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Kessel @@ -415,104 +423,104 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Fehler + viess_read_group_trigger: Fehler Sammelstoerung: type: num - md_command: Fehler.Sammelstoerung - md_read: true - md_write: false - 
md_read_group: + viess_command: Fehler.Sammelstoerung + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error0: type: num - md_command: Fehler.Error0 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error0 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error1: type: num - md_command: Fehler.Error1 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error1 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error2: type: num - md_command: Fehler.Error2 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error2 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error3: type: num - md_command: Fehler.Error3 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error3 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error4: type: num - md_command: Fehler.Error4 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error4 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error5: type: num - md_command: Fehler.Error5 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error5 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error6: type: num - md_command: Fehler.Error6 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error6 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error7: type: num - md_command: Fehler.Error7 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error7 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error8: type: num - md_command: Fehler.Error8 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error8 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler Error9: type: num - md_command: Fehler.Error9 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error9 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Fehler @@ -521,77 +529,77 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Pumpen + viess_read_group_trigger: Pumpen Speicherlade: type: bool - md_command: Pumpen.Speicherlade - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Speicherlade + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Zirkulation: type: bool - md_command: Pumpen.Zirkulation - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Zirkulation + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Intern: type: bool - md_command: Pumpen.Intern - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Intern + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Heizkreis_A1M1: type: bool - md_command: Pumpen.Heizkreis_A1M1 - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_A1M1 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Heizkreis_A1M1_RPM: type: bool - md_command: Pumpen.Heizkreis_A1M1_RPM - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_A1M1_RPM + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Heizkreis_M2: type: bool - md_command: Pumpen.Heizkreis_M2 - 
md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_M2 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Heizkreis_M2_RPM: type: bool - md_command: Pumpen.Heizkreis_M2_RPM - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_M2_RPM + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen Relais_Status: type: bool - md_command: Pumpen.Relais_Status - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Relais_Status + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Pumpen @@ -600,59 +608,59 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Brenner + viess_read_group_trigger: Brenner Starts: type: num - md_command: Brenner.Starts - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.Starts + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Brenner Betriebsstunden: type: num - md_command: Brenner.Betriebsstunden - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.Betriebsstunden + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Brenner Status_1: type: bool - md_command: Brenner.Status_1 - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Status_1 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Brenner Status_2: type: bool - md_command: Brenner.Status_2 - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Status_2 + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Brenner Oeldurchsatz: type: num - md_command: Brenner.Oeldurchsatz - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.Oeldurchsatz + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Brenner Oelverbrauch: type: num - md_command: Brenner.Oelverbrauch - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.Oelverbrauch + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Brenner @@ -661,59 +669,59 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Solar + viess_read_group_trigger: Solar Nachladeunterdrueckung: type: bool - md_command: Solar.Nachladeunterdrueckung - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Nachladeunterdrueckung + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar Pumpe: type: bool - md_command: Solar.Pumpe - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Pumpe + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar Kollektortemperatur: type: num - md_command: Solar.Kollektortemperatur - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Kollektortemperatur + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar Speichertemperatur: type: num - md_command: Solar.Speichertemperatur - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Speichertemperatur + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar Betriebsstunden: type: num - md_command: Solar.Betriebsstunden - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Betriebsstunden + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar Steuerung: type: num - md_command: Solar.Steuerung - md_read: true - md_write: false - md_read_group: + viess_command: 
Solar.Steuerung + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Solar @@ -722,35 +730,35 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis + viess_read_group_trigger: Heizkreis A1M1: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1 + viess_read_group_trigger: Heizkreis.A1M1 Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur + viess_read_group_trigger: Heizkreis.A1M1.Temperatur Raum: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum + viess_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum Ist: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -759,10 +767,10 @@ item_structs: Soll_Normalbetrieb: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Normalbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Normalbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -771,10 +779,10 @@ item_structs: Soll_Red_Betrieb: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Red_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Red_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -783,10 +791,10 @@ item_structs: Soll_Party_Betrieb: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -798,14 +806,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf + viess_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf Ist: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -814,10 +822,10 @@ item_structs: Soll: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -826,10 +834,10 @@ item_structs: Min: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Min - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Min + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -838,10 +846,10 @@ item_structs: Max: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Max - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Max + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -850,10 +858,10 @@ item_structs: Erhoehung_Soll: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Soll - md_read: true - md_write: true - 
md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Soll + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -862,10 +870,10 @@ item_structs: Erhoehung_Zeit: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Zeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Erhoehung_Zeit + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -874,10 +882,10 @@ item_structs: Grenze_red_Betrieb: type: num - md_command: Heizkreis.A1M1.Temperatur.Grenze_red_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Grenze_red_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -885,10 +893,10 @@ item_structs: Grenze_red_Raumtemp: type: num - md_command: Heizkreis.A1M1.Temperatur.Grenze_red_Raumtemp - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Grenze_red_Raumtemp + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -899,14 +907,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Status + viess_read_group_trigger: Heizkreis.A1M1.Status Aktuelle_Betriebsart: type: num - md_command: Heizkreis.A1M1.Status.Aktuelle_Betriebsart - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Status.Aktuelle_Betriebsart + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -914,10 +922,10 @@ item_structs: Betriebsart: type: num - md_command: Heizkreis.A1M1.Status.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -925,10 +933,10 @@ item_structs: Sparbetrieb: type: num - md_command: Heizkreis.A1M1.Status.Sparbetrieb - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Status.Sparbetrieb + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -936,10 +944,10 @@ item_structs: Zustand_Sparbetrieb: type: num - md_command: Heizkreis.A1M1.Status.Zustand_Sparbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Zustand_Sparbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -947,10 +955,10 @@ item_structs: Partybetrieb: type: num - md_command: Heizkreis.A1M1.Status.Partybetrieb - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Status.Partybetrieb + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -958,10 +966,10 @@ item_structs: Zustand_Partybetrieb: type: num - md_command: Heizkreis.A1M1.Status.Zustand_Partybetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Zustand_Partybetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -969,10 +977,10 @@ item_structs: StatusFrost: type: num - md_command: Heizkreis.A1M1.Status.StatusFrost - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Status.StatusFrost + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - 
Heizkreis.A1M1 @@ -980,10 +988,10 @@ item_structs: Externe_Raumsolltemperatur_Normal: type: num - md_command: Heizkreis.A1M1.Status.Externe_Raumsolltemperatur_Normal - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Externe_Raumsolltemperatur_Normal + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -991,10 +999,10 @@ item_structs: Externe_Betriebsartenumschaltung: type: num - md_command: Heizkreis.A1M1.Status.Externe_Betriebsartenumschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Externe_Betriebsartenumschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1002,10 +1010,10 @@ item_structs: Speichervorrang: type: num - md_command: Heizkreis.A1M1.Status.Speichervorrang - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Speichervorrang + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1013,10 +1021,10 @@ item_structs: Frostschutzgrenze: type: num - md_command: Heizkreis.A1M1.Status.Frostschutzgrenze - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Frostschutzgrenze + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1024,10 +1032,10 @@ item_structs: Frostschutz: type: num - md_command: Heizkreis.A1M1.Status.Frostschutz - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Frostschutz + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1035,10 +1043,10 @@ item_structs: Heizkreispumpenlogik: type: num - md_command: Heizkreis.A1M1.Status.Heizkreispumpenlogik - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Heizkreispumpenlogik + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1046,10 +1054,10 @@ item_structs: Sparschaltung: type: num - md_command: Heizkreis.A1M1.Status.Sparschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Sparschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1057,10 +1065,10 @@ item_structs: Mischersparfunktion: type: num - md_command: Heizkreis.A1M1.Status.Mischersparfunktion - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Mischersparfunktion + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1068,10 +1076,10 @@ item_structs: Pumpenstillstandzeit: type: num - md_command: Heizkreis.A1M1.Status.Pumpenstillstandzeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Status.Pumpenstillstandzeit + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1082,14 +1090,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Heizkennlinie + viess_read_group_trigger: Heizkreis.A1M1.Heizkennlinie Neigung: type: num - md_command: Heizkreis.A1M1.Heizkennlinie.Neigung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Heizkennlinie.Neigung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1097,10 +1105,10 @@ item_structs: Niveau: type: num - 
md_command: Heizkreis.A1M1.Heizkennlinie.Niveau - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Heizkennlinie.Niveau + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1108,10 +1116,10 @@ item_structs: Partybetrieb_Zeitbegrenzung: type: num - md_command: Heizkreis.A1M1.Partybetrieb_Zeitbegrenzung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Partybetrieb_Zeitbegrenzung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.A1M1 @@ -1121,28 +1129,28 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2 + viess_read_group_trigger: Heizkreis.M2 Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur + viess_read_group_trigger: Heizkreis.M2.Temperatur Raum: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur.Raum + viess_read_group_trigger: Heizkreis.M2.Temperatur.Raum Ist: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1151,10 +1159,10 @@ item_structs: Soll_Normalbetrieb: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Normalbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Normalbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1163,10 +1171,10 @@ item_structs: Soll_Red_Betrieb: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Red_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Red_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1175,10 +1183,10 @@ item_structs: Soll_Party_Betrieb: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Party_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Party_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1190,14 +1198,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf + viess_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf Ist: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1206,10 +1214,10 @@ item_structs: Soll: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Soll + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1218,10 +1226,10 @@ item_structs: Min: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Min - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Min + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1230,10 +1238,10 @@ item_structs: Max: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Max - md_read: true - md_write: true - 
md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Max + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1242,10 +1250,10 @@ item_structs: Erhoehung_Soll: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Soll - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Soll + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1254,10 +1262,10 @@ item_structs: Erhoehung_Zeit: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Zeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Erhoehung_Zeit + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1266,10 +1274,10 @@ item_structs: Grenze_red_Betrieb: type: num - md_command: Heizkreis.M2.Temperatur.Grenze_red_Betrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Grenze_red_Betrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1277,10 +1285,10 @@ item_structs: Grenze_red_Raumtemp: type: num - md_command: Heizkreis.M2.Temperatur.Grenze_red_Raumtemp - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Grenze_red_Raumtemp + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1291,14 +1299,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Status + viess_read_group_trigger: Heizkreis.M2.Status Aktuelle_Betriebsart: type: num - md_command: Heizkreis.M2.Status.Aktuelle_Betriebsart - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Status.Aktuelle_Betriebsart + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1306,10 +1314,10 @@ item_structs: Betriebsart: type: num - md_command: Heizkreis.M2.Status.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1317,10 +1325,10 @@ item_structs: Sparbetrieb: type: num - md_command: Heizkreis.M2.Status.Sparbetrieb - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Status.Sparbetrieb + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1328,10 +1336,10 @@ item_structs: Zustand_Sparbetrieb: type: num - md_command: Heizkreis.M2.Status.Zustand_Sparbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Zustand_Sparbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1339,10 +1347,10 @@ item_structs: Partybetrieb: type: num - md_command: Heizkreis.M2.Status.Partybetrieb - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Status.Partybetrieb + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1350,10 +1358,10 @@ item_structs: Zustand_Partybetrieb: type: num - md_command: Heizkreis.M2.Status.Zustand_Partybetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Zustand_Partybetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1361,10 
+1369,10 @@ item_structs: StatusFrost: type: num - md_command: Heizkreis.M2.Status.StatusFrost - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Status.StatusFrost + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1372,10 +1380,10 @@ item_structs: Externe_Raumsolltemperatur_Normal: type: num - md_command: Heizkreis.M2.Status.Externe_Raumsolltemperatur_Normal - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Externe_Raumsolltemperatur_Normal + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1383,10 +1391,10 @@ item_structs: Externe_Betriebsartenumschaltung: type: num - md_command: Heizkreis.M2.Status.Externe_Betriebsartenumschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Externe_Betriebsartenumschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1394,10 +1402,10 @@ item_structs: Speichervorrang: type: num - md_command: Heizkreis.M2.Status.Speichervorrang - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Speichervorrang + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1405,10 +1413,10 @@ item_structs: Frostschutzgrenze: type: num - md_command: Heizkreis.M2.Status.Frostschutzgrenze - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Frostschutzgrenze + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1416,10 +1424,10 @@ item_structs: Frostschutz: type: num - md_command: Heizkreis.M2.Status.Frostschutz - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Frostschutz + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1427,10 +1435,10 @@ item_structs: Heizkreispumpenlogik: type: num - md_command: Heizkreis.M2.Status.Heizkreispumpenlogik - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Heizkreispumpenlogik + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1438,10 +1446,10 @@ item_structs: Sparschaltung: type: num - md_command: Heizkreis.M2.Status.Sparschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Sparschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1449,10 +1457,10 @@ item_structs: Mischersparfunktion: type: num - md_command: Heizkreis.M2.Status.Mischersparfunktion - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Mischersparfunktion + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1460,10 +1468,10 @@ item_structs: Pumpenstillstandzeit: type: num - md_command: Heizkreis.M2.Status.Pumpenstillstandzeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Status.Pumpenstillstandzeit + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1474,14 +1482,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Heizkennlinie + viess_read_group_trigger: Heizkreis.M2.Heizkennlinie Neigung: type: num - md_command: Heizkreis.M2.Heizkennlinie.Neigung - md_read: true - md_write: true - 
md_read_group: + viess_command: Heizkreis.M2.Heizkennlinie.Neigung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1489,10 +1497,10 @@ item_structs: Niveau: type: num - md_command: Heizkreis.M2.Heizkennlinie.Niveau - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Heizkennlinie.Niveau + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1500,10 +1508,10 @@ item_structs: Partybetrieb_Zeitbegrenzung: type: num - md_command: Heizkreis.M2.Partybetrieb_Zeitbegrenzung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Partybetrieb_Zeitbegrenzung + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Heizkreis - Heizkreis.M2 @@ -1513,41 +1521,41 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Warmwasser + viess_read_group_trigger: Warmwasser Ist: type: num - md_command: Warmwasser.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Warmwasser Soll: type: num - md_command: Warmwasser.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.Soll + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Warmwasser Status: type: bool - md_command: Warmwasser.Status - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.Status + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Warmwasser PumpenNachlauf: type: num - md_command: Warmwasser.PumpenNachlauf - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.PumpenNachlauf + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Warmwasser @@ -1556,41 +1564,41 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Ferienprogramm + viess_read_group_trigger: Ferienprogramm A1M1: read: type: bool enforce_updates: true - md_read_group_trigger: Ferienprogramm.A1M1 + viess_read_group_trigger: Ferienprogramm.A1M1 Status: type: num - md_command: Ferienprogramm.A1M1.Status - md_read: true - md_write: false - md_read_group: + viess_command: Ferienprogramm.A1M1.Status + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.A1M1 Abreisetag: type: bool - md_command: Ferienprogramm.A1M1.Abreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.A1M1.Abreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.A1M1 Rückreisetag: type: bool - md_command: Ferienprogramm.A1M1.Rückreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.A1M1.Rückreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.A1M1 @@ -1600,34 +1608,34 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Ferienprogramm.M2 + viess_read_group_trigger: Ferienprogramm.M2 Status: type: num - md_command: Ferienprogramm.M2.Status - md_read: true - md_write: false - md_read_group: + viess_command: Ferienprogramm.M2.Status + viess_read: true + viess_write: false + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.M2 Abreisetag: type: bool - md_command: Ferienprogramm.M2.Abreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.M2.Abreisetag + 
viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.M2 Rückreisetag: type: bool - md_command: Ferienprogramm.M2.Rückreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.M2.Rückreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Ferienprogramm - Ferienprogramm.M2 @@ -1637,81 +1645,81 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer + viess_read_group_trigger: Timer Warmwasser: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.Warmwasser + viess_read_group_trigger: Timer.Warmwasser Mo: type: list - md_command: Timer.Warmwasser.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser Di: type: list - md_command: Timer.Warmwasser.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Di + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser Mi: type: list - md_command: Timer.Warmwasser.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser Do: type: list - md_command: Timer.Warmwasser.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Do + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser Fr: type: list - md_command: Timer.Warmwasser.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser Sa: type: list - md_command: Timer.Warmwasser.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser So: type: list - md_command: Timer.Warmwasser.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.So + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Warmwasser @@ -1721,74 +1729,74 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.A1M1 + viess_read_group_trigger: Timer.A1M1 Mo: type: list - md_command: Timer.A1M1.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 Di: type: list - md_command: Timer.A1M1.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Di + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 Mi: type: list - md_command: Timer.A1M1.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 Do: type: list - md_command: Timer.A1M1.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Do + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 Fr: type: list - md_command: Timer.A1M1.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 Sa: type: list - md_command: 
Timer.A1M1.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 So: type: list - md_command: Timer.A1M1.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.So + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.A1M1 @@ -1798,74 +1806,74 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.M2 + viess_read_group_trigger: Timer.M2 Mo: type: list - md_command: Timer.M2.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 Di: type: list - md_command: Timer.M2.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Di + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 Mi: type: list - md_command: Timer.M2.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 Do: type: list - md_command: Timer.M2.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Do + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 Fr: type: list - md_command: Timer.M2.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 Sa: type: list - md_command: Timer.M2.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 So: type: list - md_command: Timer.M2.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.So + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.M2 @@ -1875,74 +1883,74 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.Zirkulation + viess_read_group_trigger: Timer.Zirkulation Mo: type: list - md_command: Timer.Zirkulation.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation Di: type: list - md_command: Timer.Zirkulation.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Di + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation Mi: type: list - md_command: Timer.Zirkulation.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation Do: type: list - md_command: Timer.Zirkulation.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Do + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation Fr: type: list - md_command: Timer.Zirkulation.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation Sa: type: list - md_command: Timer.Zirkulation.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.Sa + viess_read: true + viess_write: true + viess_read_group: 
- V200KO1B - Timer - Timer.Zirkulation So: type: list - md_command: Timer.Zirkulation.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Zirkulation.So + viess_read: true + viess_write: true + viess_read_group: - V200KO1B - Timer - Timer.Zirkulation @@ -1950,45 +1958,45 @@ item_structs: V200HO1C: Anlagentyp: - type: num - md_command: Anlagentyp - md_read: true - md_write: false - md_read_group: + type: str + viess_command: Anlagentyp + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - md_read_initial: true + viess_read_initial: true Allgemein: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein + viess_read_group_trigger: Allgemein Anlagenschema: type: num - md_command: Allgemein.Anlagenschema - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Anlagenschema + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Allgemein Frostgefahr: type: num - md_command: Allgemein.Frostgefahr - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Frostgefahr + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Allgemein Anlagenleistung: type: num - md_command: Allgemein.Anlagenleistung - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Anlagenleistung + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Allgemein @@ -1997,24 +2005,24 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein.Temperatur + viess_read_group_trigger: Allgemein.Temperatur Aussen_TP: type: num - md_command: Allgemein.Temperatur.Aussen_TP - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen_TP + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Allgemein - Allgemein.Temperatur Aussen_Dp: type: num - md_command: Allgemein.Temperatur.Aussen_Dp - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen_Dp + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Allgemein - Allgemein.Temperatur @@ -2024,32 +2032,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Kessel + viess_read_group_trigger: Kessel TP: type: num - md_command: Kessel.TP - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.TP + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Kessel Soll: type: num - md_command: Kessel.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.Soll + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Kessel Abgastemperatur: type: num - md_command: Kessel.Abgastemperatur - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.Abgastemperatur + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Kessel @@ -2058,104 +2066,104 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Fehler + viess_read_group_trigger: Fehler Sammelstoerung: type: num - md_command: Fehler.Sammelstoerung - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Sammelstoerung + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error0: type: num - md_command: Fehler.Error0 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error0 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error1: type: num - md_command: Fehler.Error1 - 
md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error1 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error2: type: num - md_command: Fehler.Error2 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error2 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error3: type: num - md_command: Fehler.Error3 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error3 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error4: type: num - md_command: Fehler.Error4 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error4 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error5: type: num - md_command: Fehler.Error5 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error5 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error6: type: num - md_command: Fehler.Error6 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error6 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error7: type: num - md_command: Fehler.Error7 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error7 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error8: type: num - md_command: Fehler.Error8 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error8 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler Error9: type: num - md_command: Fehler.Error9 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error9 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Fehler @@ -2164,50 +2172,50 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Pumpen + viess_read_group_trigger: Pumpen Speicherlade: type: bool - md_command: Pumpen.Speicherlade - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Speicherlade + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Pumpen Zirkulation: type: bool - md_command: Pumpen.Zirkulation - md_read: true - md_write: true - md_read_group: + viess_command: Pumpen.Zirkulation + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Pumpen Intern: type: bool - md_command: Pumpen.Intern - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Intern + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Pumpen Heizkreis_1: type: bool - md_command: Pumpen.Heizkreis_1 - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_1 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Pumpen Heizkreis_2: type: bool - md_command: Pumpen.Heizkreis_2 - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_2 + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Pumpen @@ -2216,32 +2224,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Brenner + viess_read_group_trigger: Brenner Starts: type: num - md_command: Brenner.Starts - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Starts + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Brenner Leistung: type: num - md_command: Brenner.Leistung - md_read: true - md_write: false - md_read_group: + viess_command: 
Brenner.Leistung + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Brenner Betriebsstunden: type: num - md_command: Brenner.Betriebsstunden - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Betriebsstunden + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Brenner @@ -2250,59 +2258,59 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Solar + viess_read_group_trigger: Solar Pumpe: type: bool - md_command: Solar.Pumpe - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Pumpe + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar Kollektortemperatur: type: num - md_command: Solar.Kollektortemperatur - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Kollektortemperatur + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar Speichertemperatur: type: num - md_command: Solar.Speichertemperatur - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Speichertemperatur + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar Betriebsstunden: type: num - md_command: Solar.Betriebsstunden - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Betriebsstunden + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar Waermemenge: type: num - md_command: Solar.Waermemenge - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Waermemenge + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar Ausbeute: type: num - md_command: Solar.Ausbeute - md_read: true - md_write: false - md_read_group: + viess_command: Solar.Ausbeute + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Solar @@ -2311,31 +2319,31 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis + viess_read_group_trigger: Heizkreis '1': read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.1 + viess_read_group_trigger: Heizkreis.1 Betriebsart: type: num - md_command: Heizkreis.1.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.1.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.1 Heizart: type: num - md_command: Heizkreis.1.Heizart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.1.Heizart + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.1 @@ -2345,14 +2353,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.1.Temperatur + viess_read_group_trigger: Heizkreis.1.Temperatur Vorlauf_Soll: type: num - md_command: Heizkreis.1.Temperatur.Vorlauf_Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.1.Temperatur.Vorlauf_Soll + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.1 @@ -2360,10 +2368,10 @@ item_structs: Vorlauf_Ist: type: num - md_command: Heizkreis.1.Temperatur.Vorlauf_Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.1.Temperatur.Vorlauf_Ist + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.1 @@ -2374,24 +2382,24 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.2 + viess_read_group_trigger: Heizkreis.2 Betriebsart: type: num - md_command: 
Heizkreis.2.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.2.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.2 Heizart: type: num - md_command: Heizkreis.2.Heizart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.2.Heizart + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.2 @@ -2401,14 +2409,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.2.Temperatur + viess_read_group_trigger: Heizkreis.2.Temperatur Vorlauf_Soll: type: num - md_command: Heizkreis.2.Temperatur.Vorlauf_Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.2.Temperatur.Vorlauf_Soll + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.2 @@ -2416,10 +2424,10 @@ item_structs: Vorlauf_Ist: type: num - md_command: Heizkreis.2.Temperatur.Vorlauf_Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.2.Temperatur.Vorlauf_Ist + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Heizkreis - Heizkreis.2 @@ -2430,104 +2438,104 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Warmwasser + viess_read_group_trigger: Warmwasser Ist: type: num - md_command: Warmwasser.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Ist + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Warmwasser Soll: type: num - md_command: Warmwasser.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.Soll + viess_read: true + viess_write: true + viess_read_group: - V200HO1C - Warmwasser Austritt: type: num - md_command: Warmwasser.Austritt - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Austritt + viess_read: true + viess_write: false + viess_read_group: - V200HO1C - Warmwasser V200KW2: Anlagentyp: - type: num - md_command: Anlagentyp - md_read: true - md_write: false - md_read_group: + type: str + viess_command: Anlagentyp + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - md_read_initial: true + viess_read_initial: true Allgemein: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein + viess_read_group_trigger: Allgemein Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein.Temperatur + viess_read_group_trigger: Allgemein.Temperatur Aussen: type: num - md_command: Allgemein.Temperatur.Aussen - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Allgemein - Allgemein.Temperatur Aussen_Dp: type: num - md_command: Allgemein.Temperatur.Aussen_Dp - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen_Dp + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Allgemein - Allgemein.Temperatur Anlagenschema: type: num - md_command: Allgemein.Anlagenschema - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Anlagenschema + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Allgemein AnlagenSoftwareIndex: type: num - md_command: Allgemein.AnlagenSoftwareIndex - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.AnlagenSoftwareIndex + viess_read: true + viess_write: false + viess_read_group: 
- V200KW2 - Allgemein Systemtime: type: bool - md_command: Allgemein.Systemtime - md_read: true - md_write: true - md_read_group: + viess_command: Allgemein.Systemtime + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Allgemein @@ -2536,32 +2544,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Kessel + viess_read_group_trigger: Kessel TempKOffset: type: num - md_command: Kessel.TempKOffset - md_read: true - md_write: true - md_read_group: + viess_command: Kessel.TempKOffset + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Kessel Ist: type: num - md_command: Kessel.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Kessel.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Kessel Soll: type: num - md_command: Kessel.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Kessel.Soll + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Kessel @@ -2570,113 +2578,113 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Fehler + viess_read_group_trigger: Fehler Sammelstoerung: type: num - md_command: Fehler.Sammelstoerung - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Sammelstoerung + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Brennerstoerung: type: num - md_command: Fehler.Brennerstoerung - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Brennerstoerung + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error0: type: num - md_command: Fehler.Error0 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error0 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error1: type: num - md_command: Fehler.Error1 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error1 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error2: type: num - md_command: Fehler.Error2 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error2 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error3: type: num - md_command: Fehler.Error3 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error3 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error4: type: num - md_command: Fehler.Error4 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error4 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error5: type: num - md_command: Fehler.Error5 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error5 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error6: type: num - md_command: Fehler.Error6 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error6 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error7: type: num - md_command: Fehler.Error7 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error7 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error8: type: num - md_command: Fehler.Error8 - md_read: true - md_write: false - md_read_group: + viess_command: Fehler.Error8 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler Error9: type: num - md_command: Fehler.Error9 - md_read: true - 
md_write: false - md_read_group: + viess_command: Fehler.Error9 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Fehler @@ -2685,41 +2693,41 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Pumpen + viess_read_group_trigger: Pumpen Speicherlade: type: bool - md_command: Pumpen.Speicherlade - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Speicherlade + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Pumpen Zirkulation: type: bool - md_command: Pumpen.Zirkulation - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Zirkulation + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Pumpen Heizkreis_A1M1: type: bool - md_command: Pumpen.Heizkreis_A1M1 - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_A1M1 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Pumpen Heizkreis_M2: type: bool - md_command: Pumpen.Heizkreis_M2 - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis_M2 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Pumpen @@ -2728,68 +2736,68 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Brenner + viess_read_group_trigger: Brenner Typ: type: num - md_command: Brenner.Typ - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Typ + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Brenner Stufe: type: num - md_command: Brenner.Stufe - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Stufe + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Brenner Starts: type: num - md_command: Brenner.Starts - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.Starts + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Brenner Status_1: type: bool - md_command: Brenner.Status_1 - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Status_1 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Brenner Status_2: type: bool - md_command: Brenner.Status_2 - md_read: true - md_write: false - md_read_group: + viess_command: Brenner.Status_2 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Brenner BetriebsstundenStufe1: type: num - md_command: Brenner.BetriebsstundenStufe1 - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.BetriebsstundenStufe1 + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Brenner BetriebsstundenStufe2: type: num - md_command: Brenner.BetriebsstundenStufe2 - md_read: true - md_write: true - md_read_group: + viess_command: Brenner.BetriebsstundenStufe2 + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Brenner @@ -2798,35 +2806,35 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis + viess_read_group_trigger: Heizkreis A1M1: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1 + viess_read_group_trigger: Heizkreis.A1M1 Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur + viess_read_group_trigger: Heizkreis.A1M1.Temperatur Raum: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum + viess_read_group_trigger: Heizkreis.A1M1.Temperatur.Raum Soll_Normal: type: num - md_command: 
Heizkreis.A1M1.Temperatur.Raum.Soll_Normal - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Normal + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2835,10 +2843,10 @@ item_structs: Soll_Reduziert: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Reduziert - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Reduziert + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2847,10 +2855,10 @@ item_structs: Soll_Party: type: num - md_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Raum.Soll_Party + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2862,14 +2870,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf + viess_read_group_trigger: Heizkreis.A1M1.Temperatur.Vorlauf Ist: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2878,10 +2886,10 @@ item_structs: Soll: type: num - md_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Temperatur.Vorlauf.Soll + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2890,80 +2898,80 @@ item_structs: Betriebsart: type: num - md_command: Heizkreis.A1M1.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Aktuelle_Betriebsart: type: num - md_command: Heizkreis.A1M1.Aktuelle_Betriebsart - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.Aktuelle_Betriebsart + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Sparbetrieb: type: num - md_command: Heizkreis.A1M1.Sparbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Sparbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Partybetrieb_Zeit: type: num - md_command: Heizkreis.A1M1.Partybetrieb_Zeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Partybetrieb_Zeit + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Partybetrieb: type: num - md_command: Heizkreis.A1M1.Partybetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Partybetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 MischerM1: type: num - md_command: Heizkreis.A1M1.MischerM1 - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.A1M1.MischerM1 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Heizkreispumpenlogik: type: num - md_command: Heizkreis.A1M1.Heizkreispumpenlogik - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Heizkreispumpenlogik + viess_read: true + viess_write: true + 
viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 Sparschaltung: type: num - md_command: Heizkreis.A1M1.Sparschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Sparschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2973,14 +2981,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.A1M1.Heizkennlinie + viess_read_group_trigger: Heizkreis.A1M1.Heizkennlinie Neigung: type: num - md_command: Heizkreis.A1M1.Heizkennlinie.Neigung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Heizkennlinie.Neigung + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -2988,10 +2996,10 @@ item_structs: Niveau: type: num - md_command: Heizkreis.A1M1.Heizkennlinie.Niveau - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.A1M1.Heizkennlinie.Niveau + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.A1M1 @@ -3002,28 +3010,28 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2 + viess_read_group_trigger: Heizkreis.M2 Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur + viess_read_group_trigger: Heizkreis.M2.Temperatur Raum: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur.Raum + viess_read_group_trigger: Heizkreis.M2.Temperatur.Raum Soll_Normal: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Normal - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Normal + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3032,10 +3040,10 @@ item_structs: Soll_Reduziert: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Reduziert - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Reduziert + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3044,10 +3052,10 @@ item_structs: Soll_Party: type: num - md_command: Heizkreis.M2.Temperatur.Raum.Soll_Party - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Raum.Soll_Party + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3059,14 +3067,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf + viess_read_group_trigger: Heizkreis.M2.Temperatur.Vorlauf Soll: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Soll + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3075,10 +3083,10 @@ item_structs: Ist: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3087,10 +3095,10 @@ item_structs: Min: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Min - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Min + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ 
-3099,10 +3107,10 @@ item_structs: Max: type: num - md_command: Heizkreis.M2.Temperatur.Vorlauf.Max - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Temperatur.Vorlauf.Max + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3111,120 +3119,120 @@ item_structs: Betriebsart: type: num - md_command: Heizkreis.M2.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Betriebsart + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Aktuelle_Betriebsart: type: num - md_command: Heizkreis.M2.Aktuelle_Betriebsart - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.Aktuelle_Betriebsart + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Sparbetrieb: type: num - md_command: Heizkreis.M2.Sparbetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Sparbetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Partybetrieb: type: num - md_command: Heizkreis.M2.Partybetrieb - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Partybetrieb + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Partybetrieb_Zeit: type: num - md_command: Heizkreis.M2.Partybetrieb_Zeit - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Partybetrieb_Zeit + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 MischerM2: type: num - md_command: Heizkreis.M2.MischerM2 - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.MischerM2 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 MischerM2Auf: type: bool - md_command: Heizkreis.M2.MischerM2Auf - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.MischerM2Auf + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 MischerM2Zu: type: bool - md_command: Heizkreis.M2.MischerM2Zu - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.MischerM2Zu + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Heizkreispumpenlogik: type: num - md_command: Heizkreis.M2.Heizkreispumpenlogik - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Heizkreispumpenlogik + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 Sparschaltung: type: num - md_command: Heizkreis.M2.Sparschaltung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Sparschaltung + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 StatusKlemme2: type: bool - md_command: Heizkreis.M2.StatusKlemme2 - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.StatusKlemme2 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 StatusKlemme17: type: bool - md_command: Heizkreis.M2.StatusKlemme17 - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.M2.StatusKlemme17 + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3234,14 +3242,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: 
Heizkreis.M2.Heizkennlinie + viess_read_group_trigger: Heizkreis.M2.Heizkennlinie Neigung: type: num - md_command: Heizkreis.M2.Heizkennlinie.Neigung - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Heizkennlinie.Neigung + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3249,10 +3257,10 @@ item_structs: Niveau: type: num - md_command: Heizkreis.M2.Heizkennlinie.Niveau - md_read: true - md_write: true - md_read_group: + viess_command: Heizkreis.M2.Heizkennlinie.Niveau + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Heizkreis - Heizkreis.M2 @@ -3263,68 +3271,68 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Warmwasser + viess_read_group_trigger: Warmwasser Status: type: bool - md_command: Warmwasser.Status - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Status + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Warmwasser KesselOffset: type: num - md_command: Warmwasser.KesselOffset - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.KesselOffset + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Warmwasser BeiPartyDNormal: type: num - md_command: Warmwasser.BeiPartyDNormal - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.BeiPartyDNormal + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Warmwasser Ist: type: num - md_command: Warmwasser.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Ist + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Warmwasser Soll: type: num - md_command: Warmwasser.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.Soll + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Warmwasser SollAktuell: type: num - md_command: Warmwasser.SollAktuell - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.SollAktuell + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Warmwasser SollMax: type: num - md_command: Warmwasser.SollMax - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.SollMax + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Warmwasser @@ -3333,41 +3341,41 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Ferienprogramm + viess_read_group_trigger: Ferienprogramm A1M1: read: type: bool enforce_updates: true - md_read_group_trigger: Ferienprogramm.A1M1 + viess_read_group_trigger: Ferienprogramm.A1M1 Status: type: num - md_command: Ferienprogramm.A1M1.Status - md_read: true - md_write: false - md_read_group: + viess_command: Ferienprogramm.A1M1.Status + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.A1M1 Abreisetag: type: bool - md_command: Ferienprogramm.A1M1.Abreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.A1M1.Abreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.A1M1 Rückreisetag: type: bool - md_command: Ferienprogramm.A1M1.Rückreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.A1M1.Rückreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.A1M1 @@ -3377,34 +3385,34 @@ item_structs: read: type: bool 
enforce_updates: true - md_read_group_trigger: Ferienprogramm.M2 + viess_read_group_trigger: Ferienprogramm.M2 Status: type: num - md_command: Ferienprogramm.M2.Status - md_read: true - md_write: false - md_read_group: + viess_command: Ferienprogramm.M2.Status + viess_read: true + viess_write: false + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.M2 Abreisetag: type: bool - md_command: Ferienprogramm.M2.Abreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.M2.Abreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.M2 Rückreisetag: type: bool - md_command: Ferienprogramm.M2.Rückreisetag - md_read: true - md_write: true - md_read_group: + viess_command: Ferienprogramm.M2.Rückreisetag + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Ferienprogramm - Ferienprogramm.M2 @@ -3414,81 +3422,81 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer + viess_read_group_trigger: Timer Warmwasser: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.Warmwasser + viess_read_group_trigger: Timer.Warmwasser Mo: type: list - md_command: Timer.Warmwasser.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser Di: type: list - md_command: Timer.Warmwasser.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Di + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser Mi: type: list - md_command: Timer.Warmwasser.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser Do: type: list - md_command: Timer.Warmwasser.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Do + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser Fr: type: list - md_command: Timer.Warmwasser.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser Sa: type: list - md_command: Timer.Warmwasser.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser So: type: list - md_command: Timer.Warmwasser.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.Warmwasser.So + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.Warmwasser @@ -3498,74 +3506,74 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.A1M1 + viess_read_group_trigger: Timer.A1M1 Mo: type: list - md_command: Timer.A1M1.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 Di: type: list - md_command: Timer.A1M1.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Di + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 Mi: type: list - md_command: Timer.A1M1.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Mi + viess_read: true + viess_write: 
true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 Do: type: list - md_command: Timer.A1M1.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Do + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 Fr: type: list - md_command: Timer.A1M1.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 Sa: type: list - md_command: Timer.A1M1.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 So: type: list - md_command: Timer.A1M1.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.A1M1.So + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.A1M1 @@ -3575,74 +3583,74 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Timer.M2 + viess_read_group_trigger: Timer.M2 Mo: type: list - md_command: Timer.M2.Mo - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Mo + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 Di: type: list - md_command: Timer.M2.Di - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Di + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 Mi: type: list - md_command: Timer.M2.Mi - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Mi + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 Do: type: list - md_command: Timer.M2.Do - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Do + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 Fr: type: list - md_command: Timer.M2.Fr - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Fr + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 Sa: type: list - md_command: Timer.M2.Sa - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.Sa + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 So: type: list - md_command: Timer.M2.So - md_read: true - md_write: true - md_read_group: + viess_command: Timer.M2.So + viess_read: true + viess_write: true + viess_read_group: - V200KW2 - Timer - Timer.M2 @@ -3650,95 +3658,95 @@ item_structs: V200WO1C: Anlagentyp: - type: num - md_command: Anlagentyp - md_read: true - md_write: false - md_read_group: + type: str + viess_command: Anlagentyp + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - md_read_initial: true + viess_read_initial: true Allgemein: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein - md_read_cycle: 45 + viess_read_group_trigger: Allgemein + viess_read_cycle: 45 Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Allgemein.Temperatur + viess_read_group_trigger: Allgemein.Temperatur Aussen: type: num - md_command: Allgemein.Temperatur.Aussen - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Temperatur.Aussen + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Allgemein - Allgemein.Temperatur Betriebsart: type: str - md_command: Allgemein.Betriebsart - md_read: true - md_write: true - md_read_group: + viess_command: Allgemein.Betriebsart + 
viess_read: true + viess_write: true + viess_read_group: - V200WO1C - Allgemein - md_read_initial: true + viess_read_initial: true lookup: type: list - md_lookup: operatingmodes#list + viess_lookup: operatingmodes#list Manuell: type: num - md_command: Allgemein.Manuell - md_read: true - md_write: true - md_read_group: + viess_command: Allgemein.Manuell + viess_read: true + viess_write: true + viess_read_group: - V200WO1C - Allgemein Outdoor_Fanspeed: type: num - md_command: Allgemein.Outdoor_Fanspeed - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Outdoor_Fanspeed + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Allgemein Status_Fanspeed: type: num - md_command: Allgemein.Status_Fanspeed - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Status_Fanspeed + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Allgemein Kompressor_Freq: type: num - md_command: Allgemein.Kompressor_Freq - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.Kompressor_Freq + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Allgemein SollLeistungVerdichter: type: num - md_command: Allgemein.SollLeistungVerdichter - md_read: true - md_write: false - md_read_group: + viess_command: Allgemein.SollLeistungVerdichter + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Allgemein @@ -3747,32 +3755,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Pumpen + viess_read_group_trigger: Pumpen Sekundaer: type: bool - md_command: Pumpen.Sekundaer - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Sekundaer + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Pumpen Heizkreis: type: bool - md_command: Pumpen.Heizkreis - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Heizkreis + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Pumpen Zirkulation: type: bool - md_command: Pumpen.Zirkulation - md_read: true - md_write: false - md_read_group: + viess_command: Pumpen.Zirkulation + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Pumpen @@ -3781,28 +3789,28 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis + viess_read_group_trigger: Heizkreis Temperatur: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.Temperatur + viess_read_group_trigger: Heizkreis.Temperatur Raum: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.Temperatur.Raum + viess_read_group_trigger: Heizkreis.Temperatur.Raum Soll: type: num - md_command: Heizkreis.Temperatur.Raum.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Raum.Soll + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3810,10 +3818,10 @@ item_structs: Soll_Reduziert: type: num - md_command: Heizkreis.Temperatur.Raum.Soll_Reduziert - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Raum.Soll_Reduziert + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3821,10 +3829,10 @@ item_structs: Soll_Party: type: num - md_command: Heizkreis.Temperatur.Raum.Soll_Party - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Raum.Soll_Party + viess_read: true + viess_write: false + 
viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3835,14 +3843,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.Temperatur.Vorlauf + viess_read_group_trigger: Heizkreis.Temperatur.Vorlauf Ist: type: num - md_command: Heizkreis.Temperatur.Vorlauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Vorlauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3850,10 +3858,10 @@ item_structs: Soll: type: num - md_command: Heizkreis.Temperatur.Vorlauf.Soll - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Vorlauf.Soll + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3861,10 +3869,10 @@ item_structs: Mittel: type: num - md_command: Heizkreis.Temperatur.Vorlauf.Mittel - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Vorlauf.Mittel + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3875,14 +3883,14 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.Temperatur.Ruecklauf + viess_read_group_trigger: Heizkreis.Temperatur.Ruecklauf Ist: type: num - md_command: Heizkreis.Temperatur.Ruecklauf.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Ruecklauf.Ist + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3890,10 +3898,10 @@ item_structs: Mittel: type: num - md_command: Heizkreis.Temperatur.Ruecklauf.Mittel - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Temperatur.Ruecklauf.Mittel + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Temperatur @@ -3904,24 +3912,24 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Heizkreis.Heizkennlinie + viess_read_group_trigger: Heizkreis.Heizkennlinie Niveau: type: num - md_command: Heizkreis.Heizkennlinie.Niveau - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Heizkennlinie.Niveau + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Heizkennlinie Neigung: type: num - md_command: Heizkreis.Heizkennlinie.Neigung - md_read: true - md_write: false - md_read_group: + viess_command: Heizkreis.Heizkennlinie.Neigung + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Heizkreis - Heizkreis.Heizkennlinie @@ -3931,32 +3939,32 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Warmwasser + viess_read_group_trigger: Warmwasser Ist: type: num - md_command: Warmwasser.Ist - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Ist + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Warmwasser Soll: type: num - md_command: Warmwasser.Soll - md_read: true - md_write: true - md_read_group: + viess_command: Warmwasser.Soll + viess_read: true + viess_write: true + viess_read_group: - V200WO1C - Warmwasser Ventil: type: bool - md_command: Warmwasser.Ventil - md_read: true - md_write: false - md_read_group: + viess_command: Warmwasser.Ventil + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Warmwasser @@ -3965,51 +3973,51 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: 
Statistik + viess_read_group_trigger: Statistik Einschaltungen: read: type: bool enforce_updates: true - md_read_group_trigger: Statistik.Einschaltungen + viess_read_group_trigger: Statistik.Einschaltungen Sekundaer: type: num - md_command: Statistik.Einschaltungen.Sekundaer - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Einschaltungen.Sekundaer + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Einschaltungen Heizstab1: type: num - md_command: Statistik.Einschaltungen.Heizstab1 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Einschaltungen.Heizstab1 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Einschaltungen Heizstab2: type: num - md_command: Statistik.Einschaltungen.Heizstab2 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Einschaltungen.Heizstab2 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Einschaltungen HK: type: num - md_command: Statistik.Einschaltungen.HK - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Einschaltungen.HK + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Einschaltungen @@ -4019,178 +4027,178 @@ item_structs: read: type: bool enforce_updates: true - md_read_group_trigger: Statistik.Laufzeiten + viess_read_group_trigger: Statistik.Laufzeiten Sekundaerpumpe: type: num - md_command: Statistik.Laufzeiten.Sekundaerpumpe - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.Sekundaerpumpe + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten Heizstab1: type: num - md_command: Statistik.Laufzeiten.Heizstab1 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.Heizstab1 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten Heizstab2: type: num - md_command: Statistik.Laufzeiten.Heizstab2 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.Heizstab2 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten PumpeHK: type: num - md_command: Statistik.Laufzeiten.PumpeHK - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.PumpeHK + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten WWVentil: type: num - md_command: Statistik.Laufzeiten.WWVentil - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.WWVentil + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterStufe1: type: num - md_command: Statistik.Laufzeiten.VerdichterStufe1 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterStufe1 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterStufe2: type: num - md_command: Statistik.Laufzeiten.VerdichterStufe2 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterStufe2 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterStufe3: type: num - md_command: Statistik.Laufzeiten.VerdichterStufe3 - md_read: true - md_write: false - 
md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterStufe3 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterStufe4: type: num - md_command: Statistik.Laufzeiten.VerdichterStufe4 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterStufe4 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterStufe5: type: num - md_command: Statistik.Laufzeiten.VerdichterStufe5 - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterStufe5 + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten VerdichterWP: type: num - md_command: Statistik.Laufzeiten.VerdichterWP - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.Laufzeiten.VerdichterWP + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik - Statistik.Laufzeiten OAT_Temperature: type: num - md_command: Statistik.OAT_Temperature - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.OAT_Temperature + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik ICT_Temperature: type: num - md_command: Statistik.ICT_Temperature - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.ICT_Temperature + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik CCT_Temperature: type: num - md_command: Statistik.CCT_Temperature - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.CCT_Temperature + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik HST_Temperature: type: num - md_command: Statistik.HST_Temperature - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.HST_Temperature + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik OMT_Temperature: type: num - md_command: Statistik.OMT_Temperature - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.OMT_Temperature + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik WaermeWW12M: type: num - md_command: Statistik.WaermeWW12M - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.WaermeWW12M + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik ElektroWW12M: type: num - md_command: Statistik.ElektroWW12M - md_read: true - md_write: false - md_read_group: + viess_command: Statistik.ElektroWW12M + viess_read: true + viess_write: false + viess_read_group: - V200WO1C - Statistik plugin_functions: NONE diff --git a/viessmann/protocol.py b/viessmann/protocol.py index aa630def3..315b85e98 100644 --- a/viessmann/protocol.py +++ b/viessmann/protocol.py @@ -68,6 +68,10 @@ def __init__(self, data_received_callback, name=None, **kwargs): self._is_initialized = False self._data_received_callback = data_received_callback + # try to assure no concurrent sending is done + self._send_lock = threading.Lock() + self.use_send_lock = True + self._controlsets = { 'P300': { 'baudrate': 4800, @@ -158,7 +162,10 @@ def _send_init_on_send(self): :return: Returns True, if communication was established successfully, False otherwise :rtype: bool """ - if self._viess_proto == 'P300' and not self._is_initialized: + if self._viess_proto == 'P300': + + if self._is_initialized: + return True # init procedure is # 
interface: 0x04 (reset) @@ -174,34 +181,44 @@ def _send_init_on_send(self): ERR = self._int2bytes(self._controlset['init_error'], 1) self.logger.debug('init communication....') - syncsent = False + self.__syncsent = False + empty_replies = 0 self.logger.debug(f'send_bytes: send reset command {RESET}') self._send_bytes(RESET) - readbyte = self._read_bytes(1) - self.logger.debug(f'read_bytes: read {readbyte}') - for i in range(10): - if syncsent and readbyte == ACK: + readbyte = self._read_bytes(1) + self.logger.debug(f'read_bytes: read {readbyte}') + + if self.__syncsent and readbyte == ACK: self.logger.debug('device acknowledged initialization') self._is_initialized = True break elif readbyte == NOTINIT: self.logger.debug(f'send_bytes: send sync command {SYNC}') self._send_bytes(SYNC) - syncsent = True + self.__syncsent = True + empty_replies = 0 elif readbyte == ERR: self.logger.error(f'interface reported an error, loop increment {i}') self.logger.debug(f'send_bytes: send reset command {RESET}') self._send_bytes(RESET) - syncsent = False - else: # elif readbyte != b'': - self.logger.debug(f'send_bytes: send reset command {RESET}') + self.__syncsent = False + empty_replies = 0 + elif readbyte == b'': + # allow for some (5) empty replies due to timing issues without breaking sync + empty_replies += 1 + if empty_replies > 5: + self.logger.debug(f'send_bytes: too many empty replies, send reset command {RESET}') + self._send_bytes(RESET) + self.__syncsent = False + empty_replies = 0 + else: + self.logger.debug(f'RESET send_bytes: send reset command {RESET}') self._send_bytes(RESET) - syncsent = False - readbyte = self._read_bytes(1) - self.logger.debug(f'read_bytes: read {readbyte}') + self.__syncsent = False + empty_replies = 0 self.logger.debug(f'communication initialized: {self._is_initialized}') return self._is_initialized @@ -392,7 +409,7 @@ def _build_payload(self, data_dict): # build payload if write: - payloadlength = int(self._controlset.get('command_bytes_write', 0)) + int(valuebytes) + payloadlength = int(self._controlset.get('command_bytes_write', 0)) + cmdlen # int(valuebytes) self.logger.debug(f'Payload length is: {payloadlength} bytes') packet = bytearray() From a6f659e12c0d2e56942a40853997f04aea4084dc Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Tue, 8 Aug 2023 15:34:46 +0200 Subject: [PATCH 236/775] mqtt: warn on foo item type --- mqtt/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mqtt/__init__.py b/mqtt/__init__.py index 188125ca3..b208bd17f 100755 --- a/mqtt/__init__.py +++ b/mqtt/__init__.py @@ -131,6 +131,9 @@ def parse_item(self, item): if self.has_iattr(item.conf, 'mqtt_topic_in') or self.has_iattr(item.conf, 'mqtt_topic_out'): self.logger.debug("parsing item: {0}".format(item.id())) + if item.property.type == 'foo': + self.logger.warning(f"item {item.path()} has item type foo, which will not be processed by the MQTT system") + # check if mqtt module has been initialized successfully if not self.mod_mqtt: self.logger.warning("MQTT module is not initialized, not parsing item '{}'".format(item.path())) From 3666c76a5a26cea03730b2cb836910a666d98755 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Tue, 8 Aug 2023 20:45:18 +0200 Subject: [PATCH 237/775] viessmann: minor fixes, update docs --- viessmann/plugin.yaml | 9 +- viessmann/user_doc.rst | 308 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 313 insertions(+), 4 deletions(-) create mode 100644 
viessmann/user_doc.rst

diff --git a/viessmann/plugin.yaml b/viessmann/plugin.yaml
index 5458ce165..b8608f5b9 100644
--- a/viessmann/plugin.yaml
+++ b/viessmann/plugin.yaml
@@ -5,11 +5,11 @@ plugin:
     type: gateway
     description:
-        de: Vde
-        en: Ven
+        de: 'Lesen und Schreiben von Werten einer Viessmann Heizung'
+        en: 'Read and write data from/to a Viessmann heating system'
     maintainer: Morg
     state: develop
-    keywords: iot device
+    keywords: iot device sdp
     version: 1.3.0
     sh_minversion: 1.9.3.1
     py_minversion: 3.6
@@ -44,6 +44,7 @@ parameters:
             de: 'Item-Pfad für das Suspend-Item'
             en: 'item path for suspend switch item'

+    # die folgenden Attribute nicht ändern
     command_class:
         type: str
         default: SDPCommandViessmann
@@ -103,7 +104,7 @@ item_attributes:
         type: str
         description:
-            de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit md_command belegt werden.
+            de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit viess_command belegt werden.
             en: When set to any value, all items configured for reading for the given group will update their value from the device, if group is 0, all items configured for reading will update. The item cannot be used with md_command in parallel.

     viess_lookup:
diff --git a/viessmann/user_doc.rst b/viessmann/user_doc.rst
new file mode 100644
index 000000000..4dab67e7c
--- /dev/null
+++ b/viessmann/user_doc.rst
@@ -0,0 +1,308 @@
+.. index:: Plugins; viessmann
+.. index:: viessmann
+
+=========
+viessmann
+=========
+
+.. image:: webif/static/img/plugin_logo.svg
+    :alt: plugin logo
+    :width: 300px
+    :height: 300px
+    :scale: 50 %
+    :align: left
+
+Das Viessmann-Plugin ermöglicht die Verbindung zu einer Viessmann-Heizung über einen IR-Adapter (z.B. Optolink oder Nachbauten, wie im OpenV-Wiki beschrieben) und das Lesen und Schreiben von Parametern der Heizung.
+Derzeit werden das P300- und das KW-Protokoll unterstützt. Weitere Gerätetypen, die diese Protokolle unterstützen, können einfach hinzugefügt werden. Für weitere Protokolle (z.B. GWG) wird zusätzliche Entwicklungsarbeit notwendig sein.
+
+Details zu den betroffenen Geräten und Protokollen finden sich im `OpenV-Wiki <https://github.com/openv/openv/wiki/vcontrold>`_.
+
+Dieses Plugin nutzt eine separate Datei ``commands.py``, in der die Definitionen für Protokolle, Gerätetypen und Befehlssätze enthalten sind. Neue Geräte können hinzugefügt werden, indem die entsprechenden Informationen in der ``commands.py`` ergänzt werden.
+
+Das Plugin unterstützt die serielle Kommunikation mit dem Lesekopf (ggf. über einen USB-Seriell-Adapter).
+
+Zur Identifizierung des Heizungstyps kann das Plugin auch im Standalone-Modus betrieben werden (s.u.).
+
+
+Anpassungen durch Update auf sdp
+--------------------------------
+
+Durch die Umstellung auf sdp haben sich sowohl in der Plugin- als auch in der Item-Konfiguration Änderungen ergeben.
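+
+Als Orientierung ein minimales Beispiel der neuen Plugin-Konfiguration; die Details folgen in den nächsten Abschnitten. Der gezeigte Item-Pfad für ``suspend_item`` ist nur beispielhaft gewählt:
+
+.. code:: yaml
+
+    viessmann:
+        plugin_name: viessmann
+        protocol: P300
+        model: V200KO1B          # bisher: heating_type
+        serialport: /dev/ttyUSB_optolink
+        # optional, neu in 1.3.0 (Item-Pfad nur als Beispiel):
+        # suspend_item: heizung.suspend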
+
+Plugin-Konfiguration:
+~~~~~~~~~~~~~~~~~~~~~
+
+- der Parameter ``heating_type`` ist in ``model`` umbenannt worden
+- der Parameter ``suspend_item`` ist neu hinzugefügt worden und bestimmt (bei Bedarf) das Item zum Steuern des Suspend-Modus
+
+Item-Konfiguration:
+~~~~~~~~~~~~~~~~~~~
+
+Die Item-Konfiguration von sdp wird durch mitgelieferte Structs unterstützt. Zu Details siehe weiter unten.
+
+Das Attribut ``viess_balist`` gibt es nicht mehr, die Funktionalität wird durch Lookup-Tabellen abgebildet. Die Lookup-Tabelle zur Betriebsart ist im Item ``Allgemein.Betriebsart.lookup`` standardmäßig verfügbar.
+
+Plugin-Funktionen:
+~~~~~~~~~~~~~~~~~~
+
+Die Funktion ``update_all_read_items`` existiert nicht mehr. SmartDevicePlugin bietet generell die Funktion ``read_all_commands(group='')`` an, die die gleiche Funktionalität bereitstellt. Hier kann eine Gruppe, eine Liste von Gruppen oder 0 (für alle Items) angegeben werden, die gelesen werden sollen. Die Konfiguration entspricht den read_group_triggers (die intern nur diese Funktion anstoßen).
+
+
+Changelog
+---------
+
+1.3.0
+~~~~~
+
+- komplettes Rewrite auf Basis SmartDevicePlugin
+- Umfang der unterstützten Geräte beibehalten
+- breaking Change: Konfiguration (Plugin und Items) müssen angepasst werden
+
+1.2.2
+~~~~~
+
+- Funktion zum manuellen Schreiben von Werten hinzugefügt
+
+1.2.0
+~~~~~
+
+- Komplette Überarbeitung von Code und Webinterface (AJAX)
+- Code refaktorisiert und besser strukturiert
+- Funktion zum Lesen mehrerer Werte gleichzeitig im KW-Protokoll
+- Verbesserte Fehler- und Locking-Behandlung
+- Funktionen zum manuellen Auslesen von konfigurierten und unbekannten Adressen, z.B. zum Testen von Adressen
+- Webinterface mit der Möglichkeit, Adressen manuell auszulesen
+
+1.1.0
+~~~~~
+
+- Unterstützung für das KW-Protokoll
+
+1.0.0
+~~~~~
+
+- Erste Version
+
+Anforderungen
+=============
+
+Das Plugin benötigt die ``pyserial``-Bibliothek und einen seriellen IR-Adapter.
+
+Unterstützte Geräte
+-------------------
+
+Jede Viessmann-Heizung mit Optolink-Anschluss wird grundsätzlich unterstützt.
+
+Derzeit sind Gerätekonfigurationen (Befehlssätze) für die folgenden Typen verfügbar:
+
+- V200KO1B
+- V200HO1C
+- V200KW2
+- V200WO1C
+
+Weitere Gerätetypen können problemlos hinzugefügt werden, wenn die entsprechenden Befehlsadressen bekannt sind.
+
+Konfiguration
+=============
+
+Die Plugin-Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind
+unter :doc:`/plugins_doc/config/viessmann` beschrieben.
+
+
+plugin.yaml
+-----------
+
+.. code:: yaml
+
+    viessmann:
+        protocol: P300
+        plugin_name: viessmann
+        model: V200KO1B
+        serialport: /dev/ttyUSB_optolink
+
+
+items.yaml
+----------
+
+Zur Vereinfachung werden fertige Structs für alle unterstützten Gerätetypen mitgeliefert. Diese können wie folgt eingebunden werden:
+
+.. code:: yaml
+
+    heizungsitem:
+        struct: viessmann.MODEL
+
+
+:note: Das Wort "MODEL" in der Itemkonfiguration bleibt wörtlich so stehen, sdp verwendet automatisch den entsprechend passenden Struct.
+
+
+Sofern keine weiteren Angaben gewünscht sind, ist die Item-Konfiguration damit abgeschlossen. Da die Item-Struktur der Kommando-Struktur entspricht, werden sich die Items ändern, d.h. verschieben und ggf. umbenennen. Item-Referenzen müssen entsprechend angepasst werden.
+
+
+Sofern eine manuelle Item-Konfiguration gewünscht wird, ist dies auch möglich. Die Verknüpfung von SmartHomeNG-Items und Heizungsparametern ist vollständig flexibel und konfigurierbar.
Mit den Item-Attributen kann das Verhalten des Plugins festgelegt werden. + +Die folgenden Attribute werden unterstützt: + + +viess\_command +~~~~~~~~~~~~~~ + +Dieses Attribut legt fest, welcher Befehl ausgeführt bzw. welcher Parameter vom Gerät gelesen oder geschrieben werden soll. + +.. code:: yaml + + item: + viess_command: Allgemein.Temperatur.Aussen + + +:note: Dies entspricht prinzipiell dem bisherigen Attribut `viess_read`, ohne Aussagen über Lese- oder Schreibverhalten zu treffen. Durch die Umstellung der Befehlsstruktur müssen die Werte angepasst werden. + + +viess\_read +~~~~~~~~~~~ + +Das Item erhält Werte vom Gerät (Wert kann gelesen werden). Typ bool. (Entspricht grob dem alten Attribut `viess_read`) + + +viess\_write +~~~~~~~~~~~~ + +Der Wert des Items wird bei Änderungen an die Heizung gesendet. Typ bool. (Entspricht grob dem alten Attribut `viess_send`) + + +viess\_read\_cycle +~~~~~~~~~~~~~~~~~~ + +Mit einer Angabe in Sekunden wird ein periodisches Lesen angefordert. ``viess_read`` muss zusätzlich konfiguriert sein. + +.. code:: yaml + + item: + viess_command: Allgemein.Temperatur.Aussen + viess_read_cycle: 3600 # every hour + + +viess\_read\_initial +~~~~~~~~~~~~~~~~~~~~ + +Wenn dieses Attribut vorhanden und auf ``true`` gesetzt ist, wird das Item nach dem Start von SmartHomeNG einmalig gelesen. + +.. code:: yaml + + item: + viess_command: Allgemein.Temperatur.Aussen + viess_read_initial: true + + +viess\_read\_group: +~~~~~~~~~~~~~~~~~~~ + +Weist das Item der angegebenen Gruppe zum gesammelten Lesen zu. Die Gruppe kann alt int-Wert oder als str (Name) angegeben werden, mehrere Gruppen können als Liste zugewiesen werden. + +.. code:: yaml + + item: + viess_command: Betriebsart_A1M1 + viess_read_group: + - Status + - Betrieb + - 5 + + +Standardmäßig sind in den Structs bereits Gruppen für alle Strukturbäume vorhanden. + + +viess\_read\_group\_trigger: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Ein Item mit diesem Attribut löst das Lesen der angegebenen Gruppe(n) aus (siehe `viess_read_group`). Mehrere Gruppen können als Liste angegeben werden, wenn als Gruppe 0 angegeben wird, werden alle Werte vom Gerät gelesen. + +Dieses Attribut kann nicht gleichzeitig mit ``viess_command`` gesetzt werden. + + +viess\_lookup: +~~~~~~~~~~~~~~ + +Wenn ein Befehl mit einer Lookup-Tabelle versehen ist, kann die Lookup-Tabelle mit dem angegebenen Namen beim Start einmalig in das Item geschrieben werden. Damit können z.B. Klartextwerte für die Visualisierung angeboten werden. + +.. code:: yaml + + item: + viess_lookup: operationmode + + +:note: In den vorgefertigten Structs sind bei Items, die Werte aus Lookup-Tabellen zurückgeben, die jeweiligen Lookup-Tabellen in Unteritems mit dem Namen ``lookup`` vorhanden. + + +Beispiel +-------- + +Here you can find a configuration sample using the commands for +V200KO1B: + +.. code:: yaml + + viessmann: + struct: MODEL + + +Funktionen +========== + +read\_addr(addr) +---------------- + +Diese Funktion löst das Lesen des Parameters mit der übergebenen Adresse ``addr`` aus. Die Adresse muss als vierstellige Hex-Zahl im String-Format übergeben werden. Es können nur Adressen ausgelesen werden, die im Befehlssatz für den aktiven Heizungstyp enthalten sind. Unabhängig von der Itemkonfiguration werden durch ``read_addr()`` keine Werte an Items zugewiesen. +Der Rückgabewert ist das Ergebnis des Lesevorgangs oder None, wenn ein Fehler aufgetreten ist. 
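+
+Ein kurzes Anwendungsbeispiel als Skizze (Aufruf aus einer Logik; die Adresse ist nur beispielhaft gewählt):
+
+.. code:: python
+
+    # Parameter an Adresse 0x00f8 lesen; liefert None, wenn ein Fehler auftritt
+    wert = sh.plugins.return_plugin('viessmann').read_addr('00f8')
+    if wert is None:
+        logger.warning('read_addr: Lesevorgang fehlgeschlagen')
+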
+
+
+read\_temp\_addr(addr, length=1, mult=0, signed=False)
+------------------------------------------------------------
+
+Diese Funktion versucht, den Parameter an der Adresse ``addr`` zu lesen und einen Wert von ``length`` Bytes (ggf. mit einem Multiplikator ``mult`` und (nicht) vorzeichenbehaftet) zu konvertieren. Die Adresse muss als vierstellige Hex-Zahl im String-Format übergeben werden, im Gegensatz zu ``read_addr()`` aber nicht im Befehlssatz definiert sein. ``length`` ist auf Werte zwischen 1 und 8 (Bytes) beschränkt. ``mult`` gibt den Divisor an und ``signed``, ob der Wert vorzeichenbehaftet ist.
+Der Rückgabewert ist das Ergebnis des Lesevorgangs oder None, wenn ein Fehler aufgetreten ist.
+
+
+write\_addr(addr, value)
+------------------------
+
+Diese Funktion versucht, den Wert ``value`` an die angegebene Adresse zu schreiben. Die Adresse muss als vierstellige Hex-Zahl im String-Format übergeben werden. Es können nur Adressen beschrieben werden, die im Befehlssatz für den aktiven Heizungstyp enthalten sind. Durch ``write_addr`` werden Itemwerte nicht direkt geändert; wenn die geschriebenen Werte von der Heizung wieder ausgelesen werden (z.B. durch zyklisches Lesen), werden die geänderten Werte in die entsprechenden Items übernommen.
+
+
+:Warning: Das Schreiben von beliebigen Werten oder Werten, deren Bedeutung nicht klar ist, kann im Heizungsgerät möglicherweise unerwartete Folgen haben. Auch eine Beschädigung der Heizung ist nicht auszuschließen.
+
+
+:Note: Wenn eine der Plugin-Funktionen in einer Logik verwendet werden soll, kann dies in der folgenden Form erfolgen:
+
+.. code:: python
+
+    result = sh.plugins.return_plugin('viessmann').read_temp_addr('00f8', 2, 'DT')
+
+
+Web Interface
+=============
+
+Im Web-Interface gibt es neben den allgemeinen Statusinformationen zum Plugin zwei Seiten.
+
+Auf einer Seite werden die Items aufgelistet, die mit Plugin-Attributen konfiguriert sind. Damit kann eine schnelle Übersicht über die Konfiguration und die aktuellen Werte geboten werden.
+
+Auf der zweiten Seite werden alle im aktuellen Befehlssatz enthaltenen Parameter aufgelistet. Dabei besteht für jeden Wert einzeln die Möglichkeit, einen Lesevorgang auszulösen. Die Rückgabewerte werden in die jeweilige Tabellenzeile eingetragen. Dies entspricht der Funktion ``read_addr()``, d.h. es werden keine Item-Werte aktualisiert.
+
+Weiterhin kann in der Zeile für den Parameter "_Custom" eine freie Adresse angegeben werden, die analog zur Funktion ``read_temp_addr()`` einen Lesevorgang auf beliebigen Adressen erlaubt. Auch hier wird der Rückgabewert in die jeweilige Tabellenzeile eingetragen. Damit wird ermöglicht, ohne großen Aufwand Datenpunkte und deren Konfiguration (Einheit und Datenlänge) zu testen.
+
+
+Standalone-Modus
+================
+
+Wenn der Heizungstyp nicht bekannt ist, kann das Plugin im Standalone-Modus (also ohne SmartHomeNG zu starten) genutzt werden. Es versucht dann, mit der Heizung zu kommunizieren und den Gerätetyp zu identifizieren.
+
+Dazu muss das Plugin im Plugin-Ordner direkt aufgerufen werden:
+
+``./__init__.py <serieller Port> [-v]``
+
+Der serielle Port ist dabei die Gerätedatei bzw. der entsprechende Port, an dem der Lesekopf angeschlossen ist, z.B. ``/dev/ttyUSB0``. Dieses Argument ist verpflichtend.
+
+Das optionale zweite Argument ``-v`` weist das Plugin an, zusätzliche Debug-Ausgaben zu erzeugen. Solange keine Probleme beim Aufruf auftreten, ist das nicht erforderlich.
+
+Sollte die Datei sich nicht starten lassen, muss ggf. der Dateimodus angepasst werden.
Mit ``chmod u+x __init__.py`` kann die z.B. unter Linux erfolgen. + From b717a32276336fd9e9798ec83c6acb494771d3e9 Mon Sep 17 00:00:00 2001 From: Morg42 <43153739+Morg42@users.noreply.github.com> Date: Tue, 8 Aug 2023 21:02:27 +0200 Subject: [PATCH 238/775] adjust kodi plugin attribute --- kodi/plugin.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kodi/plugin.yaml b/kodi/plugin.yaml index aa838135d..fa25d2c8d 100644 --- a/kodi/plugin.yaml +++ b/kodi/plugin.yaml @@ -25,7 +25,7 @@ parameters: de: Netzwerkziel/-host en: network host - suspend_item_path: + suspend_item: type: str default: '' From 001d6dbd6da89a6d2b0702c7b21a60300cd2d99d Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 8 Aug 2023 22:42:48 +0200 Subject: [PATCH 239/775] database: Added option 'no' to valid list of database attribute --- database/plugin.yaml | 2 +- influxdb2/plugin.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/database/plugin.yaml b/database/plugin.yaml index b232ff699..07d9cbe95 100755 --- a/database/plugin.yaml +++ b/database/plugin.yaml @@ -97,7 +97,7 @@ item_attributes: # Definition of item attributes defined by this plugin database: type: str - valid_list_ci: ['', 'yes', 'init', 'true'] + valid_list_ci: ['', 'no', 'yes', 'init', 'init2', 'true'] duplicate_use: True description: de: "Wenn auf 'yes' oder 'true' gesetzt, werden die Werte des Items in die Datenbank geschrieben. Wenn auf 'init' gesetzt, wird zusätzlich beim Start von SmartHomeNG der Wert des Items aus der Datenbank gelesen." diff --git a/influxdb2/plugin.yaml b/influxdb2/plugin.yaml index 89e12e4ea..66435a9ba 100755 --- a/influxdb2/plugin.yaml +++ b/influxdb2/plugin.yaml @@ -87,7 +87,7 @@ item_attributes: # Definition of item attributes defined by this plugin (enter 'item_attributes: NONE', if section should be empty) influxdb2: type: str - valid_list_ci: ['', 'yes', 'init', 'true'] + valid_list_ci: ['', 'no', 'yes', 'init', 'true'] description: de: "Wenn auf 'yes' oder 'true' gesetzt, werden die Werte des Items in die Datenbank geschrieben. Wenn auf 'init' gesetzt, wird zusätzlich beim Start von SmartHomeNG der Wert des Items aus der Datenbank gelesen." en: "This attribute enables the database logging when set (just use value 'yes' or 'true'). If value 'init' is used, an item will be initalized from the database after SmartHomeNG is restarted." @@ -109,7 +109,7 @@ item_attributes: database: type: str - valid_list_ci: ['', 'yes', 'init', 'true'] + valid_list_ci: ['', 'no', 'yes', 'init', 'true'] duplicate_use: True description: de: "Wenn auf 'yes' oder 'true' gesetzt, werden die Werte des Items in die Datenbank geschrieben. Wenn auf 'init' gesetzt, wird zusätzlich beim Start von SmartHomeNG der Wert des Items aus der Datenbank gelesen." 
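
Zur Einordnung des erweiterten Wertebereichs: Der neu zugelassene Wert 'no' erlaubt es allem Anschein nach, das Datenbank-Logging für ein einzelnes Item explizit abzuschalten, z.B. um eine per Struct vererbte Vorgabe zu übersteuern; die weiter unten überarbeiteten piratewthr-Structs nutzen genau dieses Muster (``my_database: no``). Eine minimale Skizze (Itemname beispielhaft, 'no' in Anführungszeichen, um eine Interpretation als YAML-Bool zu vermeiden):

.. code:: yaml

    aussentemperatur:
        type: num
        database: 'no'    # Annahme: Logging für dieses Item explizit deaktiviert
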
From 7fac6dc90412e709daf10ce6ea6f661c303d503b Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 8 Aug 2023 22:43:51 +0200 Subject: [PATCH 240/775] homeconnect: Added requests-oauthlib to requirements --- homeconnect/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeconnect/requirements.txt b/homeconnect/requirements.txt index d9edc73dd..fc8232c73 100755 --- a/homeconnect/requirements.txt +++ b/homeconnect/requirements.txt @@ -1 +1,2 @@ -oauthlib>=3.1.0 \ No newline at end of file +oauthlib>=3.1.0 +requests-oauthlib From 82905eda9eb38eefb623f2c18393723a96ec7e98 Mon Sep 17 00:00:00 2001 From: msinn Date: Tue, 8 Aug 2023 22:45:41 +0200 Subject: [PATCH 241/775] piratewthr: Reworked structs to use new features; Modified to use lib.env --- piratewthr/__init__.py | 56 +- piratewthr/plugin.yaml | 6749 +++------------------------------------- 2 files changed, 437 insertions(+), 6368 deletions(-) diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index be525bcd1..18a9d897d 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -37,7 +37,7 @@ class PirateWeather(SmartPlugin): - PLUGIN_VERSION = "1.2.0" + PLUGIN_VERSION = "1.2.1" # https://api.pirateweather.net/forecast/[apikey]/[latitude],[longitude] _base_url = 'https://api.pirateweather.net/forecast/' @@ -94,8 +94,8 @@ def __init__(self, sh, *args, **kwargs): self._lon = self.get_parameter_value('longitude') else: self.logger.debug("__init__: latitude and longitude not provided, using shng system values instead.") - self._lat = self.get_sh()._lat - self._lon = self.get_sh()._lon + self._lat = self.get_sh().lat + self._lon = self.get_sh().lon self._lang = self.get_parameter_value('lang') self._units = self.get_parameter_value('units') self._jsonData = {} @@ -374,53 +374,3 @@ def _build_url(self, url_type='forecast'): self.logger.error(f"_build_url: Wrong url type specified: {url_type}") return url - - def get_wind_direction8(self, deg): - - direction_array = ['N', 'NO', 'O', 'SO', 'S', 'SW', 'W', 'NW', 'N'] - - index = int( (deg % 360 + 22.5) / 45) - return direction_array[index] - - - def get_wind_direction16(self, deg): - - direction_array = ['N', 'NNO', 'NO', 'ONO', 'O', 'OSO', 'SO', 'SSO', 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW', 'N'] - - index = int( (deg % 360 + 11.25) / 22.5) - return direction_array[index] - - - def get_location_name(self, lat=None, lon=None): - - if lat is None: - lat = self._lat - if lon is None: - lon = self._lon - - if lat == 0 or lon == 0: - self.logger.debug(f"lat or lon are zero, not sending request: {lat=}, {lon=}") - return - - # api documentation: https://nominatim.org/release-docs/develop/api/Reverse/ - request_str = f"https://nominatim.openstreetmap.org/reverse?lat={lat}&lon={lon}&format=jsonv2" - - try: - response = requests.get(request_str) - except Exception as e: - self.logger.warning(f"get_location_name: Exception when sending GET request: {e}") - return - - try: - json_obj = response.json() - except Exception as e: - self.logger.warning(f"get_location_name: Response '{response}' is no valid json format: {e}") - return '' - - if response.status_code >= 500: - self.logger.warning(f"get_location_name: {self.get_location_name(response.status_code)}") - return '' - - #self.logger.notice(f"{json_obj['display_name']}") - #self.logger.notice(f"{json_obj['address']}") - return json_obj['address']['suburb'] diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index b20fcdc13..6254e21ad 100755 --- a/piratewthr/plugin.yaml +++ 
b/piratewthr/plugin.yaml @@ -11,8 +11,8 @@ plugin: keywords: weather sun wind rain precipitation #documentation: '' support: 'https://knx-user-forum.de/forum/supportforen/smarthome-py/1852685' - version: 1.2.0 # Plugin version - sh_minversion: 1.9.5.2 # minimum shNG version to use this plugin + version: 1.2.1 # Plugin version + sh_minversion: 1.9.5.4 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance restartable: True @@ -27,14 +27,14 @@ parameters: de: 'Persönlicher API Key für pirateweather.net. Registrierung unter https://pirateweather.net/.' en: 'Your own personal API key for pirateweather.net. For your own key register to https://pirateweather.net/.' latitude: - type: str + type: num mandatory: False default: '' description: de: 'Latitude des Ortes, für den die Wetterdaten abgerufen werden sollen. Default: SmartHomeNG Settings' en: 'Latitude for the location, of which weather data is requested. Default: SmartHomeNG Settings' longitude: - type: str + type: num mandatory: False default: '' description: @@ -78,246 +78,65 @@ item_structs: name: Complete weather report from pirateweather.net - Current weather and forecasts are written to database locals: - lat: type: num pw_matchstring@instance: latitude - lon: type: num pw_matchstring@instance: longitude - timezone: type: str pw_matchstring@instance: timezone - location: type: str eval_trigger: - ..lon - eval: sh.plugins.get('piratewthr').get_location_name(sh...lat(), sh...lon()) - - - struct: - - piratewthr.current_weather - - piratewthr.forecast_hourly - - piratewthr.forecast_daily - - minutely: - - summary: - type: str - pw_matchstring@instance: minutely/summary - - icon: - type: str - pw_matchstring@instance: minutely/icon - - icon_visu: - type: str - pw_matchstring@instance: minutely/icon_visu - - alerts: - - list: - type: list - pw_matchstring@instance: alerts - - string_detail: - type: str - pw_matchstring@instance: alerts_string - - flags: - - sources: - type: str - pw_matchstring@instance: flags/sources - - units: - type: str - pw_matchstring@instance: flags/units - - nearest_station: - type: num - pw_matchstring@instance: flags/nearest-station - - weather_nodb: - name: Complete weather report from pirateweather.net - Current weather and forecasts are not to database - - latitude: - type: num - pw_matchstring@instance: latitude - - longitude: - type: num - pw_matchstring@instance: longitude - - timezone: - type: str - pw_matchstring@instance: timezone - - struct: - - piratewthr.current_weather_nodb - - piratewthr.forecast_hourly_nodb - - piratewthr.forecast_daily_nodb - - minutely: - - summary: - type: str - pw_matchstring@instance: minutely/summary - - icon: - type: str - pw_matchstring@instance: minutely/icon - - icon_visu: - type: str - pw_matchstring@instance: minutely/icon_visu - - alerts: - - list: - type: list - pw_matchstring@instance: alerts - - string_detail: - type: str - pw_matchstring@instance: alerts_string - - flags: - - sources: - type: str - pw_matchstring@instance: flags/sources - - units: - type: str - pw_matchstring@instance: flags/units - - nearest_station: - type: num - pw_matchstring@instance: flags/nearest-station - - weather_current_db: - name: Complete weather report from pirateweather.net - Only current weather is written to database - - latitude: - type: num - pw_matchstring@instance: latitude - - longitude: - type: num - pw_matchstring@instance: longitude - - timezone: - type: 
str - pw_matchstring@instance: timezone + eval: env.location_name(sh...lat(), sh...lon()) struct: - - piratewthr.current_weather - - piratewthr.forecast_hourly_nodb - - piratewthr.forecast_daily_nodb + - .current_weather + - .forecast_hourly + - .forecast_daily minutely: - summary: type: str pw_matchstring@instance: minutely/summary - icon: type: str pw_matchstring@instance: minutely/icon - icon_visu: type: str pw_matchstring@instance: minutely/icon_visu alerts: - list: type: list pw_matchstring@instance: alerts - string_detail: type: str pw_matchstring@instance: alerts_string flags: - sources: type: str pw_matchstring@instance: flags/sources - units: type: str pw_matchstring@instance: flags/units - nearest_station: type: num pw_matchstring@instance: flags/nearest-station - weather_current_nodb: - name: Complete weather report from pirateweather.net - No data is written to database - - latitude: - type: num - pw_matchstring@instance: latitude - - longitude: - type: num - pw_matchstring@instance: longitude - - timezone: - type: str - pw_matchstring@instance: timezone - - struct: - - piratewthr.current_weather_nodb - - piratewthr.forecast_hourly_nodb - - piratewthr.forecast_daily_nodb - - minutely: - - summary: - type: str - pw_matchstring@instance: minutely/summary - - icon: - type: str - pw_matchstring@instance: minutely/icon - - icon_visu: - type: str - pw_matchstring@instance: minutely/icon_visu - - alerts: - - list: - type: list - pw_matchstring@instance: alerts - - string_detail: - type: str - pw_matchstring@instance: alerts_string - - flags: - - sources: - type: str - pw_matchstring@instance: flags/sources - - units: - type: str - pw_matchstring@instance: flags/units - - nearest_station: - type: num - pw_matchstring@instance: flags/nearest-station current_weather: - name: Current weather of Weather report from pirateweather.net - Data is written do database + name: Current weather of Weather report from pirateweather.net - By default, data is not written to database + my_database_currently: no + my_database_maxage: 92 + currently: + my_database: ..:my_database_currently + my_database_maxage: ..:. 
time_epoch: type: num @@ -351,7 +170,8 @@ item_structs: precipIntensity: type: num pw_matchstring@instance: currently/precipIntensity - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage precipIntensityError: type: num @@ -360,7 +180,8 @@ item_structs: precipProbability: type: num pw_matchstring@instance: currently/precipProbability - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage precipType: type: str @@ -369,6218 +190,559 @@ item_structs: temperature: type: num pw_matchstring@instance: currently/temperature - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage apparenttemperature: type: num pw_matchstring@instance: currently/apparentTemperature - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage dewpoint: type: num pw_matchstring@instance: currently/dewPoint - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage humidity: type: num pw_matchstring@instance: currently/humidity - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage pressure: type: num pw_matchstring@instance: currently/pressure - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage windSpeed: - type: num + struct: ._windSpeedStruct pw_matchstring@instance: currently/windSpeed - database: init windGust: - type: num + struct: ._windSpeedStruct pw_matchstring@instance: currently/windGust - database: init windBearing: - type: num + struct: ._windBearingStruct pw_matchstring@instance: currently/windBearing - database: init - - windDirectionString: - type: str - eval_trigger: ..windBearing - eval: sh.plugins.get('piratewthr').get_wind_direction16(sh...windBearing()) cloudCover: type: num pw_matchstring@instance: currently/cloudCover - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage uvIndex: type: num pw_matchstring@instance: currently/uvIndex - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage visibility: type: num pw_matchstring@instance: currently/visibility - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage ozone: type: num pw_matchstring@instance: currently/ozone - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage date: type: str pw_matchstring@instance: currently/date - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage day: type: num pw_matchstring@instance: currently/day - database: init + database: ..:my_database + database_maxage: ..:my_database_maxage forecast_hourly: - name: Hourly forcast of Weather report from pirateweather.net - Data is written do database + name: Hourly forcast of Weather report from pirateweather.net - By default, data is not written to database + my_database_hourly: no + my_database_maxage: 92 + hourly: + my_database: ..:my_database_hourly + my_database_maxage: ..:. 
summary: type: str pw_matchstring@instance: hourly/summary - icon: type: str pw_matchstring@instance: hourly/icon - icon_visu: type: str pw_matchstring@instance: hourly/icon_visu hour0: + _hour: hour0 + struct: ._hourStruct + hour1: + _hour: hour1 + struct: ._hourStruct + hour2: + _hour: hour2 + struct: ._hourStruct + hour3: + _hour: hour3 + struct: ._hourStruct + hour4: + _hour: hour4 + struct: ._hourStruct + hour5: + _hour: hour5 + struct: ._hourStruct + hour6: + _hour: hour6 + struct: ._hourStruct + hour7: + _hour: hour7 + struct: ._hourStruct + hour8: + _hour: hour8 + struct: ._hourStruct + hour9: + _hour: hour9 + struct: ._hourStruct + hour10: + _hour: hour10 + struct: ._hourStruct + hour11: + _hour: hour11 + struct: ._hourStruct + hour12: + _hour: hour12 + struct: ._hourStruct - time_epoch: - type: num - pw_matchstring@instance: hourly/hour0/time + forecast_daily: + name: Daily forcast of Weather report from pirateweather.net - By default, data is not written to database + my_database_daily: no + my_database_maxage: 92 - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') + daily: + my_database: ..:my_database_daily + my_database_maxage: ..:. - summary: - type: str - pw_matchstring@instance: hourly/hour0/summary + summary: + type: str + pw_matchstring@instance: daily/summary + icon: + type: str + pw_matchstring@instance: daily/icon + icon_visu: + type: str + pw_matchstring@instance: daily/icon_visu - icon: - type: str - pw_matchstring@instance: hourly/hour0/icon + day0: + _day: day0 + struct: ._dayStruct + day1: + _day: day1 + struct: ._dayStruct + day2: + _day: day2 + struct: ._dayStruct + day3: + _day: day3 + struct: ._dayStruct + day4: + _day: day4 + struct: ._dayStruct + day5: + _day: day5 + struct: ._dayStruct + day6: + _day: day6 + struct: ._dayStruct + day7: + _day: day7 + struct: ._dayStruct - icon_visu: - type: str - pw_matchstring@instance: hourly/hour0/icon_visu +# --------------------------------------- - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour0/nearestStormDistance + _hourStruct: + #_hour: hour0 + my_database: ..:. + my_database_maxage: ..:. - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour0/precipIntensity - database: init - database_maxage: 92 + time_epoch: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/time" - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour0/precipIntensityError + time: + type: str + eval_trigger: ..time_epoch + eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%d. 
%b %Y %H:%M') - precipProbability: - type: num - pw_matchstring@instance: hourly/hour0/precipProbability - database: init - database_maxage: 92 + summary: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/summary" - precipType: - type: str - pw_matchstring@instance: hourly/hour0/precipType + icon: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/icon" - temperature: - type: num - pw_matchstring@instance: hourly/hour0/temperature - database: init - database_maxage: 92 + icon_visu: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/icon_visu" - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour0/apparentTemperature - database: init - database_maxage: 92 + nearestStormDistance: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/nearestStormDistance" - dewpoint: - type: num - pw_matchstring@instance: hourly/hour0/dewPoint - database: init - database_maxage: 92 + precipIntensity: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/precipIntensity" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - humidity: - type: num - pw_matchstring@instance: hourly/hour0/humidity - database: init - database_maxage: 92 + precipIntensityError: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/precipIntensityError" - pressure: - type: num - pw_matchstring@instance: hourly/hour0/pressure - database: init - database_maxage: 92 + precipProbability: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/precipProbability" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - windSpeed: - type: num - pw_matchstring@instance: hourly/hour0/windSpeed - database: init - database_maxage: 92 + precipType: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/precipType" - windGust: - type: num - pw_matchstring@instance: hourly/hour0/windGust - database: init - database_maxage: 92 + temperature: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/temperature" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - windBearing: - type: num - pw_matchstring@instance: hourly/hour0/windBearing - database: init - database_maxage: 92 + apparenttemperature: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/apparentTemperature" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - cloudCover: - type: num - pw_matchstring@instance: hourly/hour0/cloudCover - database: init - database_maxage: 92 + dewpoint: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/dewPoint" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - uvIndex: - type: num - pw_matchstring@instance: hourly/hour0/uvIndex - database: init - database_maxage: 92 + humidity: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/humidity" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - visibility: - type: num - pw_matchstring@instance: hourly/hour0/visibility - database: init - database_maxage: 92 + pressure: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/pressure" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - ozone: - type: num - pw_matchstring@instance: hourly/hour0/ozone - database: init - database_maxage: 92 + windSpeed: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/windSpeed" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - date: - type: str - pw_matchstring@instance: hourly/hour0/date - database: init - 
database_maxage: 92 + windGust: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/windGust" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - weekday: - type: str - pw_matchstring@instance: hourly/hour0/weekday - database: init - database_maxage: 92 + windBearing: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/windBearing" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - hour1: + cloudCover: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/cloudCover" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - time_epoch: - type: num - pw_matchstring@instance: hourly/hour1/time + uvIndex: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/uvIndex" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') + visibility: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/visibility" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - summary: - type: str - pw_matchstring@instance: hourly/hour1/summary + ozone: + type: num + pw_matchstring_@instance: "hourly/{..:_hour}/ozone" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - icon: - type: str - pw_matchstring@instance: hourly/hour1/icon + date: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/date" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - icon_visu: - type: str - pw_matchstring@instance: hourly/hour1/icon_visu + weekday: + type: str + pw_matchstring_@instance: "hourly/{..:_hour}/weekday" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour1/nearestStormDistance + _dayStruct: + #_day: day0 + my_database: ..:. + my_database_maxage: ..:. - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour1/precipIntensity - database: init - database_maxage: 92 + time_epoch: + type: num + pw_matchstring_@instance: "daily/{..:_day}/time" - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour1/precipIntensityError + time: + type: str + eval_trigger: ..time_epoch + eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%d. 
%b %Y %H:%M') - precipProbability: - type: num - pw_matchstring@instance: hourly/hour1/precipProbability - database: init - database_maxage: 92 + summary: + type: str + pw_matchstring_@instance: "daily/{..:_day}/summary" - precipType: - type: str - pw_matchstring@instance: hourly/hour1/precipType + icon: + type: str + pw_matchstring_@instance: "daily/{..:_day}/icon" - temperature: - type: num - pw_matchstring@instance: hourly/hour1/temperature - database: init - database_maxage: 92 + icon_visu: + type: str + pw_matchstring_@instance: "daily/{..:_day}/icon_visu" - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour1/apparentTemperature - database: init - database_maxage: 92 + nearestStormDistance: + type: num + pw_matchstring_@instance: "daily/{..:_day}/nearestStormDistance" - dewpoint: - type: num - pw_matchstring@instance: hourly/hour1/dewPoint - database: init - database_maxage: 92 + precipIntensity: + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipIntensity" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - humidity: - type: num - pw_matchstring@instance: hourly/hour1/humidity - database: init - database_maxage: 92 + precipIntensityError: + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipIntensityError" - pressure: - type: num - pw_matchstring@instance: hourly/hour1/pressure - database: init - database_maxage: 92 + precipProbability: + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipProbability" + database: ..:my_database + database_maxage@instance: ..:my_database_maxage - windSpeed: - type: num - pw_matchstring@instance: hourly/hour1/windSpeed - database: init - database_maxage: 92 + precipType: + type: str + pw_matchstring_@instance: "daily/{..:_day}/precipType" - windGust: - type: num - pw_matchstring@instance: hourly/hour1/windGust - database: init - database_maxage: 92 + temperature: + type: num + pw_matchstring_@instance: "daily/{..:_day}/temperature" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - windBearing: - type: num - pw_matchstring@instance: hourly/hour1/windBearing - database: init - database_maxage: 92 + apparenttemperature: + type: num + pw_matchstring_@instance: "daily/{..:_day}/apparentTemperature" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - cloudCover: - type: num - pw_matchstring@instance: hourly/hour1/cloudCover - database: init - database_maxage: 92 + dewpoint: + type: num + pw_matchstring_@instance: "daily/{..:_day}/dewPoint" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - uvIndex: - type: num - pw_matchstring@instance: hourly/hour1/uvIndex - database: init - database_maxage: 92 + humidity: + type: num + pw_matchstring_@instance: "daily/{..:_day}/humidity" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - visibility: - type: num - pw_matchstring@instance: hourly/hour1/visibility - database: init - database_maxage: 92 + pressure: + type: num + pw_matchstring_@instance: "daily/{..:_day}/pressure" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - ozone: - type: num - pw_matchstring@instance: hourly/hour1/ozone - database: init - database_maxage: 92 + windSpeed: + struct: ._windSpeedStruct + pw_matchstring_@instance: "daily/{..:_day}/windSpeed" - date: - type: str - pw_matchstring@instance: hourly/hour1/date - database: init - database_maxage: 92 + windGust: + struct: 
._windSpeedStruct + pw_matchstring_@instance: "daily/{..:_day}/windGust" - weekday: - type: str - pw_matchstring@instance: hourly/hour1/weekday - database: init - database_maxage: 92 + windBearing: + struct: ._windBearingStruct + pw_matchstring_@instance: "daily/{..:_day}/windBearing" - hour2: + cloudCover: + type: num + pw_matchstring_@instance: "daily/{..:_day}/cloudCover" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - time_epoch: - type: num - pw_matchstring@instance: hourly/hour2/time + uvIndex: + type: num + pw_matchstring_@instance: "daily/{..:_day}/uvIndex" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') + rain: + name: Precipitation volume, mm + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipAccumulation" - summary: - type: str - pw_matchstring@instance: hourly/hour2/summary + snow: + name: (where available) Snow volume, mm + type: num + pw_matchstring_@instance: "daily/{..:_day}/snow" - icon: - type: str - pw_matchstring@instance: hourly/hour2/icon + visibility: + type: num + pw_matchstring_@instance: "daily/{..:_day}/visibility" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - icon_visu: - type: str - pw_matchstring@instance: hourly/hour2/icon_visu + ozone: + type: num + pw_matchstring_@instance: "daily/{..:_day}/ozone" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour2/nearestStormDistance + temperatureMin: + type: num + pw_matchstring_@instance: "daily/{..:_day}/cloudCover" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour2/precipIntensity - database: init - database_maxage: 92 + temperatureMinTime: + type: num + pw_matchstring_@instance: "daily/{..:_day}/temperatureMinTime" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour2/precipIntensityError + temperatureMax: + type: num + pw_matchstring_@instance: "daily/{..:_day}/temperatureMax" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - precipProbability: - type: num - pw_matchstring@instance: hourly/hour2/precipProbability - database: init - database_maxage: 92 + temperatureMaxTime: + type: num + pw_matchstring_@instance: "daily/{..:_day}/temperatureMaxTime" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - precipType: - type: str - pw_matchstring@instance: hourly/hour2/precipType + apparentTemperatureMin: + type: num + pw_matchstring_@instance: "daily/{..:_day}/apparentTemperatureMin" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - temperature: - type: num - pw_matchstring@instance: hourly/hour2/temperature - database: init - database_maxage: 92 + apparentTemperatureMinTime: + type: num + pw_matchstring_@instance: "daily/{..:_day}/apparentTemperatureMinTime" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour2/apparentTemperature - database: init - database_maxage: 92 + apparentTemperatureMax: + type: num + pw_matchstring_@instance: 
"daily/{..:_day}/apparentTemperatureMax" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - dewpoint: - type: num - pw_matchstring@instance: hourly/hour2/dewPoint - database: init - database_maxage: 92 + apparentTemperatureMaxTime: + type: num + pw_matchstring_@instance: "daily/{..:_day}/apparentTemperatureMaxTime" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - humidity: - type: num - pw_matchstring@instance: hourly/hour2/humidity - database: init - database_maxage: 92 + date: + type: str + pw_matchstring_@instance: "daily/{..:_day}/date" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - pressure: - type: num - pw_matchstring@instance: hourly/hour2/pressure - database: init - database_maxage: 92 + weekday: + type: str + pw_matchstring_@instance: "daily/{..:_day}/weekday" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - windSpeed: - type: num - pw_matchstring@instance: hourly/hour2/windSpeed - database: init - database_maxage: 92 + precipProbability_mean: + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipProbability_mean" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - windGust: - type: num - pw_matchstring@instance: hourly/hour2/windGust - database: init - database_maxage: 92 + precipIntensity_mean: + type: num + pw_matchstring_@instance: "daily/{..:_day}/precipIntensity_mean" + database@instance: ..:my_database + database_maxage@instance: ..:my_database_maxage - windBearing: - type: num - pw_matchstring@instance: hourly/hour2/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour2/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour2/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour2/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour2/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour2/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour2/weekday - database: init - database_maxage: 92 - - hour3: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour3/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour3/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour3/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour3/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour3/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour3/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour3/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour3/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour3/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour3/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: 
hourly/hour3/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour3/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour3/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour3/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour3/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour3/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour3/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour3/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour3/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour3/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour3/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour3/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour3/weekday - database: init - database_maxage: 92 - - hour4: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour4/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour4/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour4/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour4/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour4/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour4/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour4/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour4/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour4/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour4/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour4/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour4/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour4/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour4/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour4/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour4/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour4/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour4/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour4/uvIndex - 
database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour4/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour4/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour4/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour4/weekday - database: init - database_maxage: 92 - - hour5: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour5/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour5/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour5/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour5/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour5/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour5/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour5/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour5/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour5/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour5/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour5/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour5/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour5/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour5/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour5/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour5/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour5/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour5/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour5/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour5/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour5/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour5/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour5/weekday - database: init - database_maxage: 92 - - hour6: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour6/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour6/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour6/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour6/icon_visu - - nearestStormDistance: - type: num - 
pw_matchstring@instance: hourly/hour6/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour6/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour6/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour6/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour6/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour6/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour6/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour6/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour6/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour6/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour6/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour6/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour6/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour6/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour6/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour6/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour6/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour6/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour6/weekday - database: init - database_maxage: 92 - - hour7: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour7/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour7/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour7/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour7/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour7/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour7/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour7/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour7/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour7/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour7/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour7/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour7/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour7/humidity - database: init - 
database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour7/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour7/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour7/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour7/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour7/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour7/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour7/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour7/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour7/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour7/weekday - database: init - database_maxage: 92 - - hour8: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour8/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour8/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour8/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour8/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour8/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour8/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour8/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour8/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour8/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour8/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour8/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour8/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour8/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour8/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour8/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour8/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour8/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour8/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour8/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour8/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour8/ozone - database: init - database_maxage: 92 - - date: - type: str - 
pw_matchstring@instance: hourly/hour8/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour8/weekday - database: init - database_maxage: 92 - - hour9: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour9/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour9/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour9/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour9/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour9/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour9/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour9/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour9/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour9/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour9/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour9/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour9/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour9/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour9/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour9/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour9/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour9/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour9/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour9/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour9/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour9/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour9/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour9/weekday - database: init - database_maxage: 92 - - hour10: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour10/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour10/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour10/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour10/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour10/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour10/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour10/precipIntensityError - - 
precipProbability: - type: num - pw_matchstring@instance: hourly/hour10/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour10/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour10/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour10/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour10/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour10/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour10/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour10/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour10/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour10/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour10/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour10/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour10/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour10/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour10/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour10/weekday - database: init - database_maxage: 92 - - hour11: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour11/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour11/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour11/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour11/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour11/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour11/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour11/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour11/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour11/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour11/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour11/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour11/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour11/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour11/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour11/windSpeed - database: init - database_maxage: 92 - - 
windGust: - type: num - pw_matchstring@instance: hourly/hour11/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour11/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour11/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour11/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour11/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour11/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour11/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour11/weekday - database: init - database_maxage: 92 - - hour12: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour12/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour12/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour12/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour12/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour12/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour12/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour12/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour12/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: hourly/hour12/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour12/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour12/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour12/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: hourly/hour12/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: hourly/hour12/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour12/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: hourly/hour12/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: hourly/hour12/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour12/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour12/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: hourly/hour12/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: hourly/hour12/ozone - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: hourly/hour12/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: hourly/hour12/weekday - database: init - database_maxage: 92 - - forecast_daily: - name: 
Daily forcast of Weather report from pirateweather.net - Data is written do database - daily: - - summary: - type: str - pw_matchstring@instance: daily/summary - - icon: - type: str - pw_matchstring@instance: daily/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/icon_visu - - day0: - - time_epoch: - type: num - pw_matchstring@instance: daily/day0/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day0/summary - - icon: - type: str - pw_matchstring@instance: daily/day0/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day0/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day0/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day0/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day0/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day0/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day0/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day0/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day0/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day0/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day0/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day0/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day0/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day0/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day0/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day0/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day0/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day0/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day0/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day0/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day0/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day0/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day0/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMax - database: init - 
database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day0/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day0/weekday - database: init - database_maxage: 92 - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day0/precipProbability_mean - database: init - database_maxage: 92 - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day0/precipIntensity_mean - database: init - database_maxage: 92 - - temperature_mean: - type: num - pw_matchstring@instance: daily/day0/temperature_mean - database: init - database_maxage: 92 - - hours: - type: dict - pw_matchstring@instance: daily/day0/hours - - day1: - - time_epoch: - type: num - pw_matchstring@instance: daily/day1/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day1/summary - - icon: - type: str - pw_matchstring@instance: daily/day1/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day1/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day1/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day1/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day1/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day1/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day1/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day1/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day1/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day1/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day1/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day1/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day1/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day1/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day1/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day1/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day1/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day1/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day1/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day1/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day1/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day1/temperatureMax - database: init - 
database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day1/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day1/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day1/weekday - database: init - database_maxage: 92 - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day1/precipProbability_mean - database: init - database_maxage: 92 - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day1/precipIntensity_mean - database: init - database_maxage: 92 - - temperature_mean: - type: num - pw_matchstring@instance: daily/day1/temperature_mean - database: init - database_maxage: 92 - - hours: - type: dict - pw_matchstring@instance: daily/day1/hours - - day2: - - time_epoch: - type: num - pw_matchstring@instance: daily/day2/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day2/summary - - icon: - type: str - pw_matchstring@instance: daily/day2/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day2/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day2/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day2/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day2/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day2/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day2/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day2/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day2/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day2/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day2/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day2/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day2/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day2/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day2/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day2/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day2/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - 
pw_matchstring@instance: daily/day2/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day2/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day2/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day2/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day2/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day2/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day2/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day2/weekday - database: init - database_maxage: 92 - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day2/precipProbability_mean - database: init - database_maxage: 92 - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day2/precipIntensity_mean - database: init - database_maxage: 92 - - temperature_mean: - type: num - pw_matchstring@instance: daily/day2/temperature_mean - database: init - database_maxage: 92 - - hours: - type: dict - pw_matchstring@instance: daily/day2/hours - - day3: - - time_epoch: - type: num - pw_matchstring@instance: daily/day3/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day3/summary - - icon: - type: str - pw_matchstring@instance: daily/day3/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day3/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day3/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day3/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day3/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day3/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day3/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day3/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day3/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day3/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day3/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day3/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - 
pw_matchstring@instance: daily/day3/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day3/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day3/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day3/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day3/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day3/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day3/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day3/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day3/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day3/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day3/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day3/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day3/weekday - database: init - database_maxage: 92 - - day4: - - time_epoch: - type: num - pw_matchstring@instance: daily/day4/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day4/summary - - icon: - type: str - pw_matchstring@instance: daily/day4/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day4/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day4/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day4/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day4/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day4/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day4/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day4/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day4/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day4/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day4/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: 
daily/day4/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day4/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day4/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day4/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day4/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day4/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day4/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day4/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day4/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day4/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day4/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day4/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day4/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day4/weekday - database: init - database_maxage: 92 - - day5: - - time_epoch: - type: num - pw_matchstring@instance: daily/day5/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day5/summary - - icon: - type: str - pw_matchstring@instance: daily/day5/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day5/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day5/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day5/precipIntensity - database: init - database_maxage: 92 - precipIntensityError: - type: num - pw_matchstring@instance: daily/day5/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day5/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day5/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day5/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day5/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day5/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day5/humidity - database: init 
- database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day5/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day5/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day5/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day5/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day5/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day5/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day5/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day5/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day5/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day5/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day5/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day5/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day5/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day5/weekday - database: init - database_maxage: 92 - - day6: - - time_epoch: - type: num - pw_matchstring@instance: daily/day6/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day6/summary - - icon: - type: str - pw_matchstring@instance: daily/day6/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day6/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day6/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day6/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day6/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day6/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day6/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day6/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day6/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - pw_matchstring@instance: daily/day6/dewPoint - database: init - database_maxage: 92 - - humidity: 
- type: num - pw_matchstring@instance: daily/day6/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day6/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day6/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day6/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day6/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day6/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day6/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day6/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day6/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day6/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day6/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day6/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day6/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day6/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day6/weekday - database: init - database_maxage: 92 - - day7: - - time_epoch: - type: num - pw_matchstring@instance: daily/day7/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day7/summary - - icon: - type: str - pw_matchstring@instance: daily/day7/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day7/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day7/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day7/precipIntensity - database: init - database_maxage: 92 - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day7/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day7/precipProbability - database: init - database_maxage: 92 - - precipType: - type: str - pw_matchstring@instance: daily/day7/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day7/temperature - database: init - database_maxage: 92 - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day7/apparentTemperature - database: init - database_maxage: 92 - - dewpoint: - type: num - 
pw_matchstring@instance: daily/day7/dewPoint - database: init - database_maxage: 92 - - humidity: - type: num - pw_matchstring@instance: daily/day7/humidity - database: init - database_maxage: 92 - - pressure: - type: num - pw_matchstring@instance: daily/day7/pressure - database: init - database_maxage: 92 - - windSpeed: - type: num - pw_matchstring@instance: daily/day7/windSpeed - database: init - database_maxage: 92 - - windGust: - type: num - pw_matchstring@instance: daily/day7/windGust - database: init - database_maxage: 92 - - windBearing: - type: num - pw_matchstring@instance: daily/day7/windBearing - database: init - database_maxage: 92 - - cloudCover: - type: num - pw_matchstring@instance: daily/day7/cloudCover - database: init - database_maxage: 92 - - uvIndex: - type: num - pw_matchstring@instance: daily/day7/uvIndex - database: init - database_maxage: 92 - - visibility: - type: num - pw_matchstring@instance: daily/day7/visibility - database: init - database_maxage: 92 - - ozone: - type: num - pw_matchstring@instance: daily/day7/ozone - database: init - database_maxage: 92 - - temperatureMin: - type: num - pw_matchstring@instance: daily/day7/temperatureMin - database: init - database_maxage: 92 - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day7/temperatureMinTime - database: init - database_maxage: 92 - - temperatureMax: - type: num - pw_matchstring@instance: daily/day7/temperatureMax - database: init - database_maxage: 92 - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day7/temperatureMaxTime - database: init - database_maxage: 92 - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day7/apparentTemperatureMin - database: init - database_maxage: 92 - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day7/apparentTemperatureMinTime - database: init - database_maxage: 92 - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day7/apparentTemperatureMax - database: init - database_maxage: 92 - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day7/apparentTemperatureMaxTime - database: init - database_maxage: 92 - - date: - type: str - pw_matchstring@instance: daily/day7/date - database: init - database_maxage: 92 - - weekday: - type: str - pw_matchstring@instance: daily/day7/weekday - database: init - database_maxage: 92 - - current_weather_nodb: - name: Current weather of Weather report from pirateweather.net - Data is NOT written do database - currently: - - time_epoch: - type: num - pw_matchstring@instance: currently/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: currently/summary - - icon: - type: str - pw_matchstring@instance: currently/icon - - icon_visu: - type: str - pw_matchstring@instance: currently/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: currently/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: currently/precipIntensity - - precipIntensityError: - type: num - pw_matchstring@instance: currently/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: currently/precipProbability - - precipType: - type: str - pw_matchstring@instance: currently/precipType - - temperature: - type: num - pw_matchstring@instance: currently/temperature - - apparenttemperature: - type: num - pw_matchstring@instance: 
currently/apparentTemperature - - dewpoint: - type: num - pw_matchstring@instance: currently/dewPoint - - humidity: - type: num - pw_matchstring@instance: currently/humidity - - pressure: - type: num - pw_matchstring@instance: currently/pressure - - windSpeed: - type: num - pw_matchstring@instance: currently/windSpeed - - windGust: - type: num - pw_matchstring@instance: currently/windGust - - windBearing: - type: num - pw_matchstring@instance: currently/windBearing - - cloudCover: - type: num - pw_matchstring@instance: currently/cloudCover - - uvIndex: - type: num - pw_matchstring@instance: currently/uvIndex - - visibility: - type: num - pw_matchstring@instance: currently/visibility - - ozone: - type: num - pw_matchstring@instance: currently/ozone - - date: - type: str - pw_matchstring@instance: currently/date - - day: - type: num - pw_matchstring@instance: currently/day - - forecast_hourly_nodb: - name: Hourly forcast of Weather report from pirateweather.net - Data is NOT written do database - hourly: - - summary: - type: str - pw_matchstring@instance: hourly/summary - - icon: - type: str - pw_matchstring@instance: hourly/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/icon_visu - - hour0: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour0/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour0/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour0/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour0/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour0/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour0/precipIntensity - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour0/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour0/precipProbability - - precipType: - type: str - pw_matchstring@instance: hourly/hour0/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour0/temperature - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour0/apparentTemperature - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour0/dewPoint - - humidity: - type: num - pw_matchstring@instance: hourly/hour0/humidity - - pressure: - type: num - pw_matchstring@instance: hourly/hour0/pressure - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour0/windSpeed - - windGust: - type: num - pw_matchstring@instance: hourly/hour0/windGust - - windBearing: - type: num - pw_matchstring@instance: hourly/hour0/windBearing - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour0/cloudCover - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour0/uvIndex - - visibility: - type: num - pw_matchstring@instance: hourly/hour0/visibility - - ozone: - type: num - pw_matchstring@instance: hourly/hour0/ozone - - date: - type: str - pw_matchstring@instance: hourly/hour0/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour0/weekday - - - hour1: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour1/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour1/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour1/icon - - 
icon_visu: - type: str - pw_matchstring@instance: hourly/hour1/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour1/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour1/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour1/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour1/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour1/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour1/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour1/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour1/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour1/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour1/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour1/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour1/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour1/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour1/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour1/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour1/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour1/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour1/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour1/weekday - - - hour2: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour2/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour2/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour2/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour2/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour2/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour2/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour2/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour2/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour2/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour2/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour2/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour2/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour2/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour2/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour2/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour2/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour2/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour2/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour2/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour2/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour2/ozone - - - date: - type: str - pw_matchstring@instance: 
hourly/hour2/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour2/weekday - - - hour3: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour3/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour3/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour3/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour3/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour3/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour3/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour3/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour3/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour3/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour3/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour3/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour3/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour3/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour3/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour3/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour3/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour3/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour3/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour3/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour3/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour3/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour3/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour3/weekday - - - hour4: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour4/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour4/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour4/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour4/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour4/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour4/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour4/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour4/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour4/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour4/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour4/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour4/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour4/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour4/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour4/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour4/windGust 
- - - windBearing: - type: num - pw_matchstring@instance: hourly/hour4/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour4/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour4/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour4/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour4/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour4/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour4/weekday - - - hour5: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour5/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour5/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour5/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour5/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour5/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour5/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour5/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour5/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour5/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour5/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour5/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour5/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour5/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour5/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour5/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour5/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour5/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour5/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour5/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour5/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour5/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour5/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour5/weekday - - - hour6: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour6/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour6/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour6/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour6/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour6/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour6/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour6/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour6/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour6/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour6/temperature - - - apparenttemperature: - type: num - 
pw_matchstring@instance: hourly/hour6/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour6/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour6/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour6/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour6/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour6/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour6/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour6/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour6/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour6/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour6/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour6/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour6/weekday - - - hour7: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour7/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour7/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour7/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour7/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour7/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour7/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour7/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour7/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour7/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour7/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour7/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour7/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour7/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour7/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour7/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour7/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour7/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour7/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour7/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour7/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour7/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour7/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour7/weekday - - - hour8: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour8/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour8/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour8/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour8/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour8/nearestStormDistance - - precipIntensity: - type: num - 
pw_matchstring@instance: hourly/hour8/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour8/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour8/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour8/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour8/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour8/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour8/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour8/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour8/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour8/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour8/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour8/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour8/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour8/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour8/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour8/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour8/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour8/weekday - - - hour9: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour9/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour9/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour9/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour9/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour9/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour9/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour9/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour9/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour9/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour9/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour9/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour9/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour9/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour9/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour9/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour9/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour9/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour9/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour9/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour9/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour9/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour9/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour9/weekday - - - hour10: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour10/time - - time: - type: str - eval_trigger: 
..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour10/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour10/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour10/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour10/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour10/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour10/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour10/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour10/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour10/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour10/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour10/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour10/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour10/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour10/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour10/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour10/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour10/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour10/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour10/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour10/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour10/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour10/weekday - - - hour11: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour11/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour11/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour11/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour11/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour11/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour11/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour11/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour11/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour11/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour11/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour11/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour11/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour11/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour11/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour11/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour11/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour11/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour11/cloudCover - - - 
uvIndex: - type: num - pw_matchstring@instance: hourly/hour11/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour11/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour11/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour11/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour11/weekday - - - hour12: - - time_epoch: - type: num - pw_matchstring@instance: hourly/hour12/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: hourly/hour12/summary - - icon: - type: str - pw_matchstring@instance: hourly/hour12/icon - - icon_visu: - type: str - pw_matchstring@instance: hourly/hour12/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: hourly/hour12/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: hourly/hour12/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: hourly/hour12/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: hourly/hour12/precipProbability - - - precipType: - type: str - pw_matchstring@instance: hourly/hour12/precipType - - temperature: - type: num - pw_matchstring@instance: hourly/hour12/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: hourly/hour12/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: hourly/hour12/dewPoint - - - humidity: - type: num - pw_matchstring@instance: hourly/hour12/humidity - - - pressure: - type: num - pw_matchstring@instance: hourly/hour12/pressure - - - windSpeed: - type: num - pw_matchstring@instance: hourly/hour12/windSpeed - - - windGust: - type: num - pw_matchstring@instance: hourly/hour12/windGust - - - windBearing: - type: num - pw_matchstring@instance: hourly/hour12/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: hourly/hour12/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: hourly/hour12/uvIndex - - - visibility: - type: num - pw_matchstring@instance: hourly/hour12/visibility - - - ozone: - type: num - pw_matchstring@instance: hourly/hour12/ozone - - - date: - type: str - pw_matchstring@instance: hourly/hour12/date - - - weekday: - type: str - pw_matchstring@instance: hourly/hour12/weekday - - forecast_daily_nodb: - name: Daily forcast of Weather report from pirateweather.net - Data is NOT written do database - daily: - - summary: - type: str - pw_matchstring@instance: daily/summary - - icon: - type: str - pw_matchstring@instance: daily/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/icon_visu - - day0: - - time_epoch: - type: num - pw_matchstring@instance: daily/day0/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day0/summary - - icon: - type: str - pw_matchstring@instance: daily/day0/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day0/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day0/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day0/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day0/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day0/precipProbability - - - precipType: - type: str - 
pw_matchstring@instance: daily/day0/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day0/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day0/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day0/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day0/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day0/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day0/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day0/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day0/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day0/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day0/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day0/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day0/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day0/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day0/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day0/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day0/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day0/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day0/date - - - weekday: - type: str - pw_matchstring@instance: daily/day0/weekday - - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day0/precipProbability_mean - - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day0/precipIntensity_mean - - - temperature_mean: - type: num - pw_matchstring@instance: daily/day0/temperature_mean - - - hours: - type: dict - pw_matchstring@instance: daily/day0/hours - - day1: - - time_epoch: - type: num - pw_matchstring@instance: daily/day1/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day1/summary - - icon: - type: str - pw_matchstring@instance: daily/day1/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day1/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day1/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day1/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day1/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day1/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day1/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day1/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day1/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day1/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day1/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day1/pressure - 
- - windSpeed: - type: num - pw_matchstring@instance: daily/day1/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day1/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day1/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day1/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day1/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day1/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day1/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day1/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day1/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day1/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day1/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day1/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day1/date - - - weekday: - type: str - pw_matchstring@instance: daily/day1/weekday - - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day1/precipProbability_mean - - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day1/precipIntensity_mean - - - temperature_mean: - type: num - pw_matchstring@instance: daily/day1/temperature_mean - - - hours: - type: dict - pw_matchstring@instance: daily/day1/hours - - day2: - - time_epoch: - type: num - pw_matchstring@instance: daily/day2/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day2/summary - - icon: - type: str - pw_matchstring@instance: daily/day2/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day2/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day2/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day2/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day2/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day2/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day2/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day2/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day2/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day2/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day2/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day2/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day2/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day2/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day2/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day2/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day2/uvIndex - - - visibility: - type: num - pw_matchstring@instance: 
daily/day2/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day2/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day2/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day2/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day2/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day2/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day2/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day2/date - - - weekday: - type: str - pw_matchstring@instance: daily/day2/weekday - - - precipProbability_mean: - type: num - pw_matchstring@instance: daily/day2/precipProbability_mean - - - precipIntensity_mean: - type: num - pw_matchstring@instance: daily/day2/precipIntensity_mean - - - temperature_mean: - type: num - pw_matchstring@instance: daily/day2/temperature_mean - - - hours: - type: dict - pw_matchstring@instance: daily/day2/hours - - day3: - - time_epoch: - type: num - pw_matchstring@instance: daily/day3/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day3/summary - - icon: - type: str - pw_matchstring@instance: daily/day3/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day3/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day3/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day3/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day3/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day3/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day3/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day3/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day3/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day3/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day3/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day3/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day3/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day3/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day3/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day3/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day3/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day3/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day3/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day3/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day3/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day3/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: 
daily/day3/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day3/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day3/date - - - weekday: - type: str - pw_matchstring@instance: daily/day3/weekday - - - day4: - - time_epoch: - type: num - pw_matchstring@instance: daily/day4/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day4/summary - - icon: - type: str - pw_matchstring@instance: daily/day4/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day4/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day4/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day4/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day4/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day4/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day4/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day4/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day4/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day4/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day4/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day4/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day4/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day4/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day4/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day4/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day4/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day4/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day4/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day4/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day4/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day4/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day4/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day4/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day4/date - - - weekday: - type: str - pw_matchstring@instance: daily/day4/weekday - - - day5: - - time_epoch: - type: num - pw_matchstring@instance: daily/day5/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') 
- - summary: - type: str - pw_matchstring@instance: daily/day5/summary - - icon: - type: str - pw_matchstring@instance: daily/day5/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day5/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day5/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day5/precipIntensity - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day5/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day5/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day5/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day5/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day5/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day5/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day5/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day5/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day5/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day5/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day5/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day5/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day5/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day5/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day5/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day5/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day5/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day5/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day5/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day5/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day5/date - - - weekday: - type: str - pw_matchstring@instance: daily/day5/weekday - - - day6: - - time_epoch: - type: num - pw_matchstring@instance: daily/day6/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day6/summary - - icon: - type: str - pw_matchstring@instance: daily/day6/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day6/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day6/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day6/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day6/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day6/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day6/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day6/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: 
daily/day6/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day6/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day6/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day6/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day6/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day6/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day6/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day6/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day6/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day6/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day6/ozone - - - temperatureMin: - type: num - pw_matchstring@instance: daily/day6/temperatureMin - - - temperatureMinTime: - type: num - pw_matchstring@instance: daily/day6/temperatureMinTime - - - temperatureMax: - type: num - pw_matchstring@instance: daily/day6/temperatureMax - - - temperatureMaxTime: - type: num - pw_matchstring@instance: daily/day6/temperatureMaxTime - - - apparentTemperatureMin: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMin - - - apparentTemperatureMinTime: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMinTime - - - apparentTemperatureMax: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMax - - - apparentTemperatureMaxTime: - type: num - pw_matchstring@instance: daily/day6/apparentTemperatureMaxTime - - - date: - type: str - pw_matchstring@instance: daily/day6/date - - - weekday: - type: str - pw_matchstring@instance: daily/day6/weekday - - - day7: - - time_epoch: - type: num - pw_matchstring@instance: daily/day7/time - - time: - type: str - eval_trigger: ..time_epoch - eval: datetime.datetime.fromtimestamp(sh...time_epoch()).strftime('%HH:%MM') - - summary: - type: str - pw_matchstring@instance: daily/day7/summary - - icon: - type: str - pw_matchstring@instance: daily/day7/icon - - icon_visu: - type: str - pw_matchstring@instance: daily/day7/icon_visu - - nearestStormDistance: - type: num - pw_matchstring@instance: daily/day7/nearestStormDistance - - precipIntensity: - type: num - pw_matchstring@instance: daily/day7/precipIntensity - - - precipIntensityError: - type: num - pw_matchstring@instance: daily/day7/precipIntensityError - - precipProbability: - type: num - pw_matchstring@instance: daily/day7/precipProbability - - - precipType: - type: str - pw_matchstring@instance: daily/day7/precipType - - temperature: - type: num - pw_matchstring@instance: daily/day7/temperature - - - apparenttemperature: - type: num - pw_matchstring@instance: daily/day7/apparentTemperature - - - dewpoint: - type: num - pw_matchstring@instance: daily/day7/dewPoint - - - humidity: - type: num - pw_matchstring@instance: daily/day7/humidity - - - pressure: - type: num - pw_matchstring@instance: daily/day7/pressure - - - windSpeed: - type: num - pw_matchstring@instance: daily/day7/windSpeed - - - windGust: - type: num - pw_matchstring@instance: daily/day7/windGust - - - windBearing: - type: num - pw_matchstring@instance: daily/day7/windBearing - - - cloudCover: - type: num - pw_matchstring@instance: daily/day7/cloudCover - - - uvIndex: - type: num - pw_matchstring@instance: daily/day7/uvIndex - - - visibility: - type: num - pw_matchstring@instance: daily/day7/visibility - - - ozone: - type: num - pw_matchstring@instance: daily/day7/ozone - - - temperatureMin: 
-                    type: num
-                    pw_matchstring@instance: daily/day7/temperatureMin
-
-
-                temperatureMinTime:
-                    type: num
-                    pw_matchstring@instance: daily/day7/temperatureMinTime
-
-
-                temperatureMax:
-                    type: num
-                    pw_matchstring@instance: daily/day7/temperatureMax
-
-
-                temperatureMaxTime:
-                    type: num
-                    pw_matchstring@instance: daily/day7/temperatureMaxTime
-
-
-                apparentTemperatureMin:
-                    type: num
-                    pw_matchstring@instance: daily/day7/apparentTemperatureMin
-
-
-                apparentTemperatureMinTime:
-                    type: num
-                    pw_matchstring@instance: daily/day7/apparentTemperatureMinTime
-
-
-                apparentTemperatureMax:
-                    type: num
-                    pw_matchstring@instance: daily/day7/apparentTemperatureMax
-
-
-                apparentTemperatureMaxTime:
-                    type: num
-                    pw_matchstring@instance: daily/day7/apparentTemperatureMaxTime
+            temperature_mean:
+                type: num
+                pw_matchstring_@instance: "daily/{..:_day}/temperature_mean"
+                database@instance: ..:my_database
+                database_maxage@instance: ..:my_database_maxage
+
+            hours:
+                type: dict
+                pw_matchstring_@instance: "daily/{..:_day}/hours"
+
+            _windSpeedStruct:
+                type: num
+                #pw_matchstring@instance: currently/windGust
+                database@instance: ..:my_database
+                database_maxage@instance: ..:my_database_maxage
+
+                beaufortNr:
+                    name: Wind speed as beaufort number
+                    type: num
+                    eval: env.kmh_to_bft(sh...())
+                    eval_trigger: ..
+                beaufortString:
+                    name: Wind speed as beaufort description
+                    type: str
+                    eval: env.bft_to_text( sh...beaufortNr() )
+                    eval_trigger: ..
-                date:
-                    type: str
-                    pw_matchstring@instance: daily/day7/date
+            _windBearingStruct:
+                type: num
+                #pw_matchstring@instance: currently/windBearing
+                database@instance: ..:my_database
+                database_maxage@instance: ..:my_database_maxage
+                windDirectionString:
+                    type: str
+                    eval_trigger: ..
+                    eval: env.degrees_to_direction_16(sh...())
-                weekday:
-                    type: str
-                    pw_matchstring@instance: daily/day7/weekday
+# ---------------------------------------
 logic_parameters: NONE    # Definition of logic parameters defined by this plugin
@@ -6600,46 +762,3 @@ plugin_functions:
             de: "Icon als String."
             en: "Icon as string."
-    get_wind_direction8:
-        type: str
-        description:
-            de: "Bestimmung der Windrichtung als String (NO, ...) aus der Gradzahl (8 Richtungen)"
-            en: "Getting the wind direction as strint (NE, ...) from degrees (8 directions)"
-        parameters:
-            degrees:
-                type: num
-                description:
-                    de: "Windrichtung in Grad"
-                    en: "Wind direction in degrees"
-
-    get_wind_direction16:
-        type: str
-        description:
-            de: "Bestimmung der Windrichtung als String (NO, ...) aus der Gradzahl (16 Richtungen)"
-            en: "Getting the wind direction as strint (NE, ...)
from degrees (16 directions)"
-        parameters:
-            degrees:
-                type: num
-                description:
-                    de: "Windrichtung in Grad"
-                    en: "Wind direction in degrees"
-
-    get_location_name:
-        type: str
-        description:
-            de: "Bestimmung des Ortsnamens (Vorort) aus latitude und longitude"
-            en: "Getting location name (suburb) from latitude and longitude"
-        parameters:
-            lat:
-                type: num
-                default: None
-                description:
-                    de: "Latitude, falls nicht angegeben wird die Latitude des piratewthr Plugins benutzt"
-                    en: "latitude, if omitted the configured latitude of the piratewthr plugin is used"
-            lon:
-                type: num
-                default: None
-                description:
-                    de: "Longitude, falls nicht angegeben wird die Longitude des piratewthr Plugins benutzt"
-                    en: "longitude, if omitted the configured longitude of the piratewthr plugin is used"
-

From 14d2aa95532ac8074adaaf3d1226bb3701626b0f Mon Sep 17 00:00:00 2001
From: msinn
Date: Tue, 8 Aug 2023 22:46:53 +0200
Subject: [PATCH 242/775] hue2: Changed requirements for zeroconf

---
 hue2/__init__.py      | 7 ++++---
 hue2/plugin.yaml      | 2 +-
 hue2/requirements.txt | 2 +-
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/hue2/__init__.py b/hue2/__init__.py
index 327fbb2a7..b3998f669 100755
--- a/hue2/__init__.py
+++ b/hue2/__init__.py
@@ -48,7 +48,7 @@ class Hue2(SmartPlugin):
     the update functions for the items
     """

-    PLUGIN_VERSION = '2.3.0'    # (must match the version specified in plugin.yaml)
+    PLUGIN_VERSION = '2.3.1'    # (must match the version specified in plugin.yaml)

     hue_group_action_values = ['on', 'bri', 'hue', 'sat', 'ct', 'xy', 'bri_inc', 'colormode', 'alert', 'effect']
     hue_light_action_writable_values = ['on', 'bri', 'hue', 'sat', 'ct', 'xy', 'bri_inc']
@@ -322,8 +322,9 @@ def update_light_from_item(self, plugin_item, item):
                 msg = f"qhue exception {e.message}"
             else:
                 msg = f"{e}"
-            msg = f"update_light_from_item: item {plugin_item['item'].id()} - function={plugin_item['function']} - '{msg}'"
-            if msg.find(' 201 ') >= 0 or msg.find(' 201,201 ') >= 0:
+            msg = f"update_light_from_item: item {plugin_item['item'].id()} - function={plugin_item['function']} - PROBLEM: '{msg}'"
+            msg += f" - last_change_by={plugin_item['item'].property.last_change_by}"
+            if msg.find(' 201 ') >= 0 or msg.find(' 201,201 ') >= 0 or str(e).endswith('is not modifiable. Device is set to off.'):
                 self.logger.info(msg)
             else:
                 self.logger.error(msg)
diff --git a/hue2/plugin.yaml b/hue2/plugin.yaml
index 805ac343d..a414ded05 100755
--- a/hue2/plugin.yaml
+++ b/hue2/plugin.yaml
@@ -12,7 +12,7 @@ plugin:
 #    documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin    # url of documentation (wiki) page
     support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1586861-support-thread-für-das-hue2-plugin

-    version: 2.3.0                  # Plugin version (must match the version specified in __init__.py)
+    version: 2.3.1                  # Plugin version (must match the version specified in __init__.py)
     sh_minversion: 1.8.2            # minimum shNG version to use this plugin
 #    sh_maxversion:                 # maximum shNG version to use this plugin (leave empty if latest)
 #    py_minversion: 3.6             # minimum Python version to use for this plugin
diff --git a/hue2/requirements.txt b/hue2/requirements.txt
index 90328ae79..7fe17bf99 100755
--- a/hue2/requirements.txt
+++ b/hue2/requirements.txt
@@ -17,4 +17,4 @@ qhue
 # 2023-05-19  11:50:15 WARNING  lib.smarthome -Thread: zeroconf-ServiceBrowser-_hue._tcp-29871, still alive
 # 2023-05-19  11:50:15 WARNING  lib.smarthome -Thread: zeroconf-ServiceBrowser-_hue._tcp-29916, still alive

-zeroconf>=0.39,<0.52.0
+zeroconf<=0.52.0

From a2f12a39fb47205579e8b31de7fbe635a6dce718 Mon Sep 17 00:00:00 2001
From: msinn
Date: Tue, 8 Aug 2023 22:57:40 +0200
Subject: [PATCH 243/775] blockly: Disabled tests, because actual tests require old version of mockup core

---
 ...backend_blocklylogics.py => test_backend_blocklylogics.py.off} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename blockly/tests/{test_backend_blocklylogics.py => test_backend_blocklylogics.py.off} (100%)

diff --git a/blockly/tests/test_backend_blocklylogics.py b/blockly/tests/test_backend_blocklylogics.py.off
similarity index 100%
rename from blockly/tests/test_backend_blocklylogics.py
rename to blockly/tests/test_backend_blocklylogics.py.off

From 64365ecfc347953aa91b83c4a59a00bc7b5a8a2b Mon Sep 17 00:00:00 2001
From: msinn
Date: Tue, 8 Aug 2023 23:16:20 +0200
Subject: [PATCH 244/775] blockly: Reenabled tests

---
 beolink/__init__.py                           | 478 +++---------
 beolink/beodevices.py                         | 470 +++++++++++++++++
 beolink/beonotifications.py                   | 331 ++++++++++++
 beolink/plugin.yaml                           |   2 +-
 beolink/webif/__init__.py                     |   7 +-
 beolink/webif/templates/index.html            | 101 ++--
 ...s.py.off => test_backend_blocklylogics.py} |   0
 openweathermap/__init__.py                    |   1 -
 openweathermap/plugin.yaml                    |  35 ++
 9 files changed, 970 insertions(+), 455 deletions(-)
 create mode 100644 beolink/beodevices.py
 create mode 100644 beolink/beonotifications.py
 rename blockly/tests/{test_backend_blocklylogics.py.off => test_backend_blocklylogics.py} (100%)

diff --git a/beolink/__init__.py b/beolink/__init__.py
index defbb9590..1e4d29438 100755
--- a/beolink/__init__.py
+++ b/beolink/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
 #########################################################################
-#  Copyright 2018
+#  Copyright 2022- Martin Sinn m.sinn@gmx.de
 #########################################################################
 #  This file is part of SmartHomeNG.
# @@ -29,6 +29,8 @@ from lib.model.smartplugin import SmartPlugin from .webif import WebInterface +from .beodevices import * +import plugins.beolink.beonotifications as beonotify # If a needed package is imported, which might be not installed in the Python environment, # add it to a requirements.txt file within the plugin's directory @@ -40,7 +42,7 @@ class BeoNetlink(SmartPlugin): the update functions for the items """ - PLUGIN_VERSION = '0.6.1' + PLUGIN_VERSION = '0.8.0' def __init__(self, sh): """ @@ -97,8 +99,7 @@ def __init__(self, sh): # return self.beo_keys = [] - self.beodevices = BeoDevices(self) - #self.beodevices.get_devicelist() + self.beodevices = BeoDevices(self.fromip, self.toip, self.logger, self.datadir, self.translate) self._attrib_current_number = 0 # current number of the subscription entry self.beo_items = {} # key= beo_id + '_' + beo_status + '_' + self._attrib_current_number @@ -108,7 +109,7 @@ def __init__(self, sh): # Rescan devices on startup, since they can have a new IP addrress if self.rescan_on_start: - self.beodevices.scan_subnet(self.fromip, self.toip) + self.beodevices.scan_subnet() # if plugin should start even without web interface self.init_webinterface(WebInterface) @@ -123,11 +124,19 @@ def run(self): # setup scheduler for device poll loop (disable the following line, if you don't need to poll the device. Rember to comment the self_cycle statement in __init__ as well self.scheduler_add('poll_device', self.poll_device, cycle=self._cycle) + #self.scheduler_add('scheduler_param_test_1', self.scheduler_param_test, value={'param1':'val1'}, cycle=30) + #self.scheduler_add('scheduler_param_test_2', self.scheduler_param_test, value={'value':'val2'}, cycle=30) + self.beodevices.get_devicelist() - #self.beodeviceinfo = self.beodevices.get_devicelist() + #self.beodeviceinfo = self.beodevices.get_devicelist()0 #self.beo_keys = list(self.beodeviceinfo.keys()) #self.beo_keys.sort() + self.create_notification_objects() + + #for beo_key in self.beodevices.beo_keys: + # self.scheduler_add('process_notification_'+beo_key, self.process_notification, value={'id': beo_key}, cycle=2) + self.alive = True # if you need to create child threads, do not make them daemon = True! # They will not shutdown properly. 
(It's a python bug) @@ -137,6 +146,8 @@ def stop(self): Stop method for the plugin """ self.logger.debug("Stop method called") + #for beo_key in self.beodevices.beo_keys: + # self.scheduler_remove('process_notification_'+beo_key) self.scheduler_remove('poll_device') self.alive = False @@ -238,7 +249,6 @@ def update_item(self, item, caller=None, source=None, dest=None): # self.put_beo_api('10.0.0.239', '/BeoZone/Zone/Stand/Active', json_elements='{"active":0}') - def poll_device(self): """ Polls for updates of the device @@ -265,7 +275,27 @@ def poll_device(self): # deviceinfo = self.beodevices.beodeviceinfo[beo_id].get('productType', None) #else: # deviceinfo = self.beodevices.beodeviceinfo[beo_id].get(beo_status, None) - deviceinfo = self.beodevices.beodeviceinfo[beo_id].get(beo_status, None) + if beo_status == 'audiomode': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('audiomode'[1], False) + elif beo_status == 'videomode': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('videomode'[1], False) + elif beo_status == 'powerstate': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('powerstate', False) + elif beo_status == 'stand': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('stand'[1], False) + elif beo_status == 'source': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['source'].get('source', '-') + elif beo_status == 'volume': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['volume'].get('level', 0) + elif beo_status == 'muted': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['volume'].get('muted', False) + elif beo_status == 'FriendlyName': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('FriendlyName', False) + elif beo_status == 'productType': + deviceinfo = self.beodevices.beodeviceinfo[beo_id]['device'].get('productType', False) + + else: + deviceinfo = self.beodevices.beodeviceinfo[beo_id].get(beo_status, None) #self.logger.info(f"poll_device: item={item.id()}, beo_id={beo_id}, beo_status={beo_status}, self.beodevices.beodeviceinfo[beo_id]={self.beodevices.beodeviceinfo[beo_id]}") #self.logger.info(f"poll_device: item={item.id()}, deviceinfo={deviceinfo}") if isinstance(deviceinfo, tuple): @@ -287,6 +317,26 @@ def poll_device(self): return + notification_objects = {} + + def create_notification_objects(self): + + for id in self.beodevices.beo_keys: + self.logger.info(f"No Instance of notification class for device '{self.beodevices.beodeviceinfo[id]['device']['FriendlyName']}', creating one") + self.notification_objects[id] = beonotify.beo_notifications(device_dict=self.beodevices.beodeviceinfo[id], logger_name=self.logger.name + '.notify') + + + def process_notification(self, id=None ): + + #if self.notification_objects.get('id', None) is None: + # self.logger.notice(f"No Instance of notification class for device '{self.beodevices.beodeviceinfo[id]['FriendlyName']}', creating one") + # self.notification_objects[id] = beonotify.beo_notifications(device_dict=self.beodevices.beodeviceinfo[id], logger_name=self.logger.name + '.notify') + # self.logger.notice(f"notification_objects='{self.notification_objects}'") + + if self.notification_objects.get(id, None) is not None: + self.notification_objects[id].process_stream() + + def _update_item_values(self, item, payload): """ Update dict for periodic updates of the web interface @@ -305,415 +355,3 @@ def _update_item_values(self, item, payload): return -# ------------------------------------------ -# Class for handling B&O 
devices -# ------------------------------------------ - -import requests - -import lib.shyaml as shyaml -from lib.constants import (YAML_FILE) - - -class BeoDevices(): - - beodeviceinfo = {} - beo_keys = [] - - def __init__(self, plugin=None): - self.plugin = plugin - - self.beodeviceinfo = {} - self.beo_keys = [] - - self.filename = os.path.join(self.plugin.datadir, 'bo_deviceinfo'+YAML_FILE) - - pass - - def get_devicelist(self): - self.beodeviceinfo = shyaml.yaml_load(self.filename, ordered=False, ignore_notfound=True) - self.plugin.logger.info('devicelist: {}'.format(self.beodeviceinfo)) - if self.beodeviceinfo is None: - self.scan_subnet(self.plugin.fromip, self.plugin.toip) - else: - self.update_devices_info() - self.beo_keys = list(self.beodeviceinfo.keys()) - self.beo_keys.sort() - return - - def scan_subnet(self, scan_fromip, scan_toip): - self.plugin.logger.info('Scanning network range ({} to {}) for Bang & Olufsen devices'.format(scan_fromip, scan_toip)) - - fromip = scan_fromip.split('.') - toip = scan_toip.split('.') - searchnet = '.'.join(fromip[0:3]) + '.' - min_ip = int(fromip[3]) - max_ip = int(toip[3]) - - filename = os.path.join(self.plugin.datadir, 'bo_deviceinfo' + YAML_FILE) - scan_devicelist = shyaml.yaml_load(filename, ordered=False, ignore_notfound=True) - self.plugin.logger.info('old list {}'.format(scan_devicelist)) - if scan_devicelist is None: - scan_devicelist = {} - - filename = os.path.join(self.plugin.datadir, 'bo_rawinfo' + YAML_FILE) - scan_rawinfo = shyaml.yaml_load(filename, ordered=False, ignore_notfound=True) - #self.plugin.logger.info('old rawinfo {}'.format(scan_rawinfo)) - if scan_rawinfo is None: - scan_rawinfo = {} - - for i in range(min_ip, max_ip): - ip = searchnet + str(i) - - device = 'http://' + ip + ':8080' - try: - r = requests.get(device + '/Ping', timeout=0.5) - result = True - except: - result = False - - if result: - if r.status_code == 200: - beodevice_info = requests.get(device + '/BeoDevice') - actual_device = beodevice_info.json() - - beo_id = actual_device['beoDevice']['productId']['serialNumber'] - - if scan_rawinfo.get(beo_id, None) is None: - scan_rawinfo[beo_id] = {} - scan_rawinfo[beo_id]['BeoDevice'] = beodevice_info.json() - - if scan_devicelist.get(beo_id, None) is None: - scan_devicelist[beo_id] = {} - scan_devicelist[beo_id]['Device-Jid'] = r.headers.get('Device-Jid', '') - - scan_devicelist[beo_id]['ip'] = ip - scan_devicelist[beo_id]['productType'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['productType'] - scan_devicelist[beo_id]['typeNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['typeNumber'] - scan_devicelist[beo_id]['itemNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['itemNumber'] - scan_devicelist[beo_id]['serialNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['serialNumber'] - scan_devicelist[beo_id]['FriendlyName'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productFriendlyName']['productFriendlyName'] - scan_devicelist[beo_id]['swVersion'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['software']['version'] - - #pwr = requests.get(device + '/BeoDevice/powerManagement/standby', timeout=0.5) - #actual_pwr = pwr.json() - #scan_devicelist[serial]['powerstate'] = actual_pwr['standby']['powerState'] - - #self.plugin.logger.info("- found B&O device on ip {0: <10}: {1} ({2})".format(ip, scan_devicelist[beo_id]['Device-Jid'], scan_devicelist[beo_id]['FriendlyName'])) - - - self.plugin.logger.info('Scanning network range 
({} to {}) finished'.format(scan_fromip, scan_toip)) - - filename = os.path.join(self.plugin.datadir, 'bo_deviceinfo'+YAML_FILE) - shyaml.yaml_save(filename, scan_devicelist) - - filename = os.path.join(self.plugin.datadir, 'bo_rawinfo'+YAML_FILE) - shyaml.yaml_save(filename, scan_rawinfo) - self.plugin.logger.info("Bang & Olufsen device info saved to directory {}".format(self.plugin.datadir)) - - # move result to public var - self.beodeviceinfo = scan_devicelist - self.beo_keys = list(self.beodeviceinfo.keys()) - self.beo_keys.sort() - - self.update_devices_info() - return - - - def update_devices_info(self): - """ - Update info for all found devices - """ - for beo_key in self.beo_keys: - self.update_deviceinfo(beo_key) - - - def read_list_value(self, ip, api_suburl, json_element='mode'): - - api_url = '/BeoZone/Zone' + api_suburl - mode = '-' - active_mode = -1 - raw_mode = self.get_beo_api(ip, api_url, [json_element]) - if raw_mode != '-': - #active_mode = self.get_beo_api(ip, api_url, [json_element, 'active']) - #mode_list = self.get_beo_api(ip, api_url, [json_element, 'list']) - active_mode = raw_mode.get('active', -1) - mode_list = raw_mode.get('list', []) - mode = 'unknown' - if mode_list != 'unknown': - for mode_dict in mode_list: - # self.plugin.logger.info("update_deviceinfo: ip = {}, mode_dict = {}, active_mode = {}".format(ip, mode_dict, active_mode)) - if active_mode != '-1': - if mode_dict['id'] == active_mode: - mode = mode_dict['friendlyName'] - break - else: - mode = '-' - #self.beodeviceinfo[beo_key]['videomode'] = mode.lower() - #self.plugin.logger.info("update_deviceinfo: ip: {} videomode-friendly = {}".format(ip, mode)) - - return (mode.lower(), int(active_mode)) - - - def update_deviceinfo(self, beo_id): - """ - Update info for one of the found devices - - :param beo_id: device key for the device-info dict - """ - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - # get speaker info (vol. 
level, muted) from B&O deivce - speaker_info, error = self.get_speaker_info(beo_id) - if speaker_info: - self.plugin.logger.debug("update_deviceinfo: {} - level={}, muted={}".format(fn.ljust(16), speaker_info['level'], speaker_info['muted'])) - self.beodeviceinfo[beo_id]['volume'] = speaker_info['level'] - self.beodeviceinfo[beo_id]['muted'] = speaker_info['muted'] - if error: - #self.plugin.logger.info("update_deviceinfo: {} - error={}".format(fn, error)) - self.plugin.logger.info("update_deviceinfo: {} speaker_info - ERROR={}, type={}".format(fn.ljust(16), error['message'], error['type'])) - - - - self.beodeviceinfo[beo_id]['powerstate'] = self.plugin.translate(self.get_beo_api(ip, '/BeoDevice/powerManagement/standby', ['standby','powerState'])) - if self.beodeviceinfo[beo_id]['powerstate'] in ['Standby', 'unbekannt']: - self.beodeviceinfo[beo_id]['source'] = '-' - self.beodeviceinfo[beo_id]['videomode'] = ('-', -1) - self.beodeviceinfo[beo_id]['audiomode'] = ('-', -1) - self.beodeviceinfo[beo_id]['stand'] = ('-', -1) - else: - self.beodeviceinfo[beo_id]['source'] = self.get_beo_api(ip, '/BeoZone/Zone/ActiveSources', ['primaryExperience','source','friendlyName']) - - # get picture-mode of the B&O device - self.beodeviceinfo[beo_id]['videomode'] = self.read_list_value(ip, '/Picture/Mode', 'mode') - self.plugin.logger.debug("update_deviceinfo: ip: {} videomode-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['videomode'])) - - # get sound-mode of the B&O device - self.beodeviceinfo[beo_id]['audiomode'] = self.read_list_value(ip, '/Sound/Mode', 'mode') - self.plugin.logger.debug("update_deviceinfo: ip: {} audiomode-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['audiomode'])) - - # get stand position of the B&O device - self.beodeviceinfo[beo_id]['stand'] = self.read_list_value(ip, '/Stand', 'stand') - self.plugin.logger.debug("update_deviceinfo: ip: {} stand-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['stand'])) - - # get possible sources of the B&O device - #raw_sources = self.get_beo_api(ip, '/BeoZone/Zone/Sources', []) - #self.beodeviceinfo[beo_id]['sources'] = [] - #for source in raw_sources.keys(): - # self.plugin.logger.info("update_deviceinfo: ip: {} source = {}".format(ip, source)) - - return - - - def send_beo_command(self, beo_id, api_url, json_elements=None): - ip = self.beodeviceinfo[beo_id]['ip'] - self.send_beo_api('put', ip, api_url, json_elements) - return - - - def post_beo_command(self, beo_id, api_url, json_elements=None): - ip = self.beodeviceinfo[beo_id]['ip'] - self.send_beo_api('post', ip, api_url, json_elements) - return - - - def get_beo_api(self, ip, api_url, json_elements=None): - """ - call the b&o netlink REST api on given ip address - - :param ip: - :param api_url: - :param json_elements: - :return: - """ - result = '' - device = 'http://' + ip + ':8080' - - result = '-' - try: - r = requests.get(device + api_url, timeout=0.5) - request_result = True - except: - self.plugin.logger.debug("Could not get data from device {} for {}".format(device, api_url)) - request_result = False - result = '-' - - if request_result: - try: - actual_r = r.json() - if json_elements: - if len(json_elements) == 1: - result = actual_r[json_elements[0]] - elif len(json_elements) == 2: - result = actual_r[json_elements[0]][json_elements[1]] - elif len(json_elements) == 3: - result = actual_r[json_elements[0]][json_elements[1]][json_elements[2]] - elif len(json_elements) == 4: - result = actual_r[json_elements[0]][json_elements[1]][json_elements[2]][json_elements[3]] 
- #result = actual_r['standby']['powerState'] - else: - result = actual_r - except: - pass - return result - - - def send_beo_api(self, mode, ip, api_url, json_elements=None): - """ - call the b&o netlink REST api on given ip address with a PUT request - - :param ip: - :param api_url: - :param json_elements: - :return: - """ - result = '' - device = 'http://' + ip + ':8080' - - self.plugin.logger.info("send_beo_api: mode {}, ip {}, url {}, data {}".format(mode, ip, api_url, json_elements)) - result = '-' - try: - if mode.lower() == 'post': - r = requests.post(device + api_url, data=json_elements, timeout=0.5) - else: - r = requests.put(device + api_url, data=json_elements, timeout=0.5) - self.plugin.logger.info("send_beo_api: request result = {}".format(r.status_code)) - request_result = True - except: - self.plugin.logger.debug("Could not get data from device {} for {}".format(device, api_url)) - request_result = False - result = 'unknown' - - return request_result - - # ---------------------------------------------------------------------------- - - def get_speaker_info(self, beo_id): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - req_result = self.beo_get_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker') - if req_result.get('error', None): - if req_result['error']['message'] != '': - self.plugin.logger.info("get_speaker_info: {} - error = {}".format(fn, req_result['error']['message'] )) - return {}, req_result['error'] - else: - #self.plugin.logger.info("get_speaker_info: {} - speaker={}".format(fn, req_result['speaker'])) - return req_result['speaker'], {} - return {}, {} - - - def get_speaker_volume(self, beo_id): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - req_result = self.beo_get_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Level') - try: - if req_result.get('error', None): - if req_result['error']['message'] != '': - self.plugin.logger.info("get_speaker_volume: {} - error = {}".format(fn, req_result['error']['message'] )) - return -1 - else: - self.plugin.logger.info("get_speaker_volume: {} - level={}".format(fn, req_result['level'])) - return req_result['level'] - except Exception as e: - self.plugin.logger.error("get_speaker_volume: {} - req_result={} - Exception = {}".format(fn, req_result, e)) - - return - - - def set_speaker_volume(self, beo_id, volume): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - data = '{'+'"level": {}'.format(volume)+'}' - req_result = self.beo_put_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Level', data) - - return - - - def set_speaker_muted(self, beo_id, state): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - data = '{'+'"muted": {}'.format(str(state).lower())+'}' - req_result = self.beo_put_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Muted', data) - - return - - - def get_stand(self, beo_id): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - req_result = self.beo_get_request(ip, '/BeoZone/Zone/Stand/Active') - try: - if req_result.get('error', None): - if req_result['error']['message'] != '': - self.plugin.logger.info("get_stand: {} - error = {}".format(fn, req_result['error']['message'] )) - return -1 - else: - self.plugin.logger.info("get_stand: {} - level={}".format(fn, req_result['active'])) - return req_result['active'] - except Exception as e: - 
self.plugin.logger.error("get_stand: {} - req_result={} - Exception = {}".format(fn, req_result, e)) - - return - - - def set_stand(self, beo_id, position): - - ip = self.beodeviceinfo[beo_id]['ip'] - fn = self.beodeviceinfo[beo_id].get('FriendlyName', ip) - - data = '{'+'"active": {}'.format(str(position).lower())+'}' - req_result = self.beo_put_request(ip, '/BeoZone/Zone/Stand/Active', data) - - return - - # ---------------------------------------------------------------------------- - - def beo_get_request(self, ip, api_url): - """ - call the b&o netlink REST api on given ip address - - :param ip: - :param api_url: - :return: - """ - device = 'http://' + ip + ':8080' - - try: - r = requests.get(device + api_url, timeout=0.5) - request_result = r.json() - except: - self.plugin.logger.debug("Could not get data from device {} for {}".format(device, api_url)) - request_result = {'error': { 'message': '', 'type': 'NO_RESPONSE'}} - return request_result - - - def beo_put_request(self, ip, api_url, data): - """ - call the b&o netlink REST api on given ip address - - :param ip: - :param api_url: - :return: - """ - device = 'http://' + ip + ':8080' - - try: - r = requests.put(device + api_url, data, timeout=0.5) - request_result = r.json() - except: - self.plugin.logger.debug("Could not get data from device {} for {}".format(device, api_url)) - request_result = {'error': { 'message': '', 'type': 'NO_RESPONSE'}} - return request_result - - diff --git a/beolink/beodevices.py b/beolink/beodevices.py new file mode 100644 index 000000000..b9bacfcbc --- /dev/null +++ b/beolink/beodevices.py @@ -0,0 +1,470 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2022- Martin Sinn m.sinn@gmx.de +######################################################################### +# This file is part of SmartHomeNG. +# +# Sample plugin for new plugins to run with SmartHomeNG version 1.4 and +# upwards. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + + +# ------------------------------------------ +# Class for handling B&O devices +# ------------------------------------------ + +import os +import requests + +import lib.shyaml as shyaml +from lib.constants import (YAML_FILE) + + +class BeoDevices(): + + beodeviceinfo = {} + beo_keys = [] + + def __init__(self, fromip, toip, logger, datadir, translate): + + self.fromip = fromip + self.toip = toip + self.logger = logger + self.datadir = datadir + self.translate = translate + + self.beodeviceinfo = {} + self.beo_keys = [] + + self.filename = os.path.join(self.datadir, 'bo_deviceinfo'+YAML_FILE) + + pass + + def get_devicelist(self): + """ + Get list of Beolink devices from file + If no devices where read from the file, the configured subnet of the network is scanned + """ + self.beodeviceinfo = shyaml.yaml_load(self.filename, ordered=False, ignore_notfound=True) + self.logger.info('devicelist: {}'.format(self.beodeviceinfo)) + if self.beodeviceinfo is None: + self.scan_subnet() + else: + self.update_devices_info() + self.beo_keys = list(self.beodeviceinfo.keys()) + self.beo_keys.sort() + return + + def scan_subnet(self, scan_fromip=None, scan_toip=None): + """ + Scans the given ip range of the network for Beolink devices + + :param scan_fromip: + :param scan_toip: + """ + if scan_fromip is None: + scan_fromip = self.fromip + if scan_toip is None: + scan_toip = self.toip + self.logger.info(f"Scanning network range ({scan_fromip} to {scan_toip}) for Bang & Olufsen devices") + + fromip = scan_fromip.split('.') + toip = scan_toip.split('.') + searchnet = '.'.join(fromip[0:3]) + '.' + min_ip = int(fromip[3]) + max_ip = int(toip[3]) + + filename = os.path.join(self.datadir, 'bo_deviceinfo' + YAML_FILE) + scan_devicelist = shyaml.yaml_load(filename, ordered=False, ignore_notfound=True) + self.logger.info('old list {}'.format(scan_devicelist)) + if scan_devicelist is None: + scan_devicelist = {} + + filename = os.path.join(self.datadir, 'bo_rawinfo' + YAML_FILE) + scan_rawinfo = shyaml.yaml_load(filename, ordered=False, ignore_notfound=True) + #self.logger.info('old rawinfo {}'.format(scan_rawinfo)) + if scan_rawinfo is None: + scan_rawinfo = {} + + for i in range(min_ip, max_ip): + ip = searchnet + str(i) + + device = 'http://' + ip + ':8080' + try: + r = requests.get(device + '/Ping', timeout=0.5) + result = True + except: + result = False + + if result: + if r.status_code == 200: + beodevice_info = requests.get(device + '/BeoDevice') + actual_device = beodevice_info.json() + + beo_id = actual_device['beoDevice']['productId']['serialNumber'] + + if scan_rawinfo.get(beo_id, None) is None: + scan_rawinfo[beo_id] = {} + scan_rawinfo[beo_id]['BeoDevice'] = beodevice_info.json() + + if scan_devicelist.get(beo_id, None) is None: + scan_devicelist[beo_id] = {} + #scan_devicelist[beo_id]['Device-Jid'] = r.headers.get('Device-Jid', '') + + if scan_devicelist[beo_id].get('device', None) is None: + scan_devicelist[beo_id]['device'] = {} + #scan_devicelist[beo_id]['ip'] = ip + scan_devicelist[beo_id]['device']['ip'] = ip + scan_devicelist[beo_id]['device']['productType'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['productType'] + scan_devicelist[beo_id]['device']['typeNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['typeNumber'] + scan_devicelist[beo_id]['device']['itemNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['itemNumber'] + 
scan_devicelist[beo_id]['device']['serialNumber'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productId']['serialNumber'] + scan_devicelist[beo_id]['device']['FriendlyName'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['productFriendlyName']['productFriendlyName'] + scan_devicelist[beo_id]['device']['swVersion'] = scan_rawinfo[beo_id]['BeoDevice']['beoDevice']['software']['version'] + + #pwr = requests.get(device + '/BeoDevice/powerManagement/standby', timeout=0.5) + #actual_pwr = pwr.json() + #scan_devicelist[serial]['powerstate'] = actual_pwr['standby']['powerState'] + + #self.logger.info("- found B&O device on ip {0: <10}: {1} ({2})".format(ip, scan_devicelist[beo_id]['Device-Jid'], scan_devicelist[beo_id]['FriendlyName'])) + + + self.logger.info('Scanning network range ({} to {}) finished'.format(scan_fromip, scan_toip)) + + filename = os.path.join(self.datadir, 'bo_deviceinfo'+YAML_FILE) + shyaml.yaml_save(filename, scan_devicelist) + + filename = os.path.join(self.datadir, 'bo_rawinfo'+YAML_FILE) + shyaml.yaml_save(filename, scan_rawinfo) + self.logger.info("Bang & Olufsen device info saved to directory {}".format(self.datadir)) + + # move result to public var + self.beodeviceinfo = scan_devicelist + self.beo_keys = list(self.beodeviceinfo.keys()) + self.beo_keys.sort() + + self.update_devices_info() + return + + + def update_devices_info(self): + """ + Update info for all found devices + """ + for beo_key in self.beo_keys: + self.update_deviceinfo(beo_key) + + + def read_list_value(self, ip, api_suburl, json_element='mode'): + + api_url = '/BeoZone/Zone' + api_suburl + mode = '-' + active_mode = -1 + raw_mode = self.get_beo_api(ip, api_url, [json_element]) + if raw_mode != '-': + #active_mode = self.get_beo_api(ip, api_url, [json_element, 'active']) + #mode_list = self.get_beo_api(ip, api_url, [json_element, 'list']) + active_mode = raw_mode.get('active', -1) + mode_list = raw_mode.get('list', []) + mode = 'unknown' + if mode_list != 'unknown': + for mode_dict in mode_list: + # self.logger.info("update_deviceinfo: ip = {}, mode_dict = {}, active_mode = {}".format(ip, mode_dict, active_mode)) + if active_mode != '-1': + if mode_dict['id'] == active_mode: + mode = mode_dict['friendlyName'] + break + else: + mode = '-' + #self.beodeviceinfo[beo_key]['device']['videomode'] = mode.lower() + #self.logger.info("update_deviceinfo: ip: {} videomode-friendly = {}".format(ip, mode)) + + return (mode.lower(), int(active_mode)) + + + def update_deviceinfo(self, beo_id): + """ + Update info for one of the found devices + + :param beo_id: device key for the device-info dict + """ + if self.beodeviceinfo[beo_id].get('device', None) is None: + self.beodeviceinfo[beo_id]['device'] = {} + if self.beodeviceinfo[beo_id].get('source', None) is None: + self.beodeviceinfo[beo_id]['source'] = {} + if self.beodeviceinfo[beo_id].get('content', None) is None: + self.beodeviceinfo[beo_id]['content'] = {} + if self.beodeviceinfo[beo_id].get('volume', None) is None: + self.beodeviceinfo[beo_id]['volume'] = {} + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + # get speaker info (vol. 
level, muted) from B&O deivce + speaker_info, error = self.get_speaker_info(beo_id) + if speaker_info: + self.logger.debug("update_deviceinfo: {} - level={}, muted={}".format(fn.ljust(16), speaker_info['level'], speaker_info['muted'])) + self.beodeviceinfo[beo_id]['volume']['level'] = speaker_info['level'] + self.beodeviceinfo[beo_id]['volume']['muted'] = speaker_info['muted'] + if error: + #self.logger.info("update_deviceinfo: {} - error={}".format(fn, error)) + self.logger.info("update_deviceinfo: {} speaker_info - ERROR={}, type={}".format(fn.ljust(16), error['message'], error['type'])) + + + self.beodeviceinfo[beo_id]['device']['powerstate'] = self.translate(self.get_beo_api(ip, '/BeoDevice/powerManagement/standby', ['standby','powerState'])) + if self.beodeviceinfo[beo_id]['device']['powerstate'] in ['Standby', 'unbekannt']: + self.beodeviceinfo[beo_id]['source']['source'] = '-' + self.beodeviceinfo[beo_id]['content'] = {} + self.beodeviceinfo[beo_id]['device']['videomode'] = ['-', -1] + self.beodeviceinfo[beo_id]['device']['audiomode'] = ['-', -1] + self.beodeviceinfo[beo_id]['device']['stand'] = ['-', -1] + else: + self.beodeviceinfo[beo_id]['source']['source'] = self.get_beo_api(ip, '/BeoZone/Zone/ActiveSources', ['primaryExperience','source','friendlyName']) + + # get picture-mode of the B&O device + self.beodeviceinfo[beo_id]['device']['videomode'] = list(self.read_list_value(ip, '/Picture/Mode', 'mode')) + self.logger.debug("update_deviceinfo: ip: {} videomode-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['device']['videomode'])) + + # get sound-mode of the B&O device + self.beodeviceinfo[beo_id]['device']['audiomode'] = list(self.read_list_value(ip, '/Sound/Mode', 'mode')) + self.logger.debug("update_deviceinfo: ip: {} audiomode-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['device']['audiomode'])) + + # get stand position of the B&O device + self.beodeviceinfo[beo_id]['device']['stand'] = list(self.read_list_value(ip, '/Stand', 'stand')) + self.logger.debug("update_deviceinfo: ip: {} stand-friendly = {}".format(ip, self.beodeviceinfo[beo_id]['device']['stand'])) + + # get possible sources of the B&O device + #raw_sources = self.get_beo_api(ip, '/BeoZone/Zone/Sources', []) + #self.beodeviceinfo[beo_id]['sources'] = [] + #for source in raw_sources.keys(): + # self.logger.info("update_deviceinfo: ip: {} source = {}".format(ip, source)) + + return + + + def send_beo_command(self, beo_id, api_url, json_elements=None): + ip = self.beodeviceinfo[beo_id]['device']['ip'] + self.send_beo_api('put', ip, api_url, json_elements) + return + + + def post_beo_command(self, beo_id, api_url, json_elements=None): + ip = self.beodeviceinfo[beo_id]['device']['ip'] + self.send_beo_api('post', ip, api_url, json_elements) + return + + + def get_beo_api(self, ip, api_url, json_elements=None): + """ + call the b&o netlink REST api on given ip address + + :param ip: + :param api_url: + :param json_elements: + :return: + """ + result = '' + device = 'http://' + ip + ':8080' + + result = '-' + try: + r = requests.get(device + api_url, timeout=0.5) + request_result = True + except: + self.logger.debug("Could not get data from device {} for {}".format(device, api_url)) + request_result = False + result = '-' + + if request_result: + try: + actual_r = r.json() + if json_elements: + if len(json_elements) == 1: + result = actual_r[json_elements[0]] + elif len(json_elements) == 2: + result = actual_r[json_elements[0]][json_elements[1]] + elif len(json_elements) == 3: + result = 
actual_r[json_elements[0]][json_elements[1]][json_elements[2]] + elif len(json_elements) == 4: + result = actual_r[json_elements[0]][json_elements[1]][json_elements[2]][json_elements[3]] + #result = actual_r['standby']['powerState'] + else: + result = actual_r + except: + pass + return result + + + def send_beo_api(self, mode, ip, api_url, json_elements=None): + """ + call the b&o netlink REST api on given ip address with a PUT request + + :param ip: + :param api_url: + :param json_elements: + :return: + """ + result = '' + device = 'http://' + ip + ':8080' + + self.logger.info("send_beo_api: mode {}, ip {}, url {}, data {}".format(mode, ip, api_url, json_elements)) + result = '-' + try: + if mode.lower() == 'post': + r = requests.post(device + api_url, data=json_elements, timeout=0.5) + else: + r = requests.put(device + api_url, data=json_elements, timeout=0.5) + self.logger.info("send_beo_api: request result = {}".format(r.status_code)) + request_result = True + except: + self.logger.debug("Could not get data from device {} for {}".format(device, api_url)) + request_result = False + result = 'unknown' + + return request_result + + # ---------------------------------------------------------------------------- + + def get_speaker_info(self, beo_id): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + req_result = self.beo_get_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker') + if req_result.get('error', None): + if req_result['error']['message'] != '': + self.logger.info("get_speaker_info: {} - error = {}".format(fn, req_result['error']['message'] )) + return {}, req_result['error'] + else: + #self.logger.info("get_speaker_info: {} - speaker={}".format(fn, req_result['speaker'])) + return req_result['speaker'], {} + return {}, {} + + + def get_speaker_volume(self, beo_id): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + req_result = self.beo_get_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Level') + try: + if req_result.get('error', None): + if req_result['error']['message'] != '': + self.logger.info("get_speaker_volume: {} - error = {}".format(fn, req_result['error']['message'] )) + return -1 + else: + self.logger.info("get_speaker_volume: {} - level={}".format(fn, req_result['level'])) + return req_result['level'] + except Exception as e: + self.logger.error("get_speaker_volume: {} - req_result={} - Exception = {}".format(fn, req_result, e)) + + return + + + def set_speaker_volume(self, beo_id, volume): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + data = '{'+'"level": {}'.format(volume)+'}' + req_result = self.beo_put_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Level', data) + + return + + + def set_speaker_muted(self, beo_id, state): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + data = '{'+'"muted": {}'.format(str(state).lower())+'}' + req_result = self.beo_put_request(ip, '/BeoZone/Zone/Sound/Volume/Speaker/Muted', data) + + return + + + def get_stand(self, beo_id): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + req_result = self.beo_get_request(ip, '/BeoZone/Zone/Stand/Active') + try: + if req_result.get('error', None): + if req_result['error']['message'] != '': + self.logger.info("get_stand: {} - error 
= {}".format(fn, req_result['error']['message'] )) + return -1 + else: + self.logger.info("get_stand: {} - level={}".format(fn, req_result['active'])) + return req_result['active'] + except Exception as e: + self.logger.error("get_stand: {} - req_result={} - Exception = {}".format(fn, req_result, e)) + + return + + + def set_stand(self, beo_id, position): + + ip = self.beodeviceinfo[beo_id]['device']['ip'] + fn = self.beodeviceinfo[beo_id]['device'].get('FriendlyName', ip) + + data = '{'+'"active": {}'.format(str(position).lower())+'}' + req_result = self.beo_put_request(ip, '/BeoZone/Zone/Stand/Active', data) + + return + + # ---------------------------------------------------------------------------- + + def beo_get_request(self, ip, api_url): + """ + call the b&o netlink REST api on given ip address + + :param ip: + :param api_url: + :return: + """ + device = 'http://' + ip + ':8080' + + try: + r = requests.get(device + api_url, timeout=0.5) + request_result = r.json() + except: + self.logger.debug("Could not get data from device {} for {}".format(device, api_url)) + request_result = {'error': { 'message': '', 'type': 'NO_RESPONSE'}} + return request_result + + + def beo_put_request(self, ip, api_url, data): + """ + call the b&o netlink REST api on given ip address + + :param ip: + :param api_url: + :return: + """ + device = 'http://' + ip + ':8080' + + try: + r = requests.put(device + api_url, data, timeout=0.5) + request_result = r.json() + except: + self.logger.debug("Could not get data from device {} for {}".format(device, api_url)) + request_result = {'error': { 'message': '', 'type': 'NO_RESPONSE'}} + return request_result + + diff --git a/beolink/beonotifications.py b/beolink/beonotifications.py new file mode 100644 index 000000000..02422d56a --- /dev/null +++ b/beolink/beonotifications.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2023- Martin Sinn m.sinn@gmx.de +######################################################################### +# This file is part of SmartHomeNG. +# +# Sample plugin for new plugins to run with SmartHomeNG version 1.4 and +# upwards. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + + +# ---------------------------------------------------- +# Class for handling notifications ofB&O devices +# ---------------------------------------------------- + +import json +import requests +import threading +import logging + +class beo_notifications(): + + ip = None + friendlyName = None + state = '?' 
+ + device_dict = None + + progress = {} + source = {} + playing = {} + volume = {} + swupdate = {} + + _r = None + + + def __init__(self, ip=None, device_dict=None, beodevices=None, logger=None, logger_name=None): + + if logger is None and logger_name is None: + self.logger = logging.getLogger(__name__) + elif logger_name is not None: + self.logger = logging.getLogger(logger_name) + else: + self.logger = logger + + self.device_dict = device_dict + if device_dict is not None: + ip = device_dict['device']['ip'] + + if ip is None: + self.logger.error(f"No ip address specified for Beolink device") + return + + self.ip = ip + self.friendlyName = ip + if device_dict is not None: + self.friendlyName = device_dict['device'].get('FriendlyName', ip) + + self.lock = threading.Lock() + self.log_notification( msg=f"--- New instance of class beo_notifications ---", level='dbghigh') + + + def log_notification(self, notification=None, msg=None, handled=True, level=None): + + msg_time = "" + if notification is None: + log_msg = f"{msg_time}{self.friendlyName:13}{msg}" + else: + #msg_time =f"{notification['timestamp'].split('T')[1].split('.')[0]} " + if msg is None: + msg = notification['type'] + if not handled: + msg = f"({msg}): kind={notification['kind']}" + elif level is None: + level = 'dbghigh' + msg = f"{msg}: data={notification['data']}" + else: + msg = f"{notification['type']}: {msg}" + + log_msg = f"{msg_time}{self.friendlyName:13}{msg}" + + try: + if level is None: + self.logger.info(log_msg) + elif level.lower() == 'dbghigh': + self.logger.dbgmed( log_msg ) + elif level.lower() == 'dbgmed': + self.logger.dbgmed( log_msg ) + elif level.lower() == 'dbglow': + self.logger.dbglow(log_msg) + elif level.lower() == 'debug': + self.logger.debug(log_msg) + elif level.lower() == 'info': + self.logger.info(log_msg) + elif level.lower() == 'warning': + self.logger.warning(log_msg) + else: + log_msg = log_msg + ', level=' + level.lower() + self.logger.info( log_msg ) + except: + self.logger.warning(log_msg) + + def notification_playing_net_radio(self, notification): + + data = notification['data'] + self.playing['name'] = data['name'] + self.playing['genre'] = data['genre'] + self.playing['liveDescription'] = data['liveDescription'] + + self.log_notification(notification, msg=f"name={self.playing['name']}, genre={self.playing['genre']}, liveDescription={self.playing['liveDescription']}" ) + + + def process_notification(self, line): + + try: + line_dict = json.loads(line) + except Exception as e: + self.logger.exception(f"process_notification: Exception 1 {e}") + #print(f"Fehler: {e}") + return + + try: + if self.ip != self.device_dict['device'].get('ip', None): + self.log_notification(msg=f"WRONG DEVICE - self.ip={self.ip}, device_dict ip={self.device_dict['device'].get('ip', None)}", level='warning') + except Exception as e: + self.logger.exception(f"process_notification: Exception 2 {e}") + notification = line_dict['notification'] + notification_type = notification['type'] + data = notification['data'] + + if self.device_dict.get('content', None) is None: + self.device_dict['content'] = {} + + # process notification types + if notification_type == 'PROGRESS_INFORMATION': + changed = False + data = notification['data'] + if data['state'] != self.progress.get('state', ''): + if self.device_dict is not None: + self.device_dict['content']['state'] = data['state'] + self.progress['state'] = data['state'] + changed = True + if data.get('playQueueItemId', None) is None: + self.progress['playQueueItemId'] = '' + 
if self.device_dict is not None: + self.device_dict['content']['playQueueItemId'] = '' + changed = True + elif data['playQueueItemId'] != self.progress.get('playQueueItemId', ''): + self.progress['playQueueItemId'] = data['playQueueItemId'] + if self.device_dict is not None: + self.device_dict['content']['playQueueItemId'] = data['playQueueItemId'] + changed = True + + if changed: + self.log_notification(notification) + + elif notification_type == 'SOURCE': + experience = data.get('primaryExperience', None) + if experience is not None: + self.source['state'] = experience['state'] + self.source['friendlyName'] = experience['source']['friendlyName'] + self.source['deviceFriendlyName'] = experience['source']['product']['friendlyName'] + self.friendlyName = self.source['deviceFriendlyName'] + self.logger.info(f"{self.ip}: SOURCE friendlyName={self.source['deviceFriendlyName']} - data={data}") + self.logger.info(f"{self.ip}: - self.device_dict={self.device_dict}") + self.source['type'] = experience['source']['sourceType']['type'] + self.source['category'] = experience['source']['category'] + self.source['inUse'] = experience['source']['inUse'] + # self.source['profile'] = experience['source']['profile'] + self.source['linkable'] = experience['source']['linkable'] + # self.source['contentProtection'] = experience['source']['contentProtection'] + if self.device_dict is not None: + self.device_dict['content']['state'] = experience['state'] + self.device_dict['device']['FriendlyName'] = experience['source']['product']['friendlyName'] + if self.device_dict.get('source', None) is None: + self.device_dict['source'] = {} + self.device_dict['source']['friendlyName'] = experience['source']['friendlyName'] + self.device_dict['source']['type'] = experience['source']['sourceType']['type'] + self.device_dict['source']['category'] = experience['source']['category'] + self.device_dict['source']['inUse'] = experience['source']['inUse'] + # self.device_dict['source']['profile'] = experience['source']['profile'] + self.device_dict['source']['linkable'] = experience['source']['linkable'] + + self.log_notification(notification) + #self.log_notification(notification, msg=f"source={self.source}") + else: + self.log_notification(notification) + + elif notification_type == 'NOW_PLAYING_NET_RADIO': + self.playing['name'] = data['name'] + self.playing['genre'] = data['genre'] + self.playing['liveDescription'] = data['liveDescription'] + if self.device_dict is not None: + self.device_dict['content']['channel_name'] = data['name'] + self.device_dict['content']['genre'] = data['genre'] + self.device_dict['content']['title'] = data['liveDescription'] + + self.log_notification(notification, msg=f"name={self.playing['name']}, genre={self.playing['genre']}, liveDescription={self.playing['liveDescription']}" ) + + elif notification_type == 'NUMBER_AND_NAME': + if self.device_dict is not None: + self.device_dict['content']['channel_number'] = data['number'] + self.device_dict['content']['channel_name'] = data['name'] + dvb = data.get('dvb', None) + if dvb: + self.device_dict['source']['tuner'] = data['dvb'].get('tuner', '') + self.log_notification(notification ) + + elif notification_type == 'VOLUME': + self.volume['level'] = data['speaker']['level'] + self.volume['muted'] = data['speaker']['muted'] + self.volume['min'] = data['speaker']['range']['minimum'] + self.volume['max'] = data['speaker']['range']['maximum'] + if self.device_dict is not None: + if self.device_dict.get('volume', None) is None: + self.device_dict['volume'] = 
{} + self.device_dict['volume']['level'] = data['speaker']['level'] + self.device_dict['volume']['muted'] = data['speaker']['muted'] + self.device_dict['volume']['minimum'] = data['speaker']['range']['minimum'] + self.device_dict['volume']['maximum'] = data['speaker']['range']['maximum'] + + self.log_notification( notification ) + + # elif notification_type == 'SOFTWARE_UPDATE_STATE': + # self.swupdate = {} + # self.swupdate['state'] = data['state'] + # if data.get('error', None) is not None: + # self.swupdate['updstate'] = data['error']['state'] + # self.swupdate['code'] = data['error']['code'] + # self.swupdate['text'] = data['error']['text'] + # + # self.log_notification(notification, msg=f"state={self.swupdate.get('state', '')}, updstate={self.swupdate.get('updstate', '')}" ) + + elif notification_type in('KEYBOARD', 'TRACKPAD', 'SOFTWARE_UPDATE_STATE'): + self.log_notification(notification, handled=False, level='dbghigh') + + else: + self.log_notification(notification, handled=False) + + + def open_stream(self): + + self.log_notification(msg=f"_CONNECT_: Opening connection to {self.ip}...", level='dbghigh' ) + try: + self._r = requests.get(f"http://{self.ip}:8080/BeoZone/Notifications", stream=True) + except Exception as e: + self.log_notification(msg=f"Exception while opening: {e}", level='dbglow') + if self.state != 'off': + self.log_notification(msg=f"_CONNECT_: Device ist offline", level='dbghigh') + self.state = 'off' + self._r = None + return + + if self._r.encoding is None: + self._r.encoding = 'utf-8' + self.state = 'on' + self.log_notification(msg=f"_CONNECT_: Connection opened", level='dbghigh' ) + + self.lines = self._r.iter_lines(decode_unicode=True) + return + + + def close_stream(self): + + if self._r is not None: + loop = True + while loop: + line = next(self.lines, 'OFFLINE') + if line == 'OFFLINE': + line = None + if line: + self.process_notification(line) + else: + loop = False + + self._r.close() + return + + + def process_stream(self): + + + if not self.lock.acquire(blocking=False): + self.log_notification(msg=f"Skipping, process_stream() is locked", level='debug') + return + + if self._r is None: + self.open_stream() + if self._r is None: + self.log_notification(msg=f"process_stream() unlocked because stream could not be opened", level='dbglow') + self.lock.release() + return + + try: + loop = True + while loop: + line = next(self.lines, 'OFFLINE') + if line == 'OFFLINE': + line = None + self._r = None + self.log_notification(msg=f"_LOOP_: Device ging offline", level='dbghigh') + self.log_notification(msg=f"process_stream() unlocked (offline)", level='dbglow') + self.lock.release() + return + + if line: + self.process_notification(line) + else: + loop = False + except Exception as e: + self.log_notification(msg=f"process_stream: Exception {e}") + + self.log_notification(msg=f"process_stream() unlocked", level='debug') + self.lock.release() + diff --git a/beolink/plugin.yaml b/beolink/plugin.yaml index 09656f68f..1062c1b61 100755 --- a/beolink/plugin.yaml +++ b/beolink/plugin.yaml @@ -12,7 +12,7 @@ plugin: # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page # support: https://knx-user-forum.de/forum/supportforen/smarthome-py - version: 0.6.1 # Plugin version + version: 0.8.0 # Plugin version sh_minversion: 1.9 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance diff --git 
a/beolink/webif/__init__.py b/beolink/webif/__init__.py index 3de26118e..573594093 100755 --- a/beolink/webif/__init__.py +++ b/beolink/webif/__init__.py @@ -78,14 +78,15 @@ def get_data_html(self, dataSet=None): if dataSet is None: data = {} data['beodevice_info'] = self.plugin.beodevices.beodeviceinfo + data['item_values'] = self.plugin._item_values - return json.dumps(data) + #return json.dumps(data) - # return it as json the the web page + # return it as json the web page try: return json.dumps(data) except Exception as e: - self.logger.error("get_data_html exception: {}".format(e)) + self.logger.error(f"get_data_html exception: {e} - {data['beodevice_info']}") return {} return diff --git a/beolink/webif/templates/index.html b/beolink/webif/templates/index.html index e7a9cd104..4e057f9c0 100755 --- a/beolink/webif/templates/index.html +++ b/beolink/webif/templates/index.html @@ -11,22 +11,28 @@ {% block pluginscripts %} +{% endblock pluginscripts %} + + +{% block headtable %} +
+ [shelly web interface template (Jinja2); the HTML table markup is not preserved in this excerpt. The added template renders: a head table with Broker Host/Port, Benutzer, Passwort (masked) and QoS from p.broker_config; tab 1 "Shelly Items" listing Item, Typ, Wert, Shelly Device, Shelly ID, Relais, Letztes Update and Letzter Change for each entry of p.shelly_items; tab 2 "Shelly Devices" listing Shelly ID, Online, Mac Adresse, IP Adresse, Firmware Version, neue Firmware and konfiguriert for each entry of p.shelly_devices; tab 3 "Broker Information" with Broker Version, Active Clients, Subscriptions, Messages stored, Retained Messages and Laufzeit (p.broker_uptime()), plus, if p.broker_monitoring is set, the Message-Durchsatz averages msg_rcv_1min/5min/15min and msg_snt_1min/5min/15min; tab 4 listing all items with mqtt_topic_in/mqtt_topic_out including Topic In, Letztes Update and Letzter Change.]
    +{% endblock %} + + diff --git a/shelly/user_doc.rst b/shelly/user_doc.rst index a19ce3012..abb1db782 100755 --- a/shelly/user_doc.rst +++ b/shelly/user_doc.rst @@ -9,13 +9,15 @@ shelly .. image:: webif/static/img/plugin_logo.png :alt: plugin logo :width: 300px - :height: 300px + :height: 90px :scale: 50 % :align: left Das Plugin dienst zur Steuerung von Shelly Devices über MQTT. Zur Aktivierung von MQTT für die Shelly Devices bitte die Dokumentation des jeweiligen Devices zu Rate ziehen. +| + Zurzeit werden folgende Shelly Devices mit Gen1 API unterstützt: - Shelly1/pm @@ -52,6 +54,8 @@ sowie der online-Status. Das Plugin kommuniziert über MQTT und benötigt das mqtt Modul, welches die Kommunikation mit dem MQTT Broker durchführt. Dieses Modul muß geladen und konfiguriert sein, damit das Plugin funktioniert. +| + .. toctree:: :hidden: @@ -59,106 +63,6 @@ sowie der online-Status. user_doc/plugin_configuration.rst -Shelly Device in Betrieb nehmen -=============================== - -Um Shelly Plugs mit diesem Plugin zu nutzen, müssen sie in das lokale WLAN eingebunden sein und die MQTT Unterstützung -muss aktiviert sein. - -Einbindung ins WLAN -------------------- - -Shelly in den AP-Modus versetzen - -- in die Steckdose stecken/an Strom anschließen -- Falls die LED nicht rot/blau blinken, den Taster drücken -> Shelly Plug wird in den AP Mode versetzt -- WLAN SSID suchen und verbinden (z.B. bei ShellyPlug-S die SSID shellyplug-s-xxxxxx) -- Im Browser die Seite http://192.168.33.1 aufrufen -- Einstellungen im Shelly vornehmen -> Einstellungen im Shelly-Hauptmenü - -Gen1 Devices einbinden -~~~~~~~~~~~~~~~~~~~~~~ - -- Fläche **Internet & Security** klicken -- **WIFI MODE - CLIENT** aufklappen -- Haken bei **Connect the Shelly device to an existing WiFi Network** setzen -- SSID und Password eingeben -- **SAVE** klicken -- Mit dem Browser unter der neuen IP Adresse (http://shellyplug-s-xxxxxx) im lokalen WLAN verbinden - -Gen2 Devices einbinden -~~~~~~~~~~~~~~~~~~~~~~ - -- In der Navigation links auf **Settings** klicken -- Im Abschnitt 'Network Settings' auf **Wi-Fi** klicken -- Im Abschnitt 'Wi-Fi 1 settings' Haken bei **Enable Wi-Fi Network** setzen -- SSID und Password eingeben -- **SAVE** klicken -- Mit dem Browser unter der neuen IP Adresse (http://shellyplug-s-xxxxxx) im lokalen WLAN verbinden - -| - -Firmware Update durchführen ---------------------------- - -Die Devices werden im allgemeinen mit einer älteren Firmware Version ausgeliefert. Deshalb sollte als erstes ein -Firmware Update durchgeführt werden. - -Update für Gen1 Devices -~~~~~~~~~~~~~~~~~~~~~~~ - -- Fläche **Settings** klicken -- **FIRMWARE UPDATE** aufklappen -- **UPDATE FIRMWARE** klicken - -Update für Gen2 Devices -~~~~~~~~~~~~~~~~~~~~~~~ - -- In der Navigation links auf **Settings** klicken -- Im Abschnitt 'Device Settings' auf **Firmware** klicken -- Den Button für die aktuelle **stable** Firmware klicken - -| - -MQTT konfigurieren ------------------- - -Für Gen1 Devices -~~~~~~~~~~~~~~~~ - -- Fläche **Internet & Security** klicken -- **ADVANCED - DEVELOPER SETTINGS** aufklappen -- Haken bei **Enable action execution via MQTT** setzen -- Falls der MQTT Broker ein Login erfordert, Username und Password eingeben -- Adresse des Brokers in der Form : eingeben (z.B.: 10.0.0.140:1883) -- Max QoS vorzugsweise auf **1** setzen -- **SAVE** klicken - -.. 
image:: user_doc/assets/gen1_mqtt_settings.jpg - :class: screenshot - -Für Gen2 Devices -~~~~~~~~~~~~~~~~ - -- In der Navigation links auf **Settings** klicken -- Im Abschnitt 'Connectivity' auf **MQTT** klicken -- Den Haken bei **Enable MQTT Network** setzen -- Den 'MQTT PREFIX' auf **shellies/gen2** konfigurieren -- IP-Adresse und Port des MQTT Brokers unter 'SERVER' konfigurieren -- Falls der Broker eine Anmeldung erfordert, 'USERNAME' und 'PASSWORD' konfigurieren -- **SAVE** klicken - -.. image:: user_doc/assets/gen2_mqtt_settings.jpg - :class: screenshot - -.. note:: - - Bei späteren Rekonfigurationen ist im allgemeinen das PASSWORD Feld leer und das Password muss - (bevor **Save Settings** geklickt wird) erneut eingegeben werden. Sonst verbindet sich das Device - nicht dem Broker. - -| - Konfiguration des Plugins ========================= diff --git a/shelly/user_doc/device_installation.rst b/shelly/user_doc/device_installation.rst index bf55d74fd..f35a894a8 100644 --- a/shelly/user_doc/device_installation.rst +++ b/shelly/user_doc/device_installation.rst @@ -76,7 +76,7 @@ Für Gen1 Devices - Max QoS vorzugsweise auf **1** setzen - **SAVE** klicken -.. image:: user_doc/assets/gen1_mqtt_settings.jpg +.. image:: assets/gen1_mqtt_settings.jpg :class: screenshot Für Gen2 Devices @@ -90,7 +90,7 @@ Für Gen2 Devices - Falls der Broker eine Anmeldung erfordert, 'USERNAME' und 'PASSWORD' konfigurieren - **SAVE** klicken -.. image:: user_doc/assets/gen2_mqtt_settings.jpg +.. image:: assets/gen2_mqtt_settings.jpg :class: screenshot .. note:: diff --git a/shelly/user_doc/assets/plugin_cpnfiguration.rst b/shelly/user_doc/plugin_configuration.rst similarity index 100% rename from shelly/user_doc/assets/plugin_cpnfiguration.rst rename to shelly/user_doc/plugin_configuration.rst From b6baba468e9af7598431ab4489bf95bf15950896 Mon Sep 17 00:00:00 2001 From: sisamiwe Date: Thu, 17 Aug 2023 19:48:00 +0200 Subject: [PATCH 289/775] AVM: Add set_color and set_hsv - bump to 2.2.0 - update user_doc.rst showing all allowed attributes - enhance item_attributes_master.py to add/update attributes list and description - add / enhance methods to set color and hsv - enable to switch to on/off based on dimmer value --- avm/__init__.py | 209 ++++++++++++---------- avm/item_attributes.py | 6 +- avm/item_attributes_master.py | 62 ++++++- avm/plugin.yaml | 6 +- avm/user_doc.rst | 320 ++++++++++++++++++++++++++++++++++ 5 files changed, 502 insertions(+), 101 deletions(-) mode change 100755 => 100644 avm/user_doc.rst diff --git a/avm/__init__.py b/avm/__init__.py index 0eab2326c..b088acda5 100644 --- a/avm/__init__.py +++ b/avm/__init__.py @@ -113,7 +113,7 @@ class AVM(SmartPlugin): """ Main class of the Plugin. 
Does all plugin specific stuff """ - PLUGIN_VERSION = '2.1.0' + PLUGIN_VERSION = '2.2.0' # ToDo: FritzHome.handle_updated_item: implement 'saturation' # ToDo: FritzHome.handle_updated_item: implement 'unmapped_hue' @@ -2071,6 +2071,7 @@ def handle_updated_item(self, item, avm_data_type: str, readafterwrite: int): 'unmapped_hue': (self.set_unmapped_hue, {'hue': item()}, self.get_unmapped_hue), 'unmapped_saturation': (self.set_unmapped_saturation, {'saturation': item()}, self.get_unmapped_saturation), 'color': (self.set_color, {'hs': item(), 'duration': 1, 'mapped': False}, self.get_color), + 'hsv': (self.set_hsv, {'hsv': item(), 'duration': 1, 'mapped': False}, self.get_hsv), } # Remove "set_" prefix of AHA_WO_ATTRIBUTES Items: @@ -2656,7 +2657,7 @@ def get_state(self, ain: str): """ Get the switch/actuator/lightbulb to a state. """ - return self.get_devices_as_dict()[ain].switch_state + return self.get_devices_as_dict()[ain].simpleonoff # Level/Dimmer-related methods @@ -2671,6 +2672,11 @@ def set_level(self, ain: str, level: int): else: level = int(level) + if not level and self.get_state(ain): + self.set_state_off(ain) + elif level and not self.get_state(ain): + self.set_state_on(ain) + return self.aha_request("setlevel", ain=ain, param={'level': level}, result_type='int') @NoKeyOrAttributeError @@ -2690,6 +2696,11 @@ def set_level_percentage(self, ain: str, level: int): else: level = int(level) + if not level and self.get_state(ain): + self.set_state_off(ain) + elif level and not self.get_state(ain): + self.set_state_on(ain) + return self.aha_request("setlevelpercentage", ain=ain, param={'level': level}, result_type='int') @NoKeyOrAttributeError @@ -2820,58 +2831,91 @@ def get_colors(self, ain: str) -> dict: colors[name] = values return colors + def set_color(self, ain: str, hs: list, duration: int = 1, mapped: bool = True) -> bool: + """ + Set hue and saturation. + hs: colorspace element obtained from get_colors() + hs is an array including hue, saturation and level + hue must be within range 0-359 + saturation must be within range 0-100 + duration: Speed of change in seconds, 0 = instant + mapped = True uses the AVM setcolor function. It only supports pre-defined colors that can be obtained by the get_colors function. + mapped = False uses the AVM setunmappedcolor function, featured by AVM firmwareversion since approximately Q2 2022. It supports every combination if hue/saturation/level + """ + + if len(hs) != 2: + self.logger.warning(f"set_color: hsv={hs} does to much or to less entries. 
hue and saturation needed.") + return False + + hue = to_int(hs[0]) + saturation = int(to_int(hs[1])*2.55) + duration = to_int(duration) * 10 + + # Range checks: + if not self.HUE_RANGE['min'] <= hue <= self.HUE_RANGE['max']: + hue = clamp(hue, self.HUE_RANGE['min'], self.HUE_RANGE['max']) + self.logger.warning(f"set_color: hue value must be between {self.HUE_RANGE['min']} and {self.HUE_RANGE['max']}; hue will be set to {hue}") + + if not self.SATURATION_RANGE['min'] <= saturation <= self.SATURATION_RANGE['max']: + saturation = clamp(saturation, self.SATURATION_RANGE['min'], self.SATURATION_RANGE['max']) + self.logger.warning(f"set_color: saturation value must be between {self.SATURATION_RANGE['min']} and {self.SATURATION_RANGE['max']}; hue will be set to {saturation}") + + self.logger.debug(f"set_color called with mapped={mapped} and hs: {hue}, {saturation}") + + param = { + 'hue': hue, + 'saturation': saturation, + 'duration': duration, + } + + # special mode for white color (hue=0, saturation=0): + if (hue == 0) and (saturation == 0): + self.logger.debug(f"set_color, warm white color selected") + return self.set_color_temp(ain, temperature=self.COLOR_TEMP_RANGE['min'], duration=1) + + if mapped: + result = self.aha_request("setcolor", ain=ain, param=param, result_type='int') + else: + result = self.aha_request("setunmappedcolor", ain=ain, param=param, result_type='int') + + self.logger.debug(f"set_color in mapped={mapped} with result={result}") + return result + @NoKeyOrAttributeError def get_color(self, ain: str) -> list: """get hue, saturation value as list""" return self.get_devices_as_dict()[ain].color - def set_color(self, ain, hsv, duration=0, mapped=True): + def get_hsv(self, ain: str) -> list: + """get hue, saturation, level value as list""" + return self.get_devices_as_dict()[ain].hsv + + def set_hsv(self, ain: str, hsv: list, duration: int = 1, mapped: bool = True) -> bool: """ - Set hue and saturation. - hsv: HUE colorspace element obtained from get_colors() + Set hue, saturation, level. + hsv: colorspace element obtained from get_colors() hsv is an array including hue, saturation and level hue must be within range 0-359 - saturation must be within range 0-255 + saturation must be within range 0-100 + value must be within range 0-100 duration: Speed of change in seconds, 0 = instant - mapped = True uses the AVM setcolor function. It only - supports pre-defined colors that can be obtained - by the get_colors function. - mapped = False uses the AVM setunmappedcolor function, featured - by AVM firmwareversion since approximately Q2 2022. It - supports every combination if hue/saturation/level - """ - - params = { - 'hue': int(hsv[0]), - 'saturation': int(hsv[1]), - "duration": int(duration) * 10 - } + mapped = True uses the AVM setcolor function. It only supports pre-defined colors that can be obtained by the get_colors function. + mapped = False uses the AVM setunmappedcolor function, featured by AVM firmwareversion since approximately Q2 2022. 
It supports every combination if hue/saturation/level + """ - # Range checks: - hue = int(hsv[0]) - if (hue < 0) or hue > 359: - self.logger.error(f"set_color, hue value must be between 0 and 359") - return False - saturation = int(hsv[1]) - if (saturation < 0) or saturation > 255: - self.logger.error(f"set_color, saturation value must be between 0 and 255") + self.logger.debug(f"set_hsv called: {ain=}, {hsv=}, {duration=}, {mapped=}") + + if len(hsv) != 3: + self.logger.warning(f"set_color: hsv={hsv} does have to much or to less entries. hue, saturation and level needed.") return False - # special mode for white color (hue=0, saturation=0): - self.logger.warning(f"Debug set_color called with mapped {mapped} and hs(v): {int(hsv[0])}, {int(hsv[1])}") - if (int(hsv[0]) == 0) and (int(hsv[1]) == 0): - self.logger.debug(f"set_color, warm white color selected") - success = self.set_color_temp(ain, temperature=2700, duration=1) - return success + hue, saturation, level = hsv - if mapped: - success = self.aha_request(cmd="setcolor", ain=ain, param=params) - else: - # undocumented API method for free color selection - success = self.aha_request(cmd="setunmappedcolor", ain=ain, param=params) + result_hs = self.set_color(ain, [hue, saturation], duration, mapped) + result_l = self.set_level_percentage(ain, level) - self.logger.warning(f"Debug set color in mapped {mapped} mode: success: {success}") - return success + self.logger.debug(f"set_hsv: in mapped '{mapped}': result_hs={result_hs}, result_l={result_l}") + return result_hs & result_l def set_color_discrete(self, ain, hue, duration=0): """ @@ -3718,14 +3762,14 @@ class FritzhomeDeviceColor(FritzhomeDeviceBase): supported_color_mode = None fullcolorsupport = None mapped = None - hue = None saturation = None unmapped_hue = None unmapped_saturation = None colortemperature = None color = None - + hsv = None + logger = logging.getLogger(__name__) def _update_from_node(self, node): @@ -3744,60 +3788,41 @@ def _update_color_from_node(self, node): colorcontrol_element = node.find("colorcontrol") if colorcontrol_element is not None: + self.color_mode = int(colorcontrol_element.attrib.get("current_mode")) + self.supported_color_mode = int(colorcontrol_element.attrib.get("supported_modes")) + self.fullcolorsupport = bool(colorcontrol_element.attrib.get("fullcolorsupport")) + self.mapped = bool(colorcontrol_element.attrib.get("mapped")) + self.hue = get_node_value_as_int(colorcontrol_element, "hue") + saturation = get_node_value_as_int(colorcontrol_element, "saturation") + self.saturation = int(saturation/2.55) + self.unmapped_hue = get_node_value_as_int(colorcontrol_element, "unmapped_hue") + unmapped_saturation = get_node_value_as_int(colorcontrol_element, "unmapped_saturation") + self.unmapped_saturation = int(unmapped_saturation/2.55) + self.colortemperature = get_node_value_as_int(colorcontrol_element, "temperature") + + if self.mapped: + self.color = [self.hue, self.saturation] + else: + self.color = [self.unmapped_hue, self.unmapped_saturation] - try: - self.color_mode = int(colorcontrol_element.attrib.get("current_mode")) - except ValueError: - pass - - try: - self.supported_color_mode = int(colorcontrol_element.attrib.get("supported_modes")) - except ValueError: - pass - - try: - self.fullcolorsupport = bool(colorcontrol_element.attrib.get("fullcolorsupport")) - except ValueError: - pass - - try: - self.mapped = bool(colorcontrol_element.attrib.get("mapped")) - except ValueError: - pass - - try: - self.hue = 
get_node_value_as_int(colorcontrol_element, "hue") - self.logger.debug(f"received hue value {self.hue}") - except ValueError: - self.hue = 0 - - try: - value = get_node_value_as_int(colorcontrol_element, "saturation") - self.saturation = int(value/2.55) - self.logger.debug(f"received unmapped saturation value {value}, scaled to {self.saturation}") - except ValueError: - self.saturation = 0 + self.logger.debug(f"FritzColor: created color={self.color} with mapped={self.mapped}") - try: - self.unmapped_hue = get_node_value_as_int(colorcontrol_element, "unmapped_hue") - self.logger.debug(f"received unmapped hue value {self.unmapped_hue}") - except ValueError: - self.logger.warning(f"exception in unmapped_hue extraction") - self.unmapped_hue = 0 + # get levelpercentage + levelcontrol_element = node.find("levelcontrol") + if levelcontrol_element is not None: + levelpercentage = get_node_value_as_int(levelcontrol_element, "levelpercentage") + else: + levelpercentage = 0 - try: - value = get_node_value_as_int(colorcontrol_element, "unmapped_saturation") - self.unmapped_saturation = int(value/2.55) - self.logger.debug(f"received unmapped saturation value {value}, scaled to {self.unmapped_saturation}") - except ValueError: - self.unmapped_saturation = 0 - except Exception as e: - self.logger.error(f"Exception while receiving unmapped saturation: {e}") + # Set Level to zero for consistency, if light is off: + state_element = node.find("simpleonoff") + if state_element is not None: + simpleonoff = get_node_value_as_int_as_bool(state_element, "state") + if simpleonoff is False: + levelpercentage = 0 - try: - self.colortemperature = get_node_value_as_int(colorcontrol_element, "temperature") - except ValueError: - self.colortemperature = 0 + self.hsv = self.color.copy() + self.hsv.append(levelpercentage) def get_colors(self): """Get the supported colors.""" diff --git a/avm/item_attributes.py b/avm/item_attributes.py index b6bb7011a..9b9317aad 100644 --- a/avm/item_attributes.py +++ b/avm/item_attributes.py @@ -29,13 +29,13 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info'] -ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] +ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 
'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color', 'hsv', 'switch_state'] ALL_ATTRIBUTES_WRITEONLY = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] DEPRECATED_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version', 'boost_active'] -AHA_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'set_target_temperature', 'target_temperature', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'set_window_open', 'window_open', 'windowopenactiveendtime', 'set_hkr_boost', 'hkr_boost', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'set_simpleonoff', 'simpleonoff', 'set_level', 'level', 'set_levelpercentage', 'levelpercentage', 'set_hue', 'hue', 'set_saturation', 'saturation', 'set_colortemperature', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_state', 'switch_mode', 'switch_toggle', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset', 'statistics_temp', 'statistics_hum', 'statistics_voltage', 'statistics_power', 'statistics_energy'] +AHA_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'set_target_temperature', 'target_temperature', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'set_window_open', 'window_open', 'windowopenactiveendtime', 'set_hkr_boost', 'hkr_boost', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'set_simpleonoff', 'simpleonoff', 'set_level', 'level', 'set_levelpercentage', 'levelpercentage', 'set_hue', 'hue', 'set_saturation', 'saturation', 'set_colortemperature', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color', 'hsv', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_state', 'switch_mode', 'switch_toggle', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset', 'statistics_temp', 'statistics_hum', 'statistics_voltage', 'statistics_power', 'statistics_energy'] AHA_RO_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'windowopenactiveendtime', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_mode', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset', 'statistics_temp', 'statistics_hum', 'statistics_voltage', 'statistics_power', 'statistics_energy'] AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 
'set_saturation', 'set_colortemperature', 'switch_toggle'] -AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'switch_state'] +AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'simpleonoff', 'level', 'levelpercentage', 'hue', 'saturation', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color', 'hsv', 'switch_state'] AHA_STATS_ATTRIBUTES = ['statistics_temp', 'statistics_hum', 'statistics_voltage', 'statistics_power', 'statistics_energy'] TR064_ATTRIBUTES = ['uptime', 'serial_number', 'software_version', 'hardware_version', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] TR064_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py index 505a557f0..a865451d1 100644 --- a/avm/item_attributes_master.py +++ b/avm/item_attributes_master.py @@ -25,6 +25,8 @@ FILENAME_PLUGIN = 'plugin.yaml' +DOC_FILE_NAME = 'user_doc.rst' + ATTRIBUTE = 'avm_data_type' FILE_HEADER = """\ @@ -197,6 +199,8 @@ 'colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Status und Setzen)'}, 'unmapped_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, 'unmapped_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, + 'color': {'interface': 'aha', 'group': 'color', 
'sub_group': None, 'access': 'rw', 'type': 'list ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbwerte als Liste h,s (Status und Setzen)'}, + 'hsv': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'rw', 'type': 'list ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbwerte und Helligkeit als Liste h,s,v (Status und Setzen)'}, 'color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, 'supported_color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, 'fullcolorsupport': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Lampe unterstützt setunmappedcolor'}, @@ -269,7 +273,6 @@ def export_item_attributs_py(): ATTRS['HOMEAUTO_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'homeauto'}) ATTRS['MYFRITZ_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'myfritz'}) - # create file and write header f = open(FILENAME_ATTRIBUTES, "w") f.write(FILE_HEADER) @@ -278,7 +281,7 @@ def export_item_attributs_py(): # write avm_data_types for attr, alist in ATTRS.items(): with open(FILENAME_ATTRIBUTES, "a") as f: - print (f'{attr} = {alist!r}', file=f) + print(f'{attr} = {alist!r}', file=f) print(f' {FILENAME_ATTRIBUTES} successfully created!') @@ -381,17 +384,66 @@ def get_all_keys(d): print(f' Check complete.') +def update_user_doc(): + # Update user_doc.rst + print() + print(f'D) Start updating {ATTRIBUTE} and descriptions in {DOC_FILE_NAME}!"') + attribute_list = [ + "\n", + "Dieses Kapitel wurde automatisch durch Ausführen des Skripts in der Datei 'datapoints.py' erstellt.\n", "\n", + "Nachfolgend eine Auflistung der möglichen Attribute für das Plugin:\n", + "\n"] + + for attribute in AVM_DATA_TYPES: + attribute_list.append("\n") + attribute_list.append(f"{attribute.upper()}-Interface\n") + attribute_list.append('^' * (len(attribute) + 10)) + attribute_list.append("\n") + attribute_list.append("\n") + + for avm_data_type in AVM_DATA_TYPES[attribute]: + attribute_list.append(f"- {avm_data_type}: {AVM_DATA_TYPES[attribute][avm_data_type]['description']} " + f"| Zugriff: {AVM_DATA_TYPES[attribute][avm_data_type]['access']} " + f"| Item-Type: {AVM_DATA_TYPES[attribute][avm_data_type]['type']}\n") + attribute_list.append("\n") + + with open(DOC_FILE_NAME, 'r', encoding='utf-8') as file: + lines = file.readlines() + + start = end = None + for i, line in enumerate(lines): + if 'Attribute und Beschreibung' in line: + start = i + if 'item_structs' in line: + end = i + + part1 = lines[0:start+2] + part3 = lines[end-1:len(lines)] + new_lines = part1 + attribute_list + part3 + + with open(DOC_FILE_NAME, 'w', encoding='utf-8') as file: + for line in new_lines: + file.write(line) + + print(f" Successfully updated {ATTRIBUTE} in {DOC_FILE_NAME}!") + if __name__ == '__main__': - # Run main to export item_attributes.py and update ´valid_list and valid_list_description of avm_data_type in plugin.yaml - print() - print(f'Start automated update and check of {FILENAME_PLUGIN} and {FILENAME_ATTRIBUTES}.') + print(f'Start automated update and check of {FILENAME_PLUGIN} with generation of 
{FILENAME_ATTRIBUTES} and update of {DOC_FILE_NAME}.') print('------------------------------------------------------------------------') + export_item_attributs_py() + update_plugin_yaml_avm_data_type() + check_plugin_yaml_structs() + update_user_doc() + + print() + print(f'Automated update and check of {FILENAME_PLUGIN} and generation of {FILENAME_ATTRIBUTES} complete.') + # Notes: # - HOST_ATTRIBUTES: host index needed # - HOSTS_ATTRIBUTES: no index needed diff --git a/avm/plugin.yaml b/avm/plugin.yaml index eda929418..bdc88c6f0 100644 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.1.0 # Plugin version (must match the version specified in __init__.py) + version: 2.2.0 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin @@ -259,6 +259,8 @@ item_attributes: - colortemperature # rw num - unmapped_hue # rw num - unmapped_saturation # rw num + - color # rw list + - hsv # rw list - color_mode # ro num - supported_color_mode # ro num - fullcolorsupport # ro bool @@ -434,6 +436,8 @@ item_attributes: - Farbtemperatur (Status und Setzen) - Hue (Status und Setzen) - Saturation (Status und Setzen) + - Farbwerte als Liste h,s (Status und Setzen) + - Farbwerte und Helligkeit als Liste h,s,v (Status und Setzen) - Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) - Lampe unterstützt setunmappedcolor diff --git a/avm/user_doc.rst b/avm/user_doc.rst old mode 100755 new mode 100644 index 551a9ac41..70340456c --- a/avm/user_doc.rst +++ b/avm/user_doc.rst @@ -54,6 +54,326 @@ zu Nichterreichbarkeit des Webservice) führen. Wird ein kürzerer Updatezyklus werden. Dort werden entsprechende Fehlermeldungen hinterlegt. +Attribute und Beschreibung +-------------------------- + +Dieses Kapitel wurde automatisch durch Ausführen des Skripts in der Datei 'datapoints.py' erstellt. 
+ +Nachfolgend eine Auflistung der möglichen Attribute für das Plugin: + + +TR064-Interface +^^^^^^^^^^^^^^^ + +- uptime: Laufzeit des Fritzdevice in Sekunden | Zugriff: ro | Item-Type: num + +- serial_number: Serialnummer des Fritzdevice | Zugriff: ro | Item-Type: str + +- software_version: Software Version | Zugriff: ro | Item-Type: str + +- hardware_version: Hardware Version | Zugriff: ro | Item-Type: str + +- manufacturer: Hersteller | Zugriff: ro | Item-Type: str + +- product_class: Produktklasse | Zugriff: ro | Item-Type: str + +- manufacturer_oui: Hersteller OUI | Zugriff: ro | Item-Type: str + +- model_name: Modellname | Zugriff: ro | Item-Type: str + +- description: Modellbeschreibung | Zugriff: ro | Item-Type: str + +- device_log: Geräte Log | Zugriff: ro | Item-Type: str + +- security_port: Security Port | Zugriff: ro | Item-Type: str + +- reboot: Startet das Gerät neu | Zugriff: wo | Item-Type: bool + +- myfritz_status: MyFritz Status (an/aus) | Zugriff: ro | Item-Type: bool + +- call_direction: Richtung des letzten Anrufes | Zugriff: ro | Item-Type: str + +- call_event: Status des letzten Anrufes | Zugriff: ro | Item-Type: str + +- monitor_trigger: Monitortrigger | Zugriff: ro | Item-Type: bool + +- is_call_incoming: Eingehender Anruf erkannt | Zugriff: ro | Item-Type: bool + +- last_caller_incoming: Letzter Anrufer | Zugriff: ro | Item-Type: str + +- last_call_date_incoming: Zeitpunkt des letzten eingehenden Anrufs | Zugriff: ro | Item-Type: str + +- call_event_incoming: Status des letzten eingehenden Anrufs | Zugriff: ro | Item-Type: str + +- last_number_incoming: Nummer des letzten eingehenden Anrufes | Zugriff: ro | Item-Type: str + +- last_called_number_incoming: Angerufene Nummer des letzten eingehenden Anrufs | Zugriff: ro | Item-Type: str + +- is_call_outgoing: Ausgehender Anruf erkannt | Zugriff: ro | Item-Type: bool + +- last_caller_outgoing: Letzter angerufener Kontakt | Zugriff: ro | Item-Type: str + +- last_call_date_outgoing: Zeitpunkt des letzten ausgehenden Anrufs | Zugriff: ro | Item-Type: str + +- call_event_outgoing: Status des letzten ausgehenden Anrufs | Zugriff: ro | Item-Type: str + +- last_number_outgoing: Nummer des letzten ausgehenden Anrufes | Zugriff: ro | Item-Type: str + +- last_called_number_outgoing: Letzte verwendete Telefonnummer für ausgehenden Anruf | Zugriff: ro | Item-Type: str + +- call_duration_incoming: Dauer des eingehenden Anrufs | Zugriff: ro | Item-Type: num + +- call_duration_outgoing: Dauer des ausgehenden Anrufs | Zugriff: ro | Item-Type: num + +- tam: TAM an/aus | Zugriff: rw | Item-Type: bool + +- tam_name: Name des TAM | Zugriff: ro | Item-Type: str + +- tam_new_message_number: Anzahl der alten Nachrichten | Zugriff: ro | Item-Type: num + +- tam_old_message_number: Anzahl der neuen Nachrichten | Zugriff: ro | Item-Type: num + +- tam_total_message_number: Gesamtanzahl der Nachrichten | Zugriff: ro | Item-Type: num + +- wan_connection_status: WAN Verbindungsstatus | Zugriff: ro | Item-Type: str + +- wan_connection_error: WAN Verbindungsfehler | Zugriff: ro | Item-Type: str + +- wan_is_connected: WAN Verbindung aktiv | Zugriff: ro | Item-Type: bool + +- wan_uptime: WAN Verbindungszeit | Zugriff: ro | Item-Type: str + +- wan_ip: WAN IP Adresse | Zugriff: ro | Item-Type: str + +- wan_upstream: WAN Upstream Datenmenge | Zugriff: ro | Item-Type: num + +- wan_downstream: WAN Downstream Datenmenge | Zugriff: ro | Item-Type: num + +- wan_total_packets_sent: WAN Verbindung-Anzahl insgesamt versendeter Pakete | Zugriff: ro | Item-Type: 
num + +- wan_total_packets_received: WAN Verbindung-Anzahl insgesamt empfangener Pakete | Zugriff: ro | Item-Type: num + +- wan_current_packets_sent: WAN Verbindung-Anzahl aktuell versendeter Pakete | Zugriff: ro | Item-Type: num + +- wan_current_packets_received: WAN Verbindung-Anzahl aktuell empfangener Pakete | Zugriff: ro | Item-Type: num + +- wan_total_bytes_sent: WAN Verbindung-Anzahl insgesamt versendeter Bytes | Zugriff: ro | Item-Type: num + +- wan_total_bytes_received: WAN Verbindung-Anzahl insgesamt empfangener Bytes | Zugriff: ro | Item-Type: num + +- wan_current_bytes_sent: WAN Verbindung-Anzahl aktuelle Bitrate Senden | Zugriff: ro | Item-Type: num + +- wan_current_bytes_received: WAN Verbindung-Anzahl aktuelle Bitrate Empfangen | Zugriff: ro | Item-Type: num + +- wan_link: WAN Link | Zugriff: ro | Item-Type: bool + +- wlanconfig: WLAN An/Aus | Zugriff: rw | Item-Type: bool + +- wlanconfig_ssid: WLAN SSID | Zugriff: ro | Item-Type: str + +- wlan_guest_time_remaining: Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN | Zugriff: ro | Item-Type: num + +- wlan_associates: Anzahl der verbundenen Geräte im jeweiligen WLAN | Zugriff: ro | Item-Type: num + +- wps_active: Schaltet WPS für das entsprechende WlAN an / aus | Zugriff: rw | Item-Type: bool + +- wps_status: WPS Status des entsprechenden WlAN | Zugriff: ro | Item-Type: str + +- wps_mode: WPS Modus des entsprechenden WlAN | Zugriff: ro | Item-Type: str + +- wlan_total_associates: Anzahl der verbundenen Geräte im WLAN | Zugriff: ro | Item-Type: num + +- hosts_count: Anzahl der Hosts | Zugriff: ro | Item-Type: num + +- hosts_info: Informationen über die Hosts | Zugriff: ro | Item-Type: dict + +- mesh_topology: Topologie des Mesh | Zugriff: ro | Item-Type: dict + +- number_of_hosts: Anzahl der verbundenen Hosts (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: num + +- hosts_url: URL zu Hosts (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: str + +- mesh_url: URL zum Mesh (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: str + +- network_device: Verbindungsstatus des Gerätes // Defines Network device via MAC-Adresse | Zugriff: ro | Item-Type: bool + +- device_ip: Geräte-IP (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: str + +- device_connection_type: Verbindungstyp (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: str + +- device_hostname: Gerätename (Muss Child von "network_device" sein | Zugriff: ro | Item-Type: str + +- connection_status: Verbindungsstatus (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: bool + +- is_host_active: Host aktiv? 
(Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: bool + +- host_info: Informationen zum Host (Muss Child von "network_device" sein) | Zugriff: ro | Item-Type: str + +- number_of_deflections: Anzahl der eingestellten Rufumleitungen | Zugriff: ro | Item-Type: num + +- deflections_details: Details zu allen Rufumleitung (als dict) | Zugriff: ro | Item-Type: dict + +- deflection_details: Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item | Zugriff: ro | Item-Type: dict + +- deflection_enable: Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: rw | Item-Type: bool + +- deflection_type: Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- deflection_number: Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- deflection_to_number: Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- deflection_mode: Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- deflection_outgoing: Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- deflection_phonebook_id: Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item | Zugriff: ro | Item-Type: str + +- aha_device: Steckdose schalten; siehe "switch_state" | Zugriff: rw | Item-Type: bool + +- hkr_device: Status des HKR (OPEN; CLOSED; TEMP) | Zugriff: ro | Item-Type: str + +- set_temperature: siehe "target_temperature" | Zugriff: ro | Item-Type: num + +- temperature: siehe "current_temperature" | Zugriff: ro | Item-Type: num + +- set_temperature_reduced: siehe "temperature_reduced" | Zugriff: ro | Item-Type: num + +- set_temperature_comfort: siehe "temperature_comfort" | Zugriff: ro | Item-Type: num + +- firmware_version: siehe "fw_version" | Zugriff: ro | Item-Type: str + + +AHA-Interface +^^^^^^^^^^^^^ + +- device_id: Geräte -ID | Zugriff: ro | Item-Type: str + +- manufacturer: Hersteller | Zugriff: ro | Item-Type: str + +- product_name: Produktname | Zugriff: ro | Item-Type: str + +- fw_version: Firmware Version | Zugriff: ro | Item-Type: str + +- connected: Verbindungsstatus | Zugriff: ro | Item-Type: bool + +- device_name: Gerätename | Zugriff: ro | Item-Type: str + +- tx_busy: Verbindung aktiv | Zugriff: ro | Item-Type: bool + +- device_functions: Im Gerät vorhandene Funktionen | Zugriff: ro | Item-Type: list + +- set_target_temperature: Soll-Temperatur Setzen | Zugriff: wo | Item-Type: num + +- target_temperature: Soll-Temperatur (Status und Setzen) | Zugriff: rw | Item-Type: num + +- current_temperature: Ist-Temperatur | Zugriff: ro | Item-Type: num + +- temperature_reduced: Eingestellte reduzierte Temperatur | Zugriff: ro | Item-Type: num + +- temperature_comfort: Eingestellte Komfort-Temperatur | Zugriff: ro | Item-Type: num + +- temperature_offset: Eingestellter Temperatur-Offset | Zugriff: ro | Item-Type: num + +- set_window_open: Window-Open-Funktion (Setzen) | Zugriff: wo | Item-Type: bool + +- window_open: Window-Open-Funktion (Status und Setzen) | 
Zugriff: rw | Item-Type: bool + +- windowopenactiveendtime: Zeitliches Ende der "Window Open" Funktion | Zugriff: ro | Item-Type: num + +- set_hkr_boost: Boost-Funktion (Setzen) | Zugriff: wo | Item-Type: bool + +- hkr_boost: Boost-Funktion (Status und Setzen) | Zugriff: rw | Item-Type: bool + +- boost_active: Status der "Boost" Funktion | Zugriff: ro | Item-Type: bool + +- boostactiveendtime: Zeitliches Ende der Boost Funktion | Zugriff: ro | Item-Type: num + +- summer_active: Status der "Sommer" Funktion | Zugriff: ro | Item-Type: bool + +- holiday_active: Status der "Holiday" Funktion | Zugriff: ro | Item-Type: bool + +- battery_low: Battery-low Status | Zugriff: ro | Item-Type: bool + +- battery_level: Batterie-Status in % | Zugriff: ro | Item-Type: num + +- lock: Tastensperre über UI/API aktiv | Zugriff: ro | Item-Type: bool + +- device_lock: Tastensperre direkt am Gerät ein | Zugriff: ro | Item-Type: bool + +- errorcode: Fehlercodes die der HKR liefert | Zugriff: ro | Item-Type: num + +- set_simpleonoff: Gerät/Aktor/Lampe an-/ausschalten | Zugriff: wo | Item-Type: bool + +- simpleonoff: Gerät/Aktor/Lampe (Status und Setzen) | Zugriff: rw | Item-Type: bool + +- set_level: Level/Niveau von 0 bis 255 (Setzen) | Zugriff: wo | Item-Type: num + +- level: Level/Niveau von 0 bis 255 (Setzen & Status) | Zugriff: rw | Item-Type: num + +- set_levelpercentage: Level/Niveau von 0% bis 100% (Setzen) | Zugriff: wo | Item-Type: num + +- levelpercentage: Level/Niveau von 0% bis 100% (Setzen & Status) | Zugriff: rw | Item-Type: num + +- set_hue: Hue (Setzen) | Zugriff: wo | Item-Type: num + +- hue: Hue (Status und Setzen) | Zugriff: rw | Item-Type: num + +- set_saturation: Saturation (Setzen) | Zugriff: wo | Item-Type: num + +- saturation: Saturation (Status und Setzen) | Zugriff: rw | Item-Type: num + +- set_colortemperature: Farbtemperatur (Setzen) | Zugriff: wo | Item-Type: num + +- colortemperature: Farbtemperatur (Status und Setzen) | Zugriff: rw | Item-Type: num + +- unmapped_hue: Hue (Status und Setzen) | Zugriff: rw | Item-Type: num + +- unmapped_saturation: Saturation (Status und Setzen) | Zugriff: rw | Item-Type: num + +- color: Farbwerte als Liste h,s (Status und Setzen) | Zugriff: rw | Item-Type: list + +- hsv: Farbwerte und Helligkeit als Liste h,s,v (Status und Setzen) | Zugriff: rw | Item-Type: list + +- color_mode: Aktueller Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) | Zugriff: ro | Item-Type: num + +- supported_color_mode: Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) | Zugriff: ro | Item-Type: num + +- fullcolorsupport: Lampe unterstützt setunmappedcolor | Zugriff: ro | Item-Type: bool + +- mapped: von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt | Zugriff: ro | Item-Type: bool + +- switch_state: Schaltzustand Steckdose (Status und Setzen) | Zugriff: rw | Item-Type: bool + +- switch_mode: Zeitschaltung oder manuell schalten | Zugriff: ro | Item-Type: str + +- switch_toggle: Schaltzustand umschalten (toggle) | Zugriff: wo | Item-Type: bool + +- power: Leistung in W (Aktualisierung alle 2 min) | Zugriff: ro | Item-Type: num + +- energy: absoluter Verbrauch seit Inbetriebnahme in Wh | Zugriff: ro | Item-Type: num + +- voltage: Spannung in V (Aktualisierung alle 2 min) | Zugriff: ro | Item-Type: num + +- humidity: Relative Luftfeuchtigkeit in % (FD440) | Zugriff: ro | Item-Type: num + +- alert_state: letzter übermittelter Alarmzustand | Zugriff: ro | Item-Type: bool + +- blind_mode: automatische Zeitschaltung oder manuell fahren 
| Zugriff: ro | Item-Type: str + +- endpositionsset: ist die Endlage für das Rollo konfiguriert | Zugriff: ro | Item-Type: bool + +- statistics_temp: Wertestatistik für Temperatur | Zugriff: ro | Item-Type: list + +- statistics_hum: Wertestatistik für Feuchtigkeit | Zugriff: ro | Item-Type: list + +- statistics_voltage: Wertestatistik für Spannung | Zugriff: ro | Item-Type: list + +- statistics_power: Wertestatistik für Leistung | Zugriff: ro | Item-Type: list + +- statistics_energy: Wertestatistik für Energie | Zugriff: ro | Item-Type: list + + item_structs ------------ Zur Vereinfachung der Einrichtung von Items sind für folgende Item-structs vordefiniert: From 3ced13c7c020af661f7ecf06cc0713ad86b6a075 Mon Sep 17 00:00:00 2001 From: aschwith Date: Thu, 17 Aug 2023 21:13:42 +0200 Subject: [PATCH 290/775] solarforecast: replaced deprecated sh.now() with method of lib shtime --- solarforecast/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solarforecast/__init__.py b/solarforecast/__init__.py index d0409061d..ddde696e2 100755 --- a/solarforecast/__init__.py +++ b/solarforecast/__init__.py @@ -141,7 +141,7 @@ def poll_backend(self): # Decode Json data: wattHoursToday = None wattHoursTomorrow = None - today = self._sh.now().date() + today = self._sh.shtime.now().date() tomorrow = today + datetime.timedelta(days=1) self.last_update = today From 033d9327905906ade43e2a7d9a47e4fc11d91c59 Mon Sep 17 00:00:00 2001 From: aschwith Date: Thu, 17 Aug 2023 21:46:03 +0200 Subject: [PATCH 291/775] resol: improved user_doc --- resol/user_doc.rst | 67 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 66 insertions(+), 1 deletion(-) diff --git a/resol/user_doc.rst b/resol/user_doc.rst index 725daed24..2be1058ef 100755 --- a/resol/user_doc.rst +++ b/resol/user_doc.rst @@ -12,7 +12,10 @@ resol :scale: 50 % :align: left -Resol plugin, mit Unterstützung für Resol Solar Datenlogger, Frischwasserwaermetauscher und Regler. +Allgemein +========= + +Resol plugin, mit Unterstützung für Resol Solar Datenlogger, Frischwasserwärmetauscher und Regler. Konfiguration ============= @@ -20,3 +23,65 @@ Konfiguration Die Informationen zur Konfiguration des Plugins sind unter :doc:`/plugins_doc/config/resol` beschrieben. +Weiterführende Informationen +============================ + +Weitere Informationen zu Resol Parametern und Quellen sind hier zu finden: + +https://github.com/danielwippermann/resol-vbus + +https://danielwippermann.github.io/resol-vbus/#/vsf + + +Resol Protokol +-------------- + +Synch byte beween messages: 0xAA + +Message: + + Byte Content + + 0-1 Destination + + 2-3 Source + + 4 Protocol Version, 0x10 -> "PV1", 0x20 -> "PV2", 0x30 -> "PV3" + + 5-6 Command + + 7-8 Frame count, Example 0x1047-> 104 bytes + + +Beispiele +========= + +.. 
code-block:: yaml + + solar: + resol_source@solar: '0x7721' + resol_destination@solar: '0x0010' + resol_command@solar: '0x0100' + + sensordefektmaske: + type: num + resol_offset@solar: 36 + resol_bituse@solar: 16 + resol_factor@solar: + - '1.0' + - '256.0' + + temperatur_1: + name: 'Temperature Kollektor' + type: num + resol_offset@solar: 0 + resol_bituse@solar: 16 + resol_factor@solar: + - '0.1' + - '25.6' + resol_isSigned@solar: + - False + - True + + + From 5721732c2975d10802b52d036911c48cefb9f473 Mon Sep 17 00:00:00 2001 From: aschwith Date: Thu, 17 Aug 2023 21:48:55 +0200 Subject: [PATCH 292/775] sonos: improved thread termination; reintroduced old legacy method play_radio --- sonos/__init__.py | 98 +++++++++++++++++++++++++++++++++++++++++++++-- sonos/plugin.yaml | 2 +- 2 files changed, 95 insertions(+), 5 deletions(-) diff --git a/sonos/__init__.py b/sonos/__init__.py index deb39814b..0654fde61 100755 --- a/sonos/__init__.py +++ b/sonos/__init__.py @@ -273,7 +273,7 @@ def unsubscribe(self): self.logger.dbglow("Preparing to terminate thread") if debug: self.logger.dbghigh(f"unsubscribe(): Preparing to terminate thread for endpoint {self._endpoint}") - self._thread.join(2) + self._thread.join(timeout=4) if debug: self.logger.dbghigh(f"unsubscribe(): Thread joined for endpoint {self._endpoint}") @@ -283,7 +283,7 @@ def unsubscribe(self): self.logger.dbghigh(f"Thread killed for endpoint {self._endpoint}") else: - self.logger.error("unsubscibe(): Error, thread is still alive") + self.logger.warning("unsubscibe(): Error, thread is still alive after termination (join timed-out)") self._thread = None self.logger.info(f"Event {self._endpoint} unsubscribed and thread terminated") if debug: @@ -2298,9 +2298,99 @@ def play_sonos_radio(self, station_name: str, start: bool = True) -> None: return False return True - def _play_radio(self, station_name: str, music_service: str = 'TuneIn', start: bool = True) -> tuple: """ + Old legacy radio select function. The function is not based on the SoCo library. + Plays a radio station by a given radio name. If more than one radio station are found, the first result will be + played. + :param station_name: radio station name + :param start: Start playing after setting the radio stream? Default: True + :return: None + """ + + # ------------------------------------------------------------------------------------------------------------ # + + # This code here is a quick workaround for issue https://github.com/SoCo/SoCo/issues/557 and will be fixed + # if a patch is applied. 
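+        # In short: a SOAP "search" request is POSTed directly to the TuneIn backend
+        # (legato.radiotime.com), the first mediaCollection/mediaMetadata hit of the
+        # reply is turned into an x-sonosapi-stream URI (sid=254) and handed to the
+        # speaker via avTransport.SetAVTransportURI before play() is called.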
+ + # ------------------------------------------------------------------------------------------------------------ # + + if not self._check_property(): + return False, "Property check failed" + if not self.is_coordinator: + sonos_speaker[self.coordinator].play_tunein(station_name, start) + else: + + data = 'anon' \ + 'Sonos' \ + 'search:station{search}' \ + '0100'.format( + search=station_name) + + headers = { + "SOAPACTION": "http://www.sonos.com/Services/1.1#search", + "USER-AGENT": "Linux UPnP/1.0 Sonos/40.5-49250 (WDCR:Microsoft Windows NT 10.0.16299)", + "CONTENT-TYPE": 'text/xml; charset="utf-8"' + } + + response = requests.post("http://legato.radiotime.com/Radio.asmx", data=data.encode("utf-8"), + headers=headers) + schema = XML.fromstring(response.content) + body = schema.find("{http://schemas.xmlsoap.org/soap/envelope/}Body")[0] + + response = list(xmltodict.parse(XML.tostring(body), process_namespaces=True, + namespaces={'http://www.sonos.com/Services/1.1': None}).values())[0] + + items = [] + # The result to be parsed is in either searchResult or getMetadataResult + if 'searchResult' in response: + response = response['searchResult'] + elif 'getMetadataResult' in response: + response = response['getMetadataResult'] + else: + raise ValueError('"response" should contain either the key ' + '"searchResult" or "getMetadataResult"') + return False, "response should contain either the key 'searchResult' or 'getMetadataResult'" + + for result_type in ('mediaCollection', 'mediaMetadata'): + # Upper case the first letter (used for the class_key) + result_type_proper = result_type[0].upper() + result_type[1:] + raw_items = response.get(result_type, []) + # If there is only 1 result, it is not put in an array + if isinstance(raw_items, OrderedDict): + raw_items = [raw_items] + + for raw_item in raw_items: + # Form the class_key, which is a unique string for this type, + # formed by concatenating the result type with the item type. Turns + # into e.g: MediaMetadataTrack + class_key = result_type_proper + raw_item['itemType'].title() + cls = get_class(class_key) + #from plugins.sonos.soco.music_services.token_store import JsonFileTokenStore + items.append( + cls.from_music_service(MusicService(service_name='TuneIn'), raw_item)) + #cls.from_music_service(MusicService(service_name='TuneIn', token_store=JsonFileTokenStore()), raw_item)) + + if not items: + exit(0) + + item_id = items[0].metadata['id'] + sid = 254 # hard-coded TuneIn service id ? + sn = 0 + meta = to_didl_string(items[0]) + + uri = "x-sonosapi-stream:{0}?sid={1}&flags=8224&sn={2}".format(item_id, sid, sn) + + self.soco.avTransport.SetAVTransportURI([('InstanceID', 0), + ('CurrentURI', uri), ('CurrentURIMetaData', meta)]) + if start: + self.soco.play() + return True, "" + + def _play_radio_dontuse(self, station_name: str, music_service: str = 'TuneIn', start: bool = True) -> tuple: + """ + WARNING: THIS FUNCTION IS NOT WORKING FOR SOME RADIO STATIONS, e.g. Plays a radio station by a given radio name at a given music service. If more than one radio station are found, the first result will be played. :param music_service: music service name Default: TuneIn @@ -2879,7 +2969,7 @@ class Sonos(SmartPlugin): """ Main class of the Plugin. 
Does all plugin specific stuff """ - PLUGIN_VERSION = "1.8.2" + PLUGIN_VERSION = "1.8.3" def __init__(self, sh): """Initializes the plugin.""" diff --git a/sonos/plugin.yaml b/sonos/plugin.yaml index 3f60f0aa7..4fca62837 100755 --- a/sonos/plugin.yaml +++ b/sonos/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: https://github.com/smarthomeNG/plugins/blob/master/sonos/README.md support: https://knx-user-forum.de/forum/supportforen/smarthome-py/25151-sonos-anbindung - version: 1.8.2 # Plugin version + version: 1.8.3 # Plugin version sh_minversion: 1.5.1 # minimum shNG version to use this plugin py_minversion: 3.8 # minimum Python version to use for this plugin multi_instance: False # plugin supports multi instance From 4441b781125bc87765c5ec3562706ff800814796 Mon Sep 17 00:00:00 2001 From: lgb-this Date: Sat, 19 Aug 2023 09:22:55 +0200 Subject: [PATCH 293/775] First commit of byd_bat. --- byd_bat/__init__.py | 981 +++++++++++++++++++++++ byd_bat/locale.yaml | 40 + byd_bat/plugin.yaml | 334 ++++++++ byd_bat/requirements.txt | 1 + byd_bat/user_doc.rst | 106 +++ byd_bat/webif/__init__.py | 164 ++++ byd_bat/webif/static/img/diag.JPG | Bin 0 -> 67618 bytes byd_bat/webif/static/img/home.JPG | Bin 0 -> 117340 bytes byd_bat/webif/static/img/plugin_logo.png | Bin 0 -> 44103 bytes byd_bat/webif/static/img/readme.txt | 6 + byd_bat/webif/static/img/temp.JPG | Bin 0 -> 70537 bytes byd_bat/webif/static/img/volt.JPG | Bin 0 -> 121379 bytes byd_bat/webif/templates/index.html | 473 +++++++++++ 13 files changed, 2105 insertions(+) create mode 100644 byd_bat/__init__.py create mode 100644 byd_bat/locale.yaml create mode 100644 byd_bat/plugin.yaml create mode 100644 byd_bat/requirements.txt create mode 100644 byd_bat/user_doc.rst create mode 100644 byd_bat/webif/__init__.py create mode 100644 byd_bat/webif/static/img/diag.JPG create mode 100644 byd_bat/webif/static/img/home.JPG create mode 100644 byd_bat/webif/static/img/plugin_logo.png create mode 100644 byd_bat/webif/static/img/readme.txt create mode 100644 byd_bat/webif/static/img/temp.JPG create mode 100644 byd_bat/webif/static/img/volt.JPG create mode 100644 byd_bat/webif/templates/index.html diff --git a/byd_bat/__init__.py b/byd_bat/__init__.py new file mode 100644 index 000000000..a6a236776 --- /dev/null +++ b/byd_bat/__init__.py @@ -0,0 +1,981 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2023 Matthias Manhart smarthome@beathis.ch +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# Monitoring of BYD energy storage systems (HVM, HVS). +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +# ----------------------------------------------------------------------- +# +# History +# +# V0.0.1 230811 - erster Release +# +# V0.0.2 230812 - Korrektur Berechnung Batteriestrom +# +# V0.0.3 - Code mit pycodestyle kontrolliert/angepasst +# - Anpassungen durch 'check_plugin' +# +# ----------------------------------------------------------------------- +# +# Als Basis fuer die Implementierung wurde die folgende Quelle verwendet: +# +# https://github.com/christianh17/ioBroker.bydhvs +# +# Diverse Notizen +# +# - Datenpaket wird mit CRC16/MODBUS am Ende abgeschlossen (2 Byte, LSB,MSB) +# +# ----------------------------------------------------------------------- + +from lib.model.smartplugin import * +from lib.item import Items +from .webif import WebInterface +import cherrypy + +import socket +import time +import matplotlib +import matplotlib.pyplot as plt +import numpy as np + +byd_ip_default = "192.168.16.254" + +scheduler_name = 'mmbyd' + +BUFFER_SIZE = 4096 + +byd_sample_basics = 60 # Abfrage fuer Basisdaten [s] + +byd_timeout_1s = 1.0 +byd_timeout_2s = 2.0 +byd_timeout_8s = 8.0 + +byd_tours_max = 3 +byd_cells_max = 160 +byd_temps_max = 64 + +byd_no_of_col = 8 + +byd_webif_img = "/webif/static/img/" +byd_path_empty = "x" +byd_fname_volt = "bydvt" +byd_fname_temp = "bydtt" +byd_fname_ext = ".png" + +MESSAGE_0 = "010300000066c5e0" +MESSAGE_1 = "01030500001984cc" +MESSAGE_2 = "010300100003040e" + +MESSAGE_3_1 = "0110055000020400018100f853" # Start Messung Turm 1 +MESSAGE_3_2 = "01100550000204000281000853" # Start Messung Turm 2 +MESSAGE_3_3 = "01100550000204000381005993" # Start Messung Turm 3 +MESSAGE_4 = "010305510001d517" +MESSAGE_5 = "01030558004104e5" +MESSAGE_6 = "01030558004104e5" +MESSAGE_7 = "01030558004104e5" +MESSAGE_8 = "01030558004104e5" + +MESSAGE_9 = "01100100000306444542554700176f" # switch to second turn for the last few cells (not tested, perhaps only for tower 1 ?) +MESSAGE_10 = "0110055000020400018100f853" # start measuring remaining cells (like 3a) (not tested, perhaps only for tower 1 ?) +MESSAGE_11 = "010305510001d517" # (like 4) (not tested) +MESSAGE_12 = "01030558004104e5" # (like 5) (not tested) +MESSAGE_13 = "01030558004104e5" # (like 6) (not tested) + +byd_errors = [ + "High Temperature Charging (Cells)", + "Low Temperature Charging (Cells)", + "Over Current Discharging", + "Over Current Charging", + "Main circuit Failure", + "Short Current Alarm", + "Cells Imbalance", + "Current Sensor Failure", + "Battery Over Voltage", + "Battery Under Voltage", + "Cell Over Voltage", + "Cell Under Voltage", + "Voltage Sensor Failure", + "Temperature Sensor Failure", + "High Temperature Discharging (Cells)", + "Low Temperature Discharging (Cells)" +] + +byd_invs = [ + "Fronius HV", + "Goodwe HV", + "Fronius HV", + "Kostal HV", + "Goodwe HV", + "SMA SBS3.7/5.0", + "Kostal HV", + "SMA SBS3.7/5.0", + "Sungrow HV", + "Sungrow HV", + "Kaco HV", + "Kaco HV", + "Ingeteam HV", + "Ingeteam HV", + "SMA SBS 2.5 HV", + "", + "SMA SBS 2.5 HV", + "Fronius HV" +] + +byd_invs_lvs = [ + "Fronius HV", + "Goodwe HV", + "Goodwe HV", + "Kostal HV", + "Selectronic LV", + "SMA SBS3.7/5.0", + "SMA LV", + "Victron LV", + "Suntech LV", + "Sungrow HV", + "Kaco HV", + "Studer LV", + "Solar Edge LV", + "Ingeteam HV", + "Sungrow LV", + "Schneider LV", + "SMA SBS2.5 HV", + "Solar Edge LV", + "Solar Edge LV", + "Solar Edge LV" +] + + +class byd_bat(SmartPlugin): + + """ + Main class of the Plugin. 
Does all plugin specific stuff and provides + the update functions for the items + + HINT: Please have a look at the SmartPlugin class to see which + class properties and methods (class variables and class functions) + are already available! + """ + + PLUGIN_VERSION = '0.0.2' + + def __init__(self,sh): + """ + Initalizes the plugin. + + If you need the sh object at all, use the method self.get_sh() to get it. There should be almost no need for + a reference to the sh object any more. + + Plugins have to use the new way of getting parameter values: + use the SmartPlugin method get_parameter_value(parameter_name). Anywhere within the Plugin you can get + the configured (and checked) value for a parameter by calling self.get_parameter_value(parameter_name). It + returns the value in the datatype that is defined in the metadata. + """ + + # Call init code of parent class (SmartPlugin) + super().__init__() + + # get the parameters for the plugin (as defined in metadata plugin.yaml): + + if self.get_parameter_value('ip') != '': + self.ip = self.get_parameter_value('ip') + else: + self.log_info("no ip defined => use default '" + byd_ip_default + "'") + self.ip = byd_ip_default + + if self.get_parameter_value('imgpath') != '': + self.bpath = self.get_parameter_value('imgpath') + if self.bpath is None: + self.log_info("path is None") + self.bpath = byd_path_empty + else: + self.log_info("no path defined") + self.bpath = byd_path_empty + + self.log_debug("BYD ip = " + self.ip) + self.log_debug("BYD path = " + self.bpath) + + # cycle time in seconds, only needed, if hardware/interface needs to be + # polled for value changes by adding a scheduler entry in the run method of this plugin + # (maybe you want to make it a plugin parameter?) + self._cycle = byd_sample_basics + + self.last_diag_hour = 99 # erzwingt beim ersten Aufruf das Abfragen der Detaildaten + + self.byd_root_found = False + + self.byd_diag_soc = [] + self.byd_diag_volt_max = [] + self.byd_diag_volt_max_c = [] + self.byd_diag_volt_min = [] + self.byd_diag_volt_min_c = [] + self.byd_diag_temp_max_c = [] + self.byd_diag_temp_min_c = [] + self.byd_volt_cell = [] + self.byd_temp_cell = [] + for x in range(0,byd_tours_max + 1): + self.byd_diag_soc.append(0) + self.byd_diag_volt_max.append(0) + self.byd_diag_volt_max_c.append(0) + self.byd_diag_volt_min.append(0) + self.byd_diag_volt_min_c.append(0) + self.byd_diag_temp_max_c.append(0) + self.byd_diag_temp_min_c.append(0) + a = [] + for xx in range(0,byd_cells_max + 1): + a.append(0) + self.byd_volt_cell.append(a) + a = [] + for xx in range(0,byd_temps_max + 1): + a.append(0) + self.byd_temp_cell.append(a) + + self.last_homedata = self.now_str() + self.last_diagdata = self.now_str() + + # Initialization code goes here + + self.sh = sh + + self.init_webinterface() + + return + + def run(self): + """ + Run method for the plugin + """ + self.scheduler_add(scheduler_name,self.poll_device,cycle=self._cycle) + + self.alive = True + + return + + def stop(self): + """ + Stop method for the plugin + """ + self.logger.debug("Stop method called") + self.scheduler_remove('poll_device') + self.alive = False + + def parse_item(self, item): + """ + Default plugin parse_item method. Is called when the plugin is initialized. + The plugin can, corresponding to its attribute keywords, decide what to do with + the item in future, like adding it to an internal array for future reference + :param item: The item to process. 
+ :return: If the plugin needs to be informed of an items change you should return a call back function + like the function update_item down below. An example when this is needed is the knx plugin + where parse_item returns the update_item function when the attribute knx_send is found. + This means that when the items value is about to be updated, the call back function is called + with the item, caller, source and dest as arguments and in case of the knx plugin the value + can be sent to the knx with a knx write function within the knx plugin. + """ + # todo + # if interesting item for sending values: + # self._itemlist.append(item) + # return self.update_item + if self.get_iattr_value(item.conf,'byd_root'): + self.byd_root = item + self.byd_root_found = True + self.log_debug("BYD root = " + "{0}".format(self.byd_root)) + + def parse_logic(self, logic): + """ + Default plugin parse_logic method + """ + if 'xxx' in logic.conf: + # self.function(logic['name']) + pass + + def update_item(self, item, caller=None, source=None, dest=None): + # Wird aufgerufen, wenn ein Item mit dem Attribut 'mmgarden' geaendert wird + + if self.alive and caller != self.get_shortname(): + # code to execute if the plugin is not stopped + # and only, if the item has not been changed by this plugin: + + return + + def poll_device(self): + # Wird alle 'self._cycle' aufgerufen + + self.log_debug("BYD Start *********************") + + if self.byd_root_found is False: + self.log_debug("BYD not root found - please define root item with structure 'byd_struct'") + return + + # Verbindung herstellen + client = socket.socket(socket.AF_INET,socket.SOCK_STREAM) + try: + client.connect((self.ip,8080)) + except: + self.log_info("client.connect failed (" + self.ip + ")") + self.byd_root.info.connection(False) + client.close() + return + + # 1.Befehl senden + client.send(bytes.fromhex(MESSAGE_0)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 0 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_0(data) + + # 2.Befehl senden + client.send(bytes.fromhex(MESSAGE_1)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 1 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_1(data) + + # 3.Befehl senden + client.send(bytes.fromhex(MESSAGE_2)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 2 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_2(data) + + # Speichere die Basisdaten + self.basisdata_save(self.byd_root) + + # Pruefe, ob die Diagnosedaten abgefragt werden sollen + tn = self.now() + if tn.hour == self.last_diag_hour: + self.byd_root.info.connection(True) + self.log_debug("BYD Basic Done ****************") + client.close() + return + + self.last_diag_hour = tn.hour + + # Durchlaufe alle Tuerme + for x in range(1,self.byd_bms_qty + 1): + self.log_debug("Turm " + str(x)) + + # 4.Befehl senden + if x == 1: + client.send(bytes.fromhex(MESSAGE_3_1)) + elif x == 2: + client.send(bytes.fromhex(MESSAGE_3_2)) + elif x == 3: + client.send(bytes.fromhex(MESSAGE_3_3)) + client.settimeout(byd_timeout_2s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 3 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_nop(data,x) + time.sleep(2) + + # 5.Befehl senden + 
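+            # (the reply to MESSAGE_4 can take several seconds, hence byd_timeout_8s below)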
client.send(bytes.fromhex(MESSAGE_4)) + client.settimeout(byd_timeout_8s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 4 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_nop(data,x) + + # 6.Befehl senden + client.send(bytes.fromhex(MESSAGE_5)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 5 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_5(data,x) + + # 7.Befehl senden + client.send(bytes.fromhex(MESSAGE_6)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 6 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_6(data,x) + + # 8.Befehl senden + client.send(bytes.fromhex(MESSAGE_7)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 7 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_7(data,x) + + # 9.Befehl senden + client.send(bytes.fromhex(MESSAGE_8)) + client.settimeout(byd_timeout_1s) + + try: + data = client.recv(BUFFER_SIZE) + except: + self.log_info("client.recv 8 failed") + self.byd_root.info.connection(False) + client.close() + return + self.decode_8(data,x) + + self.diagdata_save(self.byd_root) + self.byd_root.info.connection(True) + + self.log_debug("BYD Diag Done +++++++++++++++++") + client.close() + + return + + def decode_0(self,data): + # Decodieren der Nachricht auf Befehl 'MESSAGE_0'. + + self.log_debug("decode_0: " + data.hex()) + + # Serienummer + self.byd_serial = "" + for x in range(3,22): + self.byd_serial = self.byd_serial + chr(data[x]) + + # Firmware-Versionen + self.byd_bmu_a = "V" + str(data[27]) + "." + str(data[28]) + self.byd_bmu_b = "V" + str(data[29]) + "." + str(data[30]) + if data[33] == 0: + self.byd_bmu = self.byd_bmu_a + "-A" + else: + self.byd_bmu = self.byd_bmu_b + "-B" + self.byd_bms = "V" + str(data[31]) + "." + str(data[32]) + "-" + chr(data[34] + 65) + + # Anzahl Tuerme und Anzahl Module pro Turm + self.byd_bms_qty = data[36] // 0x10 + if (self.byd_bms_qty == 0) or (self.byd_bms_qty > byd_tours_max): + self.byd_bms_qty = 1 + self.byd_modules = data[36] % 0x10 + self.byd_batt_type_snr = data[5] + + # Application + if data[38] == 1: + self.byd_application = "OnGrid" + else: + self.byd_application = "OffGrid" + + self.log_debug("Serial : " + self.byd_serial) + self.log_debug("BMU A : " + self.byd_bmu_a) + self.log_debug("BMU B : " + self.byd_bmu_b) + self.log_debug("BMU : " + self.byd_bmu) + self.log_debug("BMS : " + self.byd_bms) + self.log_debug("BMS QTY : " + str(self.byd_bms_qty)) + self.log_debug("Modules : " + str(self.byd_modules)) + self.log_debug("Application : " + self.byd_application) + return + + def decode_1(self,data): + # Decodieren der Nachricht auf Befehl 'MESSAGE_1'. 
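+        # Byte layout used below (values are 16-bit big-endian):
+        #   3/4 SOC, 9/10 SOH, 5/6 max cell voltage*100, 7/8 min cell voltage*100,
+        #   11/12 current*10, 13/14 battery voltage*100, 35/36 output voltage*100,
+        #   15/16 temp max, 17/18 temp min, 19/20 battery temp,
+        #   29/30 error bit field (see byd_errors), 31/32 parameter table version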
+ + self.log_debug("decode_1: " + data.hex()) + + self.byd_soc = self.buf2int16SI(data,3) + self.byd_soh = self.buf2int16SI(data,9) + + self.byd_volt_bat = self.buf2int16US(data,13) * 1.0 / 100.0 + self.byd_volt_out = self.buf2int16US(data,35) * 1.0 / 100.0 + self.byd_volt_max = self.buf2int16SI(data,5) * 1.0 / 100.0 + self.byd_volt_min = self.buf2int16SI(data,7) * 1.0 / 100.0 + self.byd_volt_diff = self.byd_volt_max - self.byd_volt_min + self.byd_current = self.buf2int16SI(data,11) * 1.0 / 10.0 + self.byd_power = self.byd_volt_out * self.byd_current + if self.byd_power >= 0: + self.byd_power_discharge = self.byd_power + self.byd_power_charge = 0 + else: + self.byd_power_discharge = 0 + self.byd_power_charge = -self.byd_power + + self.byd_temp_bat = self.buf2int16SI(data,19) + self.byd_temp_max = self.buf2int16SI(data,15) + self.byd_temp_min = self.buf2int16SI(data,17) + + self.byd_error_nr = self.buf2int16SI(data,29) + self.byd_error_str = "" + for x in range(0,16): + if (((1 << x) & self.byd_error_nr) != 0): + if len(self.byd_error_str) > 0: + self.byd_error_str = self.byd_error_str + ";" + self.byd_error_str = self.byd_error_str + byd_errors[x] + if len(self.byd_error_str) == 0: + self.byd_error_str = "no error" + + self.byd_param_t = str(data[31]) + "." + str(data[32]) + + self.log_debug("SOC : " + str(self.byd_soc)) + self.log_debug("SOH : " + str(self.byd_soh)) + self.log_debug("Volt Battery : " + str(self.byd_volt_bat)) + self.log_debug("Volt Out : " + str(self.byd_volt_out)) + self.log_debug("Volt max : " + str(self.byd_volt_max)) + self.log_debug("Volt min : " + str(self.byd_volt_min)) + self.log_debug("Volt diff : " + str(self.byd_volt_diff)) + self.log_debug("Current : " + str(self.byd_current)) + self.log_debug("Power : " + str(self.byd_power)) + self.log_debug("Temp Battery : " + str(self.byd_temp_bat)) + self.log_debug("Temp max : " + str(self.byd_temp_max)) + self.log_debug("Temp min : " + str(self.byd_temp_min)) + self.log_debug("Error : " + str(self.byd_error_nr) + " " + self.byd_error_str) + self.log_debug("ParamT : " + self.byd_param_t) + return + + def decode_2(self,data): + # Decodieren der Nachricht auf Befehl 'MESSAGE_2'. + + self.log_debug("decode_2: " + data.hex()) + + self.byd_batt_type = data[5] + if self.byd_batt_type == 0: + # HVL -> unknown specification, so 0 cells and 0 temps + self.byd_batt_str = "HVL" + self.byd_capacity_module = 4.0 + self.byd_volt_n = 0 + self.byd_temp_n = 0 + self.byd_cells_n = 0 + self.byd_temps_n = 0 + elif self.byd_batt_type == 1: + # HVM 16 Cells per module + self.byd_batt_str = "HVM" + self.byd_capacity_module = 2.76 + self.byd_volt_n = 16 + self.byd_temp_n = 8 + self.byd_cells_n = self.byd_modules * self.byd_volt_n + self.byd_temps_n = self.byd_modules * self.byd_temp_n + elif self.byd_batt_type == 2: + # HVS 32 cells per module + self.byd_batt_str = "HVS" + self.byd_capacity_module = 2.56 + self.byd_volt_n = 32 + self.byd_temp_n = 12 + self.byd_cells_n = self.byd_modules * self.byd_volt_n + self.byd_temps_n = self.byd_modules * self.byd_temp_n + else: + if (self.byd_batt_type_snr == 49) or (self.byd_batt_type_snr == 50): + self.byd_batt_str = "LVS" + self.byd_capacity_module = 4.0 + self.byd_volt_n = 7 + self.byd_temp_n = 0 + self.byd_cells_n = self.byd_modules * self.byd_volt_n + self.byd_temps_n = 0 + else: + self.byd_batt_str = "???" 
+ self.byd_capacity_module = 0.0 + self.byd_volt_n = 0 + self.byd_temp_n = 0 + self.byd_cells_n = 0 + self.byd_temps_n = 0 + + self.byd_capacity_total = self.byd_bms_qty * self.byd_modules * self.byd_capacity_module + + self.byd_inv_type = data[3] + if self.byd_batt_str == "LVS": + self.byd_inv_str = byd_invs_lvs[self.byd_inv_type] + else: + self.byd_inv_str = byd_invs[self.byd_inv_type] + + self.log_debug("Inv Type : " + self.byd_inv_str + " (" + str(self.byd_inv_type) + ")") + self.log_debug("Batt Type : " + self.byd_batt_str + " (" + str(self.byd_batt_type) + ")") + self.log_debug("Cells n : " + str(self.byd_cells_n)) + self.log_debug("Temps n : " + str(self.byd_temps_n)) + + if self.byd_cells_n > byd_cells_max: + self.byd_cells_n = byd_cells_max + if self.byd_temps_n > byd_temps_max: + self.byd_temps_n = byd_temps_max + return + + def decode_5(self,data,x): + # Decodieren der Nachricht auf Befehl 'MESSAGE_5'. + + self.log_debug("decode_5 (" + str(x) + ") : " + data.hex()) + + self.byd_diag_soc[x] = self.buf2int16SI(data,53) * 1.0 / 10.0 + self.byd_diag_volt_max[x] = self.buf2int16SI(data,5) / 1000.0 + self.byd_diag_volt_max_c[x] = data[9] + self.byd_diag_volt_min[x] = self.buf2int16SI(data,7) / 1000.0 + self.byd_diag_volt_min_c[x] = data[10] + self.byd_diag_temp_max_c[x] = data[15] + self.byd_diag_temp_min_c[x] = data[16] + + # starting with byte 101, ending with 131, Cell voltage 1-16 + for xx in range(0,16): + self.byd_volt_cell[x][xx] = self.buf2int16SI(data,101 + (xx * 2)) / 1000.0 + + self.log_debug("SOC : " + str(self.byd_diag_soc[x])) + self.log_debug("Volt max : " + str(self.byd_diag_volt_max[x]) + " c=" + str(self.byd_diag_volt_max_c[x])) + self.log_debug("Volt min : " + str(self.byd_diag_volt_min[x]) + " c=" + str(self.byd_diag_volt_min_c[x])) + self.log_debug("Temp max : " + " c=" + str(self.byd_diag_temp_max_c[x])) + self.log_debug("Temp min : " + " c=" + str(self.byd_diag_temp_min_c[x])) +# for xx in range(0,16): +# self.log_debug("Turm " + str(x) + " Volt " + str(xx) + " : " + str(self.byd_volt_cell[x][xx])) + + return + + def decode_6(self,data,x): + # Decodieren der Nachricht auf Befehl 'MESSAGE_6'. + + self.log_debug("decode_6 (" + str(x) + ") : " + data.hex()) + + for xx in range(0,64): + self.byd_volt_cell[x][16 + xx] = self.buf2int16SI(data,5 + (xx * 2)) / 1000.0 + +# for xx in range(0,64): +# self.log_debug("Turm " + str(x) + " Volt " + str(16 + xx) + " : " + str(self.byd_volt_cell[x][16 + xx])) + + return + + def decode_7(self,data,x): + # Decodieren der Nachricht auf Befehl 'MESSAGE_7'. + + self.log_debug("decode_7 (" + str(x) + ") : " + data.hex()) + + # starting with byte 5, ending 101, voltage for cell 81 to 128 + for xx in range(0,48): + self.byd_volt_cell[x][80 + xx] = self.buf2int16SI(data,5 + (xx * 2)) / 1000.0 + + # starting with byte 103, ending 132, temp for cell 1 to 30 + for xx in range(0,30): + self.byd_temp_cell[x][xx] = data[103 + xx] + +# for xx in range(0,48): +# self.log_debug("Turm " + str(x) + " Volt " + str(80 + xx) + " : " + str(self.byd_volt_cell[x][80 + xx])) +# for xx in range(0,30): +# self.log_debug("Turm " + str(x) + " Temp " + str(xx) + " : " + str(self.byd_temp_cell[x][xx])) + + return + + def decode_8(self,data,x): + # Decodieren der Nachricht auf Befehl 'MESSAGE_8'. 
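+        # Bytes 5..38 hold one temperature per byte for sensors 31..64 of tower x
+        # (continuation of the temperature block started in decode_7).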
+ + self.log_debug("decode_8 (" + str(x) + ") : " + data.hex()) + + for xx in range(0,34): + self.byd_temp_cell[x][30 + xx] = data[5 + xx] + +# for xx in range(0,34): +# self.log_debug("Turm " + str(x) + " Temp " + str(30 + xx) + " : " + str(self.byd_temp_cell[x][30 + xx])) + + return + + def decode_nop(self,data,x): +# self.log_debug("decode_nop (" + str(x) + ") : " + data.hex()) + return + + def basisdata_save(self,device): + # Speichert die Basisdaten in der sh-Struktur. + + self.log_debug("basisdata_save") + + device.state.current(self.byd_current) + device.state.power(self.byd_power) + device.state.power_charge(self.byd_power_charge) + device.state.power_discharge(self.byd_power_discharge) + device.state.soc(self.byd_soc) + device.state.soh(self.byd_soh) + device.state.tempbatt(self.byd_temp_bat) + device.state.tempmax(self.byd_temp_max) + device.state.tempmin(self.byd_temp_min) + device.state.voltbatt(self.byd_volt_bat) + device.state.voltdiff(self.byd_volt_diff) + device.state.voltmax(self.byd_volt_max) + device.state.voltmin(self.byd_volt_min) + device.state.voltout(self.byd_volt_out) + + device.system.bms(self.byd_bms) + device.system.bmu(self.byd_bmu) + device.system.bmubanka(self.byd_bmu_a) + device.system.bmubankb(self.byd_bmu_b) + device.system.batttype(self.byd_batt_str) + device.system.errornum(self.byd_error_nr) + device.system.errorstr(self.byd_error_str) + device.system.grid(self.byd_application) + device.system.invtype(self.byd_inv_str) + device.system.modules(self.byd_modules) + device.system.bmsqty(self.byd_bms_qty) + device.system.capacity_total(self.byd_capacity_total) + device.system.paramt(self.byd_param_t) + device.system.serial(self.byd_serial) + + self.last_homedata = self.now_str() + + return + + def diagdata_save(self,device): + # Speichert die Diagnosedaten in der sh-Struktur. 
+ + self.log_debug("diagdata_save") + + self.diagdata_save_one(device.diagnosis.tower1,1) + if self.byd_bms_qty > 1: + self.diagdata_save_one(device.diagnosis.tower2,2) + if self.byd_bms_qty > 2: + self.diagdata_save_one(device.diagnosis.tower3,3) + + self.last_diagdata = self.now_str() + + return + + def diagdata_save_one(self,device,x): + + device.soc(self.byd_diag_soc[x]) + device.volt_max.volt(self.byd_diag_volt_max[x]) + device.volt_max.cell(self.byd_diag_volt_max_c[x]) + device.volt_min.volt(self.byd_diag_volt_min[x]) + device.volt_min.cell(self.byd_diag_volt_min_c[x]) + device.temp_max_cell(self.byd_diag_temp_max_c[x]) + device.temp_min_cell(self.byd_diag_temp_min_c[x]) + + self.diag_plot(x) + +# self.log_debug("Turm " + str(x)) +# for xx in range(0,self.byd_cells_n): +# self.log_debug("Volt " + str(xx+1) + " : " + str(self.byd_volt_cell[x][xx])) +# for xx in range(0,self.byd_temps_n): +# self.log_debug("Temp " + str(xx+1) + " : " + str(self.byd_temp_cell[x][xx])) + + return + + def diag_plot(self,x): + + # Heatmap der Spannungen + i = 0 + j = 1 + rows = self.byd_cells_n // byd_no_of_col + d = [] + rt = [] + for r in range(0,rows): + c = [] + for cc in range(0,byd_no_of_col): + c.append(self.byd_volt_cell[x][i]) + i = i + 1 + d.append(c) + rt.append("M" + str(j)) + if ((r + 1) % (self.byd_volt_n // self.byd_modules)) == 0: + j = j + 1 + dd = np.array(d) + + fig,ax = plt.subplots(figsize=(10,4)) # Erzeugt ein Bitmap von 1000x500 Pixel + + im = ax.imshow(dd) + cbar = ax.figure.colorbar(im,ax=ax,shrink=0.5) + cbar.ax.yaxis.set_tick_params(color='white') + cbar.outline.set_edgecolor('white') + plt.setp(plt.getp(cbar.ax.axes,'yticklabels'),color='white') + + ax.set_aspect(0.25) + ax.get_xaxis().set_visible(False) + ax.set_yticks(np.arange(len(rt)),labels=rt) + + ax.spines[:].set_visible(False) + ax.set_xticks(np.arange(dd.shape[1] + 1) - .5,minor=True) + ax.set_yticks(np.arange(dd.shape[0] + 1) - .5,minor=True,size=10) + ax.tick_params(which='minor',bottom=False,left=False) + ax.tick_params(axis='y',colors='white') + + textcolors = ("white","black") + threshold = im.norm(dd.max()) / 2. + kw = dict(horizontalalignment="center",verticalalignment="center",size=9) + valfmt = matplotlib.ticker.StrMethodFormatter("{x:.3f}") + + # Loop over data dimensions and create text annotations. 
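+        # cells above half of the normalised value range get black labels, the rest
+        # white, so the annotations stay readable on the colormap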
+ for i in range(0,rows): + for j in range(0,byd_no_of_col): + kw.update(color=textcolors[int(im.norm(dd[i,j]) > threshold)]) + text = ax.text(j,i,valfmt(dd[i,j], None),**kw) + + ax.set_title("Turm " + str(x) + " - Spannungen [V]" + " (" + self.now_str() + ")",size=10,color='white') + + fig.tight_layout() + if len(self.bpath) != byd_path_empty: + fig.savefig(self.bpath + byd_fname_volt + str(x) + byd_fname_ext,format='png',transparent=True) + self.log_debug("save " + self.bpath + byd_fname_temp + str(x) + byd_fname_ext) + fig.savefig(self.get_plugin_dir() + byd_webif_img + byd_fname_volt + str(x) + byd_fname_ext, + format='png',transparent=True) + self.log_debug("save " + self.get_plugin_dir() + byd_webif_img + byd_fname_temp + str(x) + byd_fname_ext) + plt.close('all') + + # Heatmap der Temperaturen + i = 0 + j = 1 + rows = self.byd_temps_n // byd_no_of_col + d = [] + rt = [] + for r in range(0,rows): + c = [] + for cc in range(0,byd_no_of_col): + c.append(self.byd_temp_cell[x][i]) + i = i + 1 + d.append(c) + rt.append("M" + str(j)) + if ((r + 1) % (self.byd_temp_n // self.byd_modules)) == 0: + j = j + 1 + dd = np.array(d) + cmap = matplotlib.colors.LinearSegmentedColormap.from_list('',['#f5f242','#ffaf38','#fc270f']) + norm = matplotlib.colors.TwoSlopeNorm(vcenter=dd.min() + (dd.max() - dd.min()) / 2, + vmin=dd.min(),vmax=dd.max()) + + fig,ax = plt.subplots(figsize=(10,2.5)) # Erzeugt ein Bitmap von 1000x400 Pixel + + im = ax.imshow(dd,cmap=cmap,norm=norm) + cbar = ax.figure.colorbar(im,ax=ax,shrink=0.5) + cbar.ax.yaxis.set_tick_params(color='white') + cbar.outline.set_edgecolor('white') + plt.setp(plt.getp(cbar.ax.axes,'yticklabels'),color='white') + + ax.set_aspect(0.28) + ax.get_xaxis().set_visible(False) + ax.set_yticks(np.arange(len(rt)),labels=rt) + + ax.spines[:].set_visible(False) + ax.set_xticks(np.arange(dd.shape[1] + 1) - .5,minor=True) + ax.set_yticks(np.arange(dd.shape[0] + 1) - .5,minor=True,size=10) + ax.tick_params(which='minor',bottom=False,left=False) + ax.tick_params(axis='y',colors='white') + + textcolors = ("black","white") + threshold = im.norm(dd.max()) / 2. + kw = dict(horizontalalignment="center",verticalalignment="center",size=9) + valfmt = matplotlib.ticker.StrMethodFormatter("{x:.0f}") + + # Loop over data dimensions and create text annotations. 
+        for i in range(0,rows):
+            for j in range(0,byd_no_of_col):
+                kw.update(color=textcolors[int(im.norm(dd[i,j]) > threshold)])
+                text = ax.text(j,i,valfmt(dd[i,j], None),**kw)
+
+        ax.set_title("Turm " + str(x) + " - Temperaturen [°C]" + " (" + self.now_str() + ")",size=10,color='white')
+
+        fig.tight_layout()
+        if len(self.bpath) != byd_path_empty:
+            fig.savefig(self.bpath + byd_fname_temp + str(x) + byd_fname_ext,format='png',transparent=True)
+            self.log_debug("save " + self.bpath + byd_fname_temp + str(x) + byd_fname_ext)
+        fig.savefig(self.get_plugin_dir() + byd_webif_img + byd_fname_temp + str(x) + byd_fname_ext,
+                    format='png',transparent=True)
+        self.log_debug("save " + self.get_plugin_dir() + byd_webif_img + byd_fname_temp + str(x) + byd_fname_ext)
+        plt.close('all')
+
+        return
+
+    def buf2int16SI(self,byteArray,pos):       # signed, big-endian
+        result = byteArray[pos] * 256 + byteArray[pos + 1]
+        if (result >= 32768):
+            result -= 65536
+        return result
+
+    def buf2int16US(self,byteArray,pos):       # unsigned, big-endian
+        result = byteArray[pos] * 256 + byteArray[pos + 1]
+        return result
+
+    def now_str(self):
+        return self.now().strftime("%d.%m.%Y, %H:%M:%S")
+
+    def log_debug(self,s1):
+        self.logger.debug(s1)
+
+    def log_info(self,s1):
+        self.logger.warning(s1)
+
+    # webinterface init method
+    def init_webinterface(self):
+        """
+        Initialize the web interface for this plugin
+
+        This method is only needed if the plugin is implementing a web interface
+        """
+        try:
+            self.mod_http = Modules.get_instance().get_module(
+                'http')  # try/except to handle running in a core version that does not support modules
+        except:
+            self.mod_http = None
+        if self.mod_http is None:
+            self.logger.error("Not initializing the web interface")
+            return False
+
+        import sys
+        if not "SmartPluginWebIf" in list(sys.modules['lib.model.smartplugin'].__dict__):
+            self.logger.warning("Web interface needs SmartHomeNG v1.5 and up. 
Not initializing the web interface")
+            return False
+
+        # set application configuration for cherrypy
+        webif_dir = self.path_join(self.get_plugin_dir(), 'webif')
+        config = {
+            '/': {
+                'tools.staticdir.root': webif_dir,
+            },
+            '/static': {
+                'tools.staticdir.on': True,
+                'tools.staticdir.dir': 'static'
+            }
+        }
+
+        # Register the web interface as a cherrypy app
+        self.mod_http.register_webif(WebInterface(webif_dir,self),
+                                     self.get_shortname(),
+                                     config,
+                                     self.get_classname(),
+                                     self.get_instance_name(),
+                                     description='')
+
+        return True
diff --git a/byd_bat/locale.yaml b/byd_bat/locale.yaml
new file mode 100644
index 000000000..f29ff2617
--- /dev/null
+++ b/byd_bat/locale.yaml
@@ -0,0 +1,40 @@
+# translations for the web interface
+plugin_translations:
+    # Translations for the plugin specially for the web interface
+    'BYD Home': {'de': '=', 'en': 'BYD Home'}
+    'BYD Diagnose': {'de': '=', 'en': 'BYD Diagnostics'}
+    'BYD Spannungen': {'de': '=', 'en': 'BYD Voltages'}
+    'BYD Temperaturen': {'de': '=', 'en': 'BYD Temperatures'}
+
+    'Laden': {'de': '=', 'en': 'Charging'}
+    'Gesamtkapazität': {'de': '=', 'en': 'Total capacity'}
+    'Batterieladung': {'de': '=', 'en': 'Battery charge'}
+    'Ladeleistung': {'de': '=', 'en': 'Charging power'}
+    'Entladeleistung': {'de': '=', 'en': 'Discharge power'}
+    'Bilder-Pfad': {'de': '=', 'en': 'Image path'}
+    'Basisdaten': {'de': '=', 'en': 'Basic values'}
+    'Diagnosedaten': {'de': '=', 'en': 'Diagnostic values'}
+
+    'Leistung': {'de': '=', 'en': 'Power'}
+    'Spannung Ausgang': {'de': '=', 'en': 'Voltage output'}
+    'Strom Ausgang': {'de': '=', 'en': 'Current output'}
+    'Spannung Batterie': {'de': '=', 'en': 'Voltage battery'}
+    'Spannung Batteriezellen max': {'de': '=', 'en': 'Voltage battery cell max'}
+    'Spannung Batteriezellen min': {'de': '=', 'en': 'Voltage battery cell min'}
+    'Spannung Batteriezellen Differenz': {'de': '=', 'en': 'Voltage battery cell delta'}
+    'Temperatur Batterie': {'de': '=', 'en': 'Temperature battery'}
+    'Temperatur Batterie max': {'de': '=', 'en': 'Temperature battery max'}
+    'Temperatur Batterie min': {'de': '=', 'en': 'Temperature battery min'}
+
+    'Wechselrichter': {'de': '=', 'en': 'Inverter'}
+    'Batterietyp': {'de': '=', 'en': 'Battery type'}
+    'Seriennummer': {'de': '=', 'en': 'Serial number'}
+    'Türme': {'de': '=', 'en': 'Towers'}
+    'Module pro Turm': {'de': '=', 'en': 'Modules per tower'}
+    'Parameter': {'de': '=', 'en': 'Parameter'}
+    'Fehler': {'de': '=', 'en': 'Error'}
+
+    # Alternative format for translations of longer texts:
+    'Hier kommt der Inhalt des Webinterfaces hin.':
+        de: '='
+        en: 'Here goes the content of the web interface.'
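
The buf2int16SI()/buf2int16US() helpers added in byd_bat/__init__.py above decode big-endian 16-bit words from the BYD response buffer by hand (the signed variant maps 0x8000 to -32768). A minimal sketch of the same conversions using the standard-library int.from_bytes — illustration only, not part of the plugin code::

    def buf2int16si(byte_array, pos):
        # signed, big-endian: bytes 0xFF 0xFE -> -2
        return int.from_bytes(byte_array[pos:pos + 2], byteorder='big', signed=True)

    def buf2int16us(byte_array, pos):
        # unsigned, big-endian: bytes 0xFF 0xFE -> 65534
        return int.from_bytes(byte_array[pos:pos + 2], byteorder='big', signed=False)

    assert buf2int16si(bytes([0xFF, 0xFE]), 0) == -2
    assert buf2int16us(bytes([0xFF, 0xFE]), 0) == 65534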
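
For orientation, a sketch of how the plugin could be wired up, based on the ip and imgpath parameters declared in the plugin.yaml below and on the byd_struct documented in user_doc.rst. The top-level key in etc/plugin.yaml and the imgpath value are example placeholders, not values taken from this patch::

    # etc/plugin.yaml (example values)
    byd_bat:
        plugin_name: byd_bat
        ip: 192.168.16.254                        # IP of the BYD battery master (assigned via DHCP)
        imgpath: /usr/local/smarthome/var/byd     # optional extra directory for the heatmap images

    # items/byd.yaml
    byd:
        struct: byd_bat.byd_struct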
diff --git a/byd_bat/plugin.yaml b/byd_bat/plugin.yaml new file mode 100644 index 000000000..67944d0de --- /dev/null +++ b/byd_bat/plugin.yaml @@ -0,0 +1,334 @@ +# Metadata for the plugin +plugin: + # Global plugin attributes + type: interface # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Plugin fuer die Anzeige von Daten von BYD Batterien' + en: 'Plugin to display data from BYD batteries' + maintainer: Matthias Manhart + tester: Matthias Manhart + state: develop # change to ready when done with development +# keywords: iot xyz +# documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1886748-support-thread-f%C3%BCr-das-byd-batterie-plugin + + version: 0.0.2 # Plugin version (must match the version specified in __init__.py) + sh_minversion: 1.9 # minimum shNG version to use this plugin +# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) +# py_minversion: 3.6 # minimum Python version to use for this plugin +# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) + multi_instance: false # plugin supports multi instance + restartable: true + classname: byd_bat # class containing the plugin + +parameters: + + ip: + type: ip + default: '192.168.16.254' + description: + de: "IP-Adresse der BYD Batterie (Master)" + en: "IP address of the BYD battery (master)" + + imgpath: + type: str + default: '' + description: + de: "Pfad fuer Heatmap-Bilder (z.Bsp. fuer smartvisu)" + en: "Path for heatmap images (e.g. for smartvisu)" + +item_attributes: + + byd_root: # only used internally - do not use for own items ! + type: bool + mandatory: True + description: + de: 'Root-Flag fuer das Plugin' + en: 'Root-Flag for plugin' + +item_structs: + + byd_struct: + + byd_root: true # must stay here - used by the plugin internally - do not remove ! 
+ + info: + + connection: # shows connection status of plugin to battery + type: bool + initial_value: false + enforce_updates: true + visu_acl: ro + + state: + + current: # [A] Charge / Discharge Current + type: num + visu_acl: ro + database: init + + power: # [W] Power (+ discharge / - charge battery) + type: num + visu_acl: ro + database: init + + power_charge: # [W] Power charging + type: num + visu_acl: ro + database: init + + power_discharge: # [W] Charge discharging + type: num + visu_acl: ro + database: init + + soc: # [%] SOC + type: num + visu_acl: ro + database: init + + soh: # [%] SOH + type: num + visu_acl: ro + database: init + + tempbatt: # [°C] Battery Temperature + type: num + visu_acl: ro + database: init + + tempmax: # [°C] Max Cell Temp + type: num + visu_acl: ro + database: init + + tempmin: # [°C] Min Cell Temp + type: num + visu_acl: ro + database: init + + voltbatt: # [V] Battery Voltage + type: num + visu_acl: ro + database: init + + voltdiff: # [V] Max - Min Cell Voltage + type: num + visu_acl: ro + database: init + + voltmax: # [V] Max Cell Voltage + type: num + visu_acl: ro + database: init + + voltmin: # [V] Min Cell Voltage + type: num + visu_acl: ro + database: init + + voltout: # [V] Output Voltage + type: num + visu_acl: ro + database: init + + system: + + bms: # F/W BMS + type: str + cache: true + visu_acl: ro + + bmu: # F/W BMU + type: str + cache: true + visu_acl: ro + + bmubanka: # F/W BMU-BankA + type: str + cache: true + visu_acl: ro + + bmubankb: # F/W BMU-BankB + type: str + cache: true + visu_acl: ro + + batttype: # Battery Type + type: str + cache: true + visu_acl: ro + + errornum: # Error (numeric) + type: num + cache: true + visu_acl: ro + + errorstr: # Error (string) + type: str + cache: true + visu_acl: ro + + grid: # Parameter Table + type: str + cache: true + visu_acl: ro + + invtype: # Inverter Type + type: str + cache: true + visu_acl: ro + + modules: # modules (count) + type: num + cache: true + visu_acl: ro + + bmsqty: # tours (count) + type: num + cache: true + visu_acl: ro + + capacity_total: # Total capacitiy (all modules) [kWh] + type: num + cache: true + visu_acl: ro + + paramt: # F/W BMU + type: str + cache: true + visu_acl: ro + + serial: # Serial number + type: str + cache: true + visu_acl: ro + + diagnosis: + + tower1: + + soc: # [%] SOC + type: num + visu_acl: ro + database: init + + volt_max: + + volt: # max voltage [V] + type: num + visu_acl: ro + database: init + + cell: # cell number of max voltage + type: num + visu_acl: ro + database: init + + volt_min: + + volt: # min voltage [V] + type: num + visu_acl: ro + database: init + + cell: # cell number of min voltage + type: num + visu_acl: ro + database: init + + temp_max_cell: # cell number of max temperature + type: num + visu_acl: ro + database: init + + temp_min_cell: # cell number of min temperature + type: num + visu_acl: ro + database: init + + tower2: + + soc: # [%] SOC + type: num + visu_acl: ro + database: init + + volt_max: + + volt: # max voltage [V] + type: num + visu_acl: ro + database: init + + cell: # cell number of max voltage + type: num + visu_acl: ro + database: init + + volt_min: + + volt: # min voltage [V] + type: num + visu_acl: ro + database: init + + cell: # cell number of min voltage + type: num + visu_acl: ro + database: init + + temp_max_cell: # cell number of max temperature + type: num + visu_acl: ro + database: init + + temp_min_cell: # cell number of min temperature + type: num + visu_acl: ro + database: init + + tower3: + + soc: # [%] SOC + 
type: num
+                    visu_acl: ro
+                    database: init
+
+                volt_max:
+
+                    volt:                    # max voltage [V]
+                        type: num
+                        visu_acl: ro
+                        database: init
+
+                    cell:                    # cell number of max voltage
+                        type: num
+                        visu_acl: ro
+                        database: init
+
+                volt_min:
+
+                    volt:                    # min voltage [V]
+                        type: num
+                        visu_acl: ro
+                        database: init
+
+                    cell:                    # cell number of min voltage
+                        type: num
+                        visu_acl: ro
+                        database: init
+
+                temp_max_cell:               # cell number of max temperature
+                    type: num
+                    visu_acl: ro
+                    database: init
+
+                temp_min_cell:               # cell number of min temperature
+                    type: num
+                    visu_acl: ro
+                    database: init
+
+logic_parameters: NONE
+
+plugin_functions: NONE
diff --git a/byd_bat/requirements.txt b/byd_bat/requirements.txt
new file mode 100644
index 000000000..6ccafc3f9
--- /dev/null
+++ b/byd_bat/requirements.txt
@@ -0,0 +1 @@
+matplotlib
diff --git a/byd_bat/user_doc.rst b/byd_bat/user_doc.rst
new file mode 100644
index 000000000..9bddd962d
--- /dev/null
+++ b/byd_bat/user_doc.rst
@@ -0,0 +1,106 @@
+.. index:: Plugins; byd_bat
+.. index:: byd_bat
+
+=======
+byd_bat
+=======
+
+.. image:: webif/static/img/plugin_logo.png
+   :alt: plugin logo
+   :width: 300px
+   :height: 300px
+   :scale: 50 %
+   :align: left
+
+Anzeigen von Parametern eines BYD Energiespeichers. Die Parameter entsprechen den Daten, die in der Software Be_Connect_Plus_V2.0.2 angezeigt werden.
+
+Es werden 1-3 Türme unterstützt.
+
+Die Grunddaten werden alle 60 Sekunden aktualisiert. Die Diagnosedaten werden beim Start des Plugins und dann immer zur vollen Stunde abgerufen.
+
+Die Spannungen und Temperaturen in den Modulen werden mit Hilfe von Heatmaps dargestellt. Diese werden im Web Interface angezeigt. Zusätzlich können diese Bilder auch in ein weiteres Verzeichnis kopiert werden (z.Bsp. für smartvisu).
+
+Das Plugin benötigt nur ein Item mit der folgenden Deklaration::
+
+    byd:
+        struct: byd_bat.byd_struct
+
+Alle verfügbaren Daten werden im Struct 'byd_struct' bereitgestellt. Diverse Parameter besitzen bereits die Eigenschaft 'database: init', so dass die Daten für die Visualisierung bereitgestellt werden.
+
+Anforderungen
+-------------
+
+Der BYD Energiespeicher muss mit dem LAN verbunden sein. Die IP-Adresse des BYD wird über DHCP zugewiesen und muss ermittelt werden. Diese IP-Adresse muss in der Plugin-Konfiguration gespeichert werden.
+
+Notwendige Software
+~~~~~~~~~~~~~~~~~~~
+
+* matplotlib
+
+Unterstützte Geräte
+~~~~~~~~~~~~~~~~~~~
+
+Folgende Typen werden unterstützt:
+
+* HVS (noch nicht getestet)
+* HVM (getestet mit HVM 19.3kWh und 2 Türmen)
+* HVL (noch nicht getestet)
+* LVS (noch nicht getestet)
+
+Bitte Debug-Daten (level: DEBUG) von noch nicht getesteten BYD Energiespeichern an den Plugin-Autor senden. Beim Start von SmartHomeNG werden die Diagnosedaten sofort ermittelt.
+
+Konfiguration
+-------------
+
+plugin.yaml
+~~~~~~~~~~~
+
+Bitte die Dokumentation lesen, die aus den Metadaten der plugin.yaml erzeugt wurde.
+
+
+items.yaml
+~~~~~~~~~~
+
+Bitte die Dokumentation lesen, die aus den Metadaten der plugin.yaml erzeugt wurde.
+
+
+logic.yaml
+~~~~~~~~~~
+
+Bitte die Dokumentation lesen, die aus den Metadaten der plugin.yaml erzeugt wurde.
+
+
+Funktionen
+~~~~~~~~~~
+
+Bitte die Dokumentation lesen, die aus den Metadaten der plugin.yaml erzeugt wurde.
+
+Web Interface
+-------------
+
+Ein Web Interface ist implementiert und zeigt die eingelesenen Daten an.
+
+Beispiele
+---------
+
+Oben rechts werden die wichtigsten Daten zum BYD Energiespeicher angezeigt.
+
+Im Tab "BYD Home" sind die Grunddaten des Energiespeichers dargestellt:
+
+.. 
image:: assets/home.JPG + :class: screenshot + +Im Tab "BYD Diagnose" werden Diagnosedaten angezeigt: + +.. image:: assets/diag.JPG + :class: screenshot + +Im Tab "BYD Spannungen" werden die Spannungen der Module als Heatmap angezeigt: + +.. image:: assets/volt.JPG + :class: screenshot + +Im Tab "BYD Temperaturen" werden die Temperaturen der Module als Heatmap angezeigt: + +.. image:: assets/temp.JPG + :class: screenshot diff --git a/byd_bat/webif/__init__.py b/byd_bat/webif/__init__.py new file mode 100644 index 000000000..0bdd656ac --- /dev/null +++ b/byd_bat/webif/__init__.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2023 Matthias Manhart smarthome@beathis.ch +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# This file implements the web interface for the byd_bat plugin. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . +# +######################################################################### + +import datetime +import time +import os +import json + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +import cherrypy +import csv +from jinja2 import Environment, FileSystemLoader + + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after beeing rendered + """ + pagelength = self.plugin.get_parameter_value('webif_pagelength') + tmpl = self.tplenv.get_template('index.html') + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) 
+ return tmpl.render(p=self.plugin, + webif_pagelength=pagelength, + items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), + item_count=0) + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. + """ + # get the new data + data = {} + data['bydip'] = self.plugin.ip + data['imppath'] = self.plugin.bpath + data['last_homedata'] = self.plugin.last_homedata + data['last_diagdata'] = self.plugin.last_diagdata + + data['current'] = f'{self.plugin.byd_current:.1f}' + " A" + data['power'] = f'{self.plugin.byd_power:.1f}' + " W" + data['power_charge'] = f'{self.plugin.byd_power_charge:.1f}' + " W" + data['power_discharge'] = f'{self.plugin.byd_power_discharge:.1f}' + " W" + data['soc'] = f'{self.plugin.byd_soc:.1f}' + " %" + data['soh'] = f'{self.plugin.byd_soh:.1f}' + " %" + data['tempbatt'] = f'{self.plugin.byd_temp_bat:.1f}' + " °C" + data['tempmax'] = f'{self.plugin.byd_temp_max:.1f}' + " °C" + data['tempmin'] = f'{self.plugin.byd_temp_min:.1f}' + " °C" + data['voltbatt'] = f'{self.plugin.byd_volt_bat:.1f}' + " V" + data['voltdiff'] = f'{self.plugin.byd_volt_diff:.3f}' + " V" + data['voltmax'] = f'{self.plugin.byd_volt_max:.3f}' + " V" + data['voltmin'] = f'{self.plugin.byd_volt_min:.3f}' + " V" + data['voltout'] = f'{self.plugin.byd_volt_out:.1f}' + " V" + + data['bms'] = self.plugin.byd_bms + data['bmu'] = self.plugin.byd_bmu + data['bmubanka'] = self.plugin.byd_bmu_a + data['bmubankb'] = self.plugin.byd_bmu_b + data['batttype'] = self.plugin.byd_batt_str + data['errorstr'] = self.plugin.byd_error_str + " (" + str(self.plugin.byd_error_nr) + ")" + data['grid'] = self.plugin.byd_application + data['invtype'] = self.plugin.byd_inv_str + data['modules'] = str(self.plugin.byd_modules) + data['bmsqty'] = str(self.plugin.byd_bms_qty) + data['capacity_total'] = f'{self.plugin.byd_capacity_total:.2f}' + " kWh" + data['paramt'] = self.plugin.byd_param_t + data['serial'] = self.plugin.byd_serial + + data['t1_soc'] = f'{self.plugin.byd_diag_soc[1]:.1f}' + " %" + data['t1_volt_max'] = f'{self.plugin.byd_diag_volt_max[1]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_max_c[1]) + ")" + data['t1_volt_min'] = f'{self.plugin.byd_diag_volt_min[1]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_min_c[1]) + ")" + data['t1_temp_max_cell'] = str(self.plugin.byd_diag_temp_max_c[1]) + data['t1_temp_min_cell'] = str(self.plugin.byd_diag_temp_min_c[1]) + if self.plugin.byd_bms_qty > 1: + data['t2_soc'] = f'{self.plugin.byd_diag_soc[2]:.1f}' + " %" + data['t2_volt_max'] = f'{self.plugin.byd_diag_volt_max[2]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_max_c[2]) + ")" + data['t2_volt_min'] = f'{self.plugin.byd_diag_volt_min[2]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_min_c[2]) + ")" + data['t2_temp_max_cell'] = str(self.plugin.byd_diag_temp_max_c[2]) + data['t2_temp_min_cell'] = str(self.plugin.byd_diag_temp_min_c[2]) + else: + data['t2_soc'] = "-" + data['t2_soc'] = "-" + data['t2_volt_max'] = "-" + data['t2_volt_min'] = "-" + data['t2_temp_max_cell'] = "-" + data['t2_temp_min_cell'] = "-" + if self.plugin.byd_bms_qty > 2: + data['t3_soc'] = f'{self.plugin.byd_diag_soc[3]:.1f}' + " %" + data['t3_soc'] = f'{self.plugin.byd_diag_soc[3]:.1f}' + " %" + data['t3_volt_max'] = 
f'{self.plugin.byd_diag_volt_max[3]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_max_c[3]) + ")" + data['t3_volt_min'] = f'{self.plugin.byd_diag_volt_min[3]:.3f}' + " V (" + str(self.plugin.byd_diag_volt_min_c[3]) + ")" + data['t3_temp_max_cell'] = str(self.plugin.byd_diag_temp_max_c[3]) + data['t3_temp_min_cell'] = str(self.plugin.byd_diag_temp_min_c[3]) + else: + data['t3_soc'] = "-" + data['t3_soc'] = "-" + data['t3_soc'] = "-" + data['t3_volt_max'] = "-" + data['t3_volt_min'] = "-" + data['t3_temp_max_cell'] = "-" + data['t3_temp_min_cell'] = "-" + + # return it as json the the web page + try: + return json.dumps(data) + except Exception as e: + self.logger.error("get_data_html exception: {}".format(e)) diff --git a/byd_bat/webif/static/img/diag.JPG b/byd_bat/webif/static/img/diag.JPG new file mode 100644 index 0000000000000000000000000000000000000000..9c12b92dbd31d2ad5e13aec4857650aa56d0d96d GIT binary patch literal 67618 zcmeFZ1ymi&vNyhQOK=PBkl^kXf(IwKJ8aw?0>RxS1PJaD+}%QOcX#(qkbFBi_uQLv z&$;h=_y6Ae*7~oP*{hkJnV#;hUsqRGSJm)s_S-4|RZ2`!3;+QE0nh{g0N>V7A4Ofw zO#lEH8Ndqw000kwhTsK2f^%r#3#lM;05muc0nTfsefWL`kO2Uwllue!IPkqE;A;VJ z0ubzj|Ge3e7XrT!_}_?tjJ~6znYq3LiHyFrnZCUv7z3DSs2@57 zCRleUiiZAszBM-HM;?lU2>^(V{YU;E`UQOYcMN~&7ylbM=@-qv5cq|_F9d!e@C$)| zAi%=J!pg(M#ly@@!py?M#>KebUrM zO=51uPp!%>!z5!XVr*tE;cjoNA{X2cLn1iHVQ# zpUUwueh1^Hj{f)d{6B@30H2W|kKy;e+t~lsypplqe{Hla%?18hz5b0-@Ppqn;3mNQ zhk$^<$M|#We}?179WeWW-}^hWg5M#z00%^PHBB7LSCHmGK1&^$Xe;-z$NDgNH{%K*U5s!hAu1 zOYq{~KEAaB(BL7KpktsQ$N-RN5Kw3k-?{}HK2@M4U0}b|PLU@6n1EA4h&`FtvVKEf+;mGVTS>DEcgeQMl*@2}vc1po& zVDE>3h>e5$43CnEn&$ZnHg*n9E^Z!?SE6F#5|UC%%5PLu)zmc%jf_o9&CD$v9G#q9 zT;1IL-vtB)1&4&j#>FQjCMBn&W@YE(=H(X@7JaI!uBol7Z)oi7>h9_7>mT?$J~25p zJu^EuzqY=yxwXBsySIOKesOtqeFM7v`kgKa0Mu_}{hhLZq6-a77bG+^6g1p-x*#B3 zzzGTs8ite^7F}2YPTvlLjO8sn=F6Cml^qD=tcs^t2KHl!*c5DQlxN>b`$5@%jxfLf zPn7+guz%1s4|oCv0UkUkG=L!Bt5_&aCglG}dY&c9AkReCBAyH234L2`m8g&L`s0X2 zM!6JS=b9*{Jj8#PSR@tcY)1H*a=rmxn9i?X+1#zQb^20eeZ14sDg6fUtXK;#rR-Qg$dX+J%d;_%N-_t-^ydbXW*_zHVObBZTUR}?FbW;M;y7R6h&xFDPsp4Y`qrTF z@}k*1PVFubhfl&wrzJj9jJT**HmBvNE{qH`1>r{bVN^e==K}C3vaM#qP^+*zN(guBhd1~U@8dt{askU2&^d*&)&0N zmdLHOEfNe)_e+&&BISr}VjR;7Zaec|2& zmg43yI43+&g(ZX9_Pph&+UB8|ULUZ+b_yxZzP_QUt4NDooXV^^RC>Q5J&EP#Rz#fB1l14{ zEg5O{JHcQ`+u?I8xFWR21@PWJJ&*KON>eaD^Y6Mz7a$@CiHhGZ#9BkS8}wUyMap61 zG`udfah3z;BkF*ifzC(nvyPUf0koC&*6OVc@G9x5@U*1fbt}311_)`zZ>gBHyc9$g z!7e0h83@U#jSo(z8DwRe8-+*@;v#|VrGcu;s*m98n^y^1mBCjy3ktT%OTR*9&=2?q z7)O@-Qu*!)02lfQit7psF4UXRjEH#bvgKv3=;Vx|-4^^@G<+j(T=i;U`wQDSel9kM z6$P#9xQ&sgz*udn_k|^;=GJvefq@d&uaud5QpCjI`P`?Td%tjHf!9m)GB=P8Gju}p za8vcvpJQRkSQY~~arNVp`SR@3rs@W#V|t;_sA`*F7`ixItL4nzvU~$LmT@wcNkBmp zy;GGI`aaWsODc>ZPGJ#vgzTk?oSg0nE!nreX%ta@V(>c?xAL%4SW(6MbsQSoVTP7u z3k5QC_H3b&+7(THOyW+@t}hSNIUbomc;hbt>#oo8a!o#U@fVre%*TX#z;Sm8Fpr2~ zLd+;=8R;-g9(j{!xcaCB&h#gCVo-L|s9G(AATO(}!H2XHq^+8!R+WH?wg>w(7=jF+ zC<@ze+2qOgk#d_H&4AQ8x@v#WRXWh|}d#V_a9BCnS(UL@FYEcve3Zc zDkb2M!&y)SGAAc}X|h5Ae@aF3;WsMtjuYnBCSSM2kB{!hh%VCX`+Rxh2HLrx&$OC} zSXy);bbv{T&ffr*f#;gab)h0#$2hQk?xZ#px5O+DDV;AdpMR zw=}=taufJ5Ikdw`iaH2zoFNylEm0m{ws7DlZwQSqYRS})4`v0x3z!<#XRHAjwN`Ol zGkTD6bTrLJm(|op_^jBHQsWGeT&EK3#kLl|Xd*TxHH_er5K>|dTHsxOv@!pvIk#+H z$BEaxO*z?3e=X0!>W3;Jjg1sQWJ*oVXHf2{q|q>4y^z?d<~?Ta#-0fLr*kCbDORFUaWeS(juyveAL4&V)?h3eI~Y$Y09Z}jt! 
zc1b)(@)0;Sp-I?89PY9|Et0(J%9YkHSdm;SbVBlYPmK0jguQYC;PF=(O>- zuiv{2PGkKh?h%E`nP+NH_8v>4&ZWa zn<(Gg%9>+3Wx!ByAH0zriI_ z;!IB?3v^xTQ>i&vrVU`sZEBd}G}&Y`j1PS?)wtRXU8RdkAG-zX2F=Gi?#N+9Sm@2G z8*;XYHR<4cYBvK)^>XVFm*`U@G;P(0iG*^e%0YAWg5~LbBOo58vkqF6N~pO{wLH1o zR>QD`Ht>S{-V=|m*T2DLQO--W}^3k)a&bF8EYBF zFg;{_SheMXs(}&UWl=G8!+Ig5E)T^L;(_K&Ho~Dq)S$M>0j58pj=&b8=#6TPrsgA{ zCGOhB;~SuZwfv)%Pw%I_BzwVGD}7`GrHDhjnX5c}{k14;HO;-__2GM#YKqP0xSQTi zKFVWP@HJc0Y)bgA$8PPrNZajtx$mDRx(np{l#3Pt)3tm|6-oVA(=b*91EgZNR-~=A zIdQydS|7z!Lb624bobllhQ+d;9u!ARo)jNY)p_%bT>NH7Nn-q7^ii5o1A1=EZ3f`YJLxWLT2amCVMYo=hU+{owlS+(2CEpw9t z>b`S$Sk zc$Z@2_&U`R>0%Y(pno$|_*WMSb6ro~Y4J`kjy}l=YuId?5{6uc3^yOlGUf)UkT-9T zg;nn5lzKc+enMmvF8E%)YAQp0^u)4s;`KD2oY_48R!W+LGAs2N;}h6JjL|1=BCMwm zx-#(j_-)oo$>Ww(x1&Tz1BgHTeLq^Nx)Q-v-|(T-vS< z`k6}5nsZFEV9v@uUSuOtQy@{bz@WjY{ZMw2ti->-WEL$!{n z3i57$1CDPNY`1Is0N`mD!P|{JnD>5fkr=bootj%c!z@`{7@*4-y@(dMxHH~J{8I&) z$SO8)=GOu*Mr|&iB6kj1gWs7>9!S!#dD9t!Uvm)y$>_aRoudAjPZ#C!?OF|H%SHsIcLp*`Q-5b?I^zrp1EZh}@XT0=S z;hCrs|Q&w#Hxvm2AcwbUPYocqz(~+Xz3ttae{B7p_je(%}pDA@hPW%$#jfS##N|7*=oBWeGb z^Cn&+UzV4#uwE>{ia(4Xw92@OYnTN~WOWt1yBfw6ckA9y|#eU-gdOP>|p z;iw;sBw_exuKuqNUIRU~X!=YCPeNdEPS^(j)?#4a7r7O2isL~)*-@zqatgh#s;WQZ z3emau73Lu^+h_f$pm;jcdoK=YZGj1emb=#%4WzAZPa@Gc$pYl>jYses>egndsEils6v6mpbTgJI8~Xr%dylgyh%VV?9I+ZD!C4r zbl2YYb%}RlDV(SIxa9n2k|lddDycbk5UOUnQxyYZo?;Ts7*ibz$49Pyvu1!>;g;iWOP3C0r#q@Lj z(2fy#^V5{rg`$#CQVHZ<1i72Pa|y&9M=A&#ME(*Ic>RW$bFezv<6^wXRaK_Bwt2dS zc0um&tVjo8qjI^#015A-%dFIieQqSe(ExW@&`)*lK?JPPvz)y4SJB*U&bBMo))Kvd zYUK1Rwsq}4bSwS(-z6!seA&$Yu-Ek$B9PgX#x8Z<&*YVGVIoL#+&L@JmhQr1DEs2{ zlw(n;gR-L4c1oL()&@qg)M- zfL7B?YOF3>1SW`$D{F$_Y|qye{13*<4B{}Ku(%s-Ot2;AX8BV2dIlXRPRe_AWoRTu zcmCa$3U-1OTKHYgmtVVlUE1J#+lx}ZrfQb=;Y0V-*wEn_b=$*b#)jAkI#|bDk$hwy zSJ3c~qY$sUFy*Z3PX7kD8TX06=aW1EYXH=s8A(y1i2=C(Jr7M7(i%sbbn8q%pIk6r zmKMcDzfK&yOmI3}m^-_# zTDWat$yXcsY5|=nRzmt|fd?mdN1v1>F}f$K!-mLOvJ-CV;Y$kj?y*~jEx+J5bw&fj z&TkJcGm*deO4>EOXV5U1h+FWWA?t;&;E*i*-nXWl?&HPYW>?#TOY5u`s4?z8(Q7qr4Y$ zkP2K0Vty~$6h7GOq@6M>hJst9Lw9Bs6y)e7QI?%wR%)ZqK2;NcQQ#&e_N9$V1XU_M zJ=s*ZB{tdmL#`u-#2ewkF;oH(#x z{ymR)jj3Fvo+7z26rbR;B~A8U&WipbSDwjSJ>16#Bn9ZMDpcl7@=;y|Int zDI_@+b0f4E0yBm3m5k6{BGsDZae0P&masXl8$lI-CmOIrnd}QI$p&E`xFlls$uuC|;C?jVqpAk%9jbv!Im|?$5RPoNL>cjiw~b1{-95Dr?0j zoI6aP%`&p3)4QyQTr$fCk)9u#*w>v(RQR-e-lQsjS=6GbjA(g}LQM=|5LI^UJRR)O z^JOb2Dd1CDyDMA^T+p5{NnB^8WA1aqXA9;uj)&^U*QD@q{EC-f2)gKUHw|QPx+vDr zo?WO6ugUGUe@4;kS<|K0KyWnDCI#3vE9YqirPtxj_{DWam^$dMKckS2BVD0yK*G1Y zB}Ux%a{G3ew%iBX)rlnCbmp=qbXnoaIDUU;_YRjB_gXz4kt-WSsi^TcK&M>hH-NE^ zg^$(V%G}+3Tg^k6Ag#-d%g(dwA!(~}bDShqmD<$O_#14uyVuNG20szO2(9{d;xrT9P&QmdVDB%hHG$a8EW& z9KtsMabNMkw$SGJuJvvnRlh@aIRbt^U7e+5^rvLo&y%brR$^$MW)m|n+Qlrc*VK#z z*SgDoBv2X2S<;lrZ&~C-2Rfg+l?R@0Aofvk?gt#+=743eN3(k=d zRGK8cU?Af}t9E``!K`jX2NbKgZZ%4e4$Yx9lV~P{?Txh3GRhg z1VH`A9>3UvWm&}FBu}Hy#VG*+v&v1vz8zf`s0WZ-B}y)H^0cM9Y;pH;kmIQ1K_E5{ zoVVdOdh#|pJG!A?f06-Z=f}+2eI#pbnAsY0YGJ`g+N##Z5MLb+POA`Hl5LgNPxY>9YeN z?AzKpLgB-#Nt^qrx*AsaSLd`$@J%7yykhQOrwEnS-{hs7Uc5e2olSR~ zJ9DlO$Av6}FkC#^-&PvolhwMx__05L5`SsZ1mqzX3taG+xH5DcfW1-tR zRzp87MT?OjK81W>t#f5wwjo2_bj+gGTnA55aDw98>qG!HREB~Oe*Ay5_WktC9xrLI zcvxbXV&uFG!IQLTPlpU=9=k*)ybfaLlf9A!_H6~X%Em6Rd1~>4#o9{_SY$8PqPK4; zkh2Re^T?LLPIOWU@BCX;Ij!C#aGvW&UI;;y9up9+$oe9@3gU__pigh;5)-ihyutDk z_l8Zssm{97mrmrd{H*YK^fy3}h8JnTAB@~8-Yq2yAP=FIsS`!jx~kwovQ7wlxJ-&C zk8BVhMeR9s(df1(OG?@4@zuw^0ji%VPOm>My<%yyFCZlJHJl|dsi}VIb7${mAXw!_ zJeL|ayYppf+v=6o1FXO!^N4IUMCFh0AB8FFlOxhPD^{7uQ8xy3Zlw{m=7NFSoI$5~ z?s;x@_rJMM%Q)9nqrL&wbj(y5qpoePqvjc25$R@ZS>38veJIUnnjqYT`t+O8YJ+$k zc_d5$M19A~dvyN|Q2UUb->-?!ac9W;ed>gY8NZ@C#8NhjwFP9tfM=3TNUG@2P~jbN 
zA@T<|!5FJz8B6SGQ);BZrFvLCNM)!4ZJCFM+NAtNUEMDgnwq^viDJ7&u#%Zbn5wk7 zo{F@|HYVlQ0nO&24O|tE`FLzjiG>TGt+zrhTVO zl#>p~L@+BMe(E|Xer7rEx2xFF?Tk)m!!1)tYCH_qdQN;%EF*!xlm6Ep}?PS)E3RbnUKb2kS#DVQonjn{co;mR*yRymTwKM}) zcp0+h$&;DtG12zhxQ=D`8LMXL91V+W_o0BL#JsAMjnhkekp@j}VEbAM%6junSyjg) zqN_Kd>;Caw*LhwYU+Oz&EB+LWYAaYw(U_(u+ChUexgK7aT6CUsB_QeUgKQAh*=39H z;o;?UT_XHtLTlo(!8p3c^lN0Xj<^Y;`r(?OcQu;yylbnqHB)6DgS2C}-!x3%ElYEK z2}Vi5t9qwZ%)|}#ROe0<2Z@-~)nEKfpw!91!EQ}kitr74Pkc>c&=cDnkF;ZqtlLkq zwl1bN_L5|^>?0^Eny&tRl|Ukdwwdx8uXA&rcN|$}t1@M(H$lfN5%q6wlMA#QM@HEc zY%5pYbv~520^fkV%0|{&bk3Itr4#43k)#zbUL9=%o*{EQTy0e_XGnclCscKsK$_RQ zllaG=DK)u@gecbKb|L;0meZH-{Ob5Uk%J%Nkxi-sWl zN?oyiXSDrPndJ=L{uu4{Q(iIK&(EY{5k~lCXtO+-#vP+jMN60VHF_HgH1h`CYCJ~x z+PR09;R+8jEEQ}CPv?<t&2P1%L)Z8{jq4OxX+=|q6u4(Zk@?%}j{f&sjVHP;B}1R}vqMb}lkxkyydnNYugkN6Ze zn4p_{zewPMDG!SiP>pgJ1!=xAhlv-oFm10+icbu*8D$Gh?aeirD<9-6h zpv#?FMX-er<@n-l|K(R2sP^h1>Rz6unb6Ye?DeY&(=i=diyHlS zOk-Kq1rSanb{4R;b>+^m8t3*n6&5vn)^chu#y%REO&i-9NAGMg^9f?rOkhWwFlA;`>vM7rHBsWc?W|tLKYc7qf8NsM zFFZ|eb?nJ%hs5{wm5PCSh1eJYB9?tufMr8FU%2Y%C3s3)l;(Hs5qGU1LqFnV96c?N zV7IqS7K}Fmxb;0ls>?fOKGLuQ;T_8`t_Qp$B619sPIxaBQp%By#o_=f#_IpJZW8(G z_(w%UGeK8ghEgCOtF;y=rPHgW==9~|Z5g(nExFD1ytUfMaN(|6gn5t9WZ^RPd+G@` zBpaBeg+aqYH}_;7t`<{*$482Itmry&a$dXe0NRX9nygjN%F;c&z=T!eXI|yflWYf) zS3vP<=>=ooPmedz1<-E2i-))bs)jZ>7ODdLNouD-Z>=49y;w!lmo+))4ntTD9k*o(q%%=fC5%&V~0>$iKIF4R?)|gAsrJYDrWFE=2`Hn_ZAe)y8@iSLb zm8R3yikJ=?hlf0$3jOMc_n$4Z`Vb*M3nMb{w3QO)j+@%R(smpHw*bW<% z&}OyiN34ujF3n1mhmk!_Uo>1Kym-bh87)1>L*u+9`nq-8=HJBK9gbEJB8~o-*%)d72_4s=bg9h zv0~g>s<~rAL@wFzjv46MD6~$^>Ojty=kpHW70ZMs^G=M?uv+pQs>0aLPu608 zULbXuYPyCP9b|o{QZr4rme_;cbF6`*cFiZMOiU$mj=BSt(Q=;V?t}@ktxAM)26Qz& zYK$zmaw`z|_+G)vPVWX?Y@3X7_JGBoJRK>4+80i#sroda<*3}>F`1nuaGO^m!LM`7 zvNQa-V)rqW{|E&UoYY*FFJJcB@cTkn`|ZzOJ~GYqIA_h+aba#q=Gk8;I(Omv%WNM_ zy(4@vr&@@+IF&%--vHuMbS8M%sWz0=_Bo(mjxt_nB@?Nxd68?r z?`);4pP@6=+;to|)O_Xo-g5x@^^;H(3W;NhJ}oa=oe8nju~vf8)(i@1gUBR&ee(XN zCaXakTZ2c#?zOx#9zSd}B!6)0|BJ{qZhk#Om5kHJmjww=)5517Odnrf__X^(m(V`7 zk(e}7*9GMnC!{5xHztse1b`&{z0mVp}6 z*&IncLuNBcQja=Q-irTObr1;AaFFccq%V-qpT~1!Fdm3;P z>`m<&*=gmVR6Rj&Wx1$1x5Osy5yJh1Tj4w{pm(E=yN3ww)CBDIoF`1IdUB1fsT-j{ zA0J>A{2@02pXAK1fq-aHRoC?6g1T!EIMl;UrZ<;>`Admd4;m~|wITUQ+GhF^4HVp3 z%hgeDMB(f{XPmBN&i)LKyVkG+MP;wp_)e34WroFWob*EZK#^DDRoz0cKrgsKaV-v) zcYVn2ru;7mQM#@h;jW`*H&ej-5ysrsq#-@$0V)<8fM>H^T84WHTTA18YW_mGrdPfH zp^4344OD*n^`9(TvuS0;D`*^9JHV1%| zjYVmf0+t01m(Z%b>vD+m_p)Q-UES{9m03MNTdJs=>4h9#H7Bun!*me;^S52D5%z{=|S z!wb#gpqOax+4s`uK3gSjeNGl9mp<l2-y{x5Mloac$v=hw>mfQv`vX9^4!a-33 z;v!qz$_3Ai%i_*V*N-FMf)s(pKj=Mh#V`)skY+K?l8u`&O{Cga0!U%pez<(x4T5RsfYXduqkP&p zA@nC#z*13iBbVj|^xN)2#bK#qYPd;c9(kCP;PL`>S1lmV>xrCs{%)6X5zGM5ZWKmrH^yu0sI|Ee)c(&&I5=q7T5r3Kf+1N)>)H3>LgsZgmjNDSW+*NL-l)n20xEeV^JAUA~Gs&p{0dv)~=GVUg zl)@Its;&2+^8|4KgLQJmr_U}E|O)AtF)9;`f(`r>T$`#<+>>_ z!)kJ&n;T#mZnz~J3D=Orns};)v~fZR1ODR55q#Lo{x7c*gR6}|!Nd|vPsd;`?C zB`D1}2_EVq@AKp*?05xEYevyJFWsTmF0O8H_1apr1vssMB%A5=OCB-~S1iN6oV%RN z4d5=lO;3-9DBKc%=(#UA&wgBgd^7AC)k}OD1EWF5wW@vy500W)TE)N3^5feOS+!Fsme7=Iv(;xj%cKeMwCrrS~zw=zhyQ*Yi zH_@E`vo!6{iwa<<)6F63g_DnN zS%9aR46+Er_Cl6+_f)>@&A$yOk+RRC?&pG8AgYB%Vy>wTLBm9znsI{f?6HDY0Lf1(Z$E{Ps_k z3*q6|aBY@1naJ;d>R?e|K#7m_o}|~q2cLXv6WywPR5!(N0;4iHxEi^_eU(&!B)fur zC3t%J=mENS7&YP*n-4GmwLh@PmXj{g!kon;X3eLXJFB`7XlWRe6|^pO~l2LFp{$+NGNwr}G;?|3a{aG*P$ucJ{oMmV@JHhu2G7 zW6unt^~W$q{di5Sm0-rErZ_^8_W^lFN|Uy>E+C)!ff8a}bk)}D6F)++#!fI>+tb*wE~b531!wpjbEkDgh<$anzjDc*HFaH0)<52J($NOhLI5>~2 zr2Y*MUA=Njyh$57me<8#u~Qoli)b@#oXr{Bj|f}5?(_}!`$g=;F4eza!h8i@3{Qo z(wu2&;T$&;E-q4r)0Ln5D2NO?p_++;b?Osw&%Y)}2wEpbwIDW%Jy11-KR5E4b%8rB>{FPox3W>HP10_d 
zKBU(lXRp6Hx(Z*_)rDEvAw(AjGFy6`nLyIN`*&v^+OxJ1Scq;368I2!({p{eUd$ z#SONb65*zn2l8>A@pgW#jiw6gQrsQaoRf+kqPpeVb0K+x6xR?$e1zPsN^eR$?~P%` zd=ksZ+4J6obbc>>AH{%n?=wxk+A015{s@w5(mHn3YI${MNS+9vZ67V~b1IfY1j;vTq&f8h)FA_0F zf^ZoiP7BNoyEwDE1m}H>@#2?FuTzLjQv#O4eNbeZcer=h7sfq z+;cJ82hL<8*}>mT?(>+QEeds#Kg_|=_`m`lv35{(rAtB92kV$;jU&cRrfhac9TIx- z44A|Y^!mT+^V|cC$jbbh ztxZM`C)%(O`>{k7d#pyn27uk@O$q3p*NQsOn{wBA`h3Vddxd^D!~JD zdDz_{PWR=Q?7D#GSX+hn%xTjj+Hi)&`=gZu!Ht8@br;^?xFdm+!96vE!Uf57&8H5p zYl6h5z#bwJBtBHixl}9{%aZZ_ZT_J^7%VX%epr>(tnaMP&^oUPR=q8j>A@$^YnTZn z3L={USn=1MO&l0E^$X!j_c}9H#P1BJy+5Z?-;+1D=r$6pZoC#jnub_Hk{5AC(uHflZEU&b9U+xt@xZgXekVpu&Fj?1ZdF+WK2X zH=zn0PSYuIPc+!leDkkS9Uk;Lj3`LZmD5j5Jlv>Vjo-GTya++i%k^XA?$Cj!2>q(@ z-9;e45B7uNf=WMHxVsOh6E}>uj^y*%N|Pou_jb~dR9dd;U0k3jXe9<%*0o}tJ}rr& zFz0AvvmE0V2UH8K1(DoEihA*i;y~?a4r-pDsCi0_c~V~omy{aS`cQE{3uor=>_phX z7ejd5-N^;p{!i=f=}+Lnu;MbpreC|j8a$dQamLZu=iV;)verc=pXltB&MsT<@J%>amkH1N@u6!Tym~C9+R) z@|k*R^^yuwQx>dez1Ws1iL-V`R3`NGkZE6;xqr`mU8v}xO_qjvvg?6xF z=J7nCaIdW{VFcJE0QW(n9ro>y1mi zqQs}~=6BsQFW|22)fasonr`&*GlpwvGEoR8N}W*9U0syxKd9+~tUK8548w_Q76!JP zpk^yWkuyFw$puyU0QcsjLNqu;N|&q0U~%;!Au#nK7pZ(!-^?hyh76_2`H%ij0?gNc zveR)#xIDg$cwC=0HV5WEQk6cg&!D)i(;h)-b2M(fZ+&k^qbc(et=`^M6>5TOMA!PV z{eW$S*-6c_Mlt7{)sO!me|{l3aG`0`q@jg1LJ`%c`Nz5~f|qtY=q1A_{I>lPQ&Apr z+ypE!GSi<@nH>n>;2cNg(nIC`=Ir^G!CpT(y#L)YEE2I=HgJ*227(E!hcWJ)57#Rj zN(w@#53!75U^%dGZ+1ta69JCBs{_rD7I^BNawHXLna_OPiF2}|6=5?Occ__M4S(0C zbKB$0af7a;&0@3wsNEi|+ye??B+!16tifrBq10k7;AQRsEFY5I>zn+n;ian8U2%MfLO`%E`3ijvPih}gT`BTM3a|iRX&E2nuK8XH)>v9`s5YGKZw-#ccTY+ooHF=8})#E{j7G1%4<7)rpOky~azJ$O*&w zKoYtv`Hp!9f0HIhti{`;eH9#UGx29i^A=Y$HO$X-Mrg3l@{_fNEz5WzV&@KFC6AP= zSJ3y|OJ-0PWZ0B6Nn*{ck8*1rMbZT3-pDSaaP`6ZGX4dhP3t7BA7Q9B-th-4R0opt zxYeG&suDv+>K)FMXpTyeB#2Uw&UdYnm}1=cRGYd+oyw7j)likXC|ew!{3ZBf`y$F` zIcTDIi2Of85bateUO$cVT75fmTRevB;)p7Q%1DqYQd~Y3&CgrW9)dh{?k|V!y=$0+ zE-F>fLpGE}WbR{sRHG;Wb`avml_Y$Fzv|6#vv&5J1?=#?+lKy&b`uTB@v+?Gs6rTAxSraKL|#iMI7BfN1**k}x4q{#3J!C_v;-L!qR~zwSc_a_A_xOsAOi z?w%12C-0Ay>9EVF&nFbu04)5O&q@fX*HHqU9;j~QN3YmgBYUY6&9cSR{R#%iI3Ox# z3G~ugRht>tdF8M{789BihUv7xiXQZ}m-X{ovO*M2O3S`%zl((b&WT>&K_-;IGnt)1=-W+ea})|OvXWbMd)p}Uo+YMM?_?dXL3f?VAf8P={JI&!%h!r}L+ z`FBzB-mPHHd=O!hk^bn_c?t8gE#(Ue>)-~y%8qt|GnM}tkz_u@$)r9%;NZe$D~z5L ztFHATD>5gpo#;yS_4MVEsSHnGk{i}gvWr|b=`DiTi*AgLXK*nc`v{m2hJWfmIBM=X zVIAI@YK(EMHE`L6Oj6Hkpv|Hd3K>60M@*b+|EcP=NK8kY2Z3b8Kvmb2_DJw`KD90!hTxOeuS=?AJ*u~3}EVYBs=8E)l-X!4Bs4Ishl23z_15)r)yzkvBpsJZDKo4vF- ziwH&YeHgPYT^+O@ha|Mqx-Z+~U#uKMjc3R)#w8kioIDYY7MxbWR-D@^;2^DPdmQ)` z=Jx(xdr^SaUr^g3*V8BvWqW49I)WPff7pA=s5rZAU9boQLV^T$Nw6dYcZVcc2pZgj zL*ZIzA;BF2Bsc*Agy8OO!QI`0SD}R!{eFA&-e-T==j`40-ag&ucAp;%7=yRgJ8Q1F z)|&FHXXYoHR(x6gSk1BZHeZ>a_GW2Mx4EE)V|5@N{{h940QQP+yFWDTllk8$Y6bDY08f@hr&Io%5Q!};YIlG(=W z>|!z+vKLSey>IYDvg>S?164-Bu(pXC^DN6ZMP0|Tw0EFCxDXoc#{Ce<8!l{NZus}= zCBt6df#g{gP5=_G2;7Mgy0JbGJ_NNuq!3jVULBBYr&W?@nG`@&yEJ>c?sqvIXXSw` zDonNFJ9(P}r#>jiFr`#$VrnBlFGkhTkbDTYmRnsR%h1v((h zR}Ss+m3vN#dGA2_Q)kz`nGMagwI}pynx9tP=@1%Wr$x-GRG;3+{#fdBUi8YxNX!V` zg}$AIO?Nagw_l~!RKM37rmOhGhmXz?gnVo-s_csaRk6GuRRmFbZt>OcPjJZ(ka6vL z*hRommAuFY@aKE(MlLVEn5%8v!`$0tcCA~cvE=jE_B+ECQV1Dz;*WTW@j!uI#m7jB zAEOCm0;YaCeJV9sdHZUu+tV*=VVBn}(RC0v5pGCivABVe^ooL?pEg=QT72rmM9~w! 
zyc4^%ikkFISG;H*S&VN2a|<}1547qtKC;llh8n1&ZMd%@kr^!Y{Vs0w$IZ(Ad0&|E zNtnKg_YIowoC{tAR|TifOLHx+la0KMKa*wZK08;=i{a-^;?tA;Ef^ zViLpHc^LvK18uk#hczCbw9hrR;5O=f>oUXM$}P}c(2?GvJ;B?=DoR~3!Py``6@2|r z+JT&_u06j`bfPj!OF4{n3pH?v)BD-I;-rE(u%KRZ_P4=pZQXAAF!@}!=fCX5>EJ|z zKul}wbO(Bbdi}Bu?LgF*C64sT5p8`de1ta}>uGg>5<3}% zqSk1ymw7+FeO@|OYcX~+4LG>f7u)p|{Gp{vw10EnUCeHAYaO8Tp*HVNyA`SZhJNbv zf*cxp^DcREYf!Z9$Jip8&S&m|3)N>Y_Xi_z6mvy!Ddv_T@v5WvOXrhCEODw0o@D2n ziLc~PNgrS&wHXI9p~mVS3KOA$D27aFmkkPXZ9AOULiRgHk;zsbF?b&{{Akg)I%t&nUo~0Gvs*k8$`{E-0h0 z?cJyer!!$mK}oWKMw7;41dn>r6c`J&C%2H=4Z8Oqn73v z!sw}pR1I-^RmepC1VsK1@Ns|l3=P}vKESww_!Qj5>tRhZr*M=rOinye4O9lISJq>r zlcrPEi|f|9n#WJW^#qSpVDobbcc$YH)6H$xpH-Az5SoAaYz(4|(MT`SUl$7yW^-|2 zn4Ate4J~GHJeFtp`0lWCqm02(!E>_V07H7>aJ_z_vXG)7Wa6v5gLRzX^C5A16C6eA z(?h!LMT0K=r%kZnAfE6e{GYVQ;n?CIKV0%FulvYMW8bu}Gm5Jb8s3vt@8$&i40mcL zX05ZlWufbI^YBiAyn4RBHDxO!qx<>QAgg0muW&8K5k8lG37h{7i{Ptx1Xw!OP?43v z^Fc_YqmI=kgv^Yes!wEhamX)wNq5-6EoWv}q*MRRSig0|SWk>A2OWbWO53`hWGjq{ zqh+l4R`9Hl>CISFDUb0t ztuz^|N5d6)spuS$h{f@!CO{I@6|u*V;&mmhB=!3Gs+4!Nac8z(ukX(nKKPO-vx+8* zLt{ruZyw)8?wY$MM}v-_=(1UB(%%9vYV14CDcWljhY!Vk2-aZ2NTgr!<*3wX&_XD7 z)O!u{1o`F8^*`mdh0S(H{PZZGb;Na0fwl{L^$wcrzot>VX2?mmX{0e#WF_eniggGS zpx^LFW<|#cF!fg?f|PxLt94iJK$=ZI<^Qa=yu1Us=D?(tkKxB28`H*WMU{KZLgAtN z`v)t@I{^!_gl~(uZ~{Zfj1R2_{|&?sq&Vzc^J|nO1$&7Uw6hJGEviSzG71MuuA>-%k385`$4;M zE|$iixO;VEl<^}M+=Fr*bF3BIPGEA8z?(8GsSEW2X4*YpP0IJEbh2=%NYfSYqduDb z*sk0*mvdeyCuGg^VaBt|6sAoK%H&o!YwV9*YBIQDNQvP!)&AAX#6(v--it82x_M6= ztjLV|Mpn@Ma(Oq6WY(no$~>iU>%)lAmz-Z#G*0NSYoGLPzIH@e7oH%u3*yG6{Dr= zlTGr9!bF09`)I{$*}eWML%Ya)qKG zGX>cm*&YD5dTr$8BZ$K{1smS1nwq^tI)1O9N$RKEINZ_8Az{~JuBwC&(78qq*iwKC zI-d8u!bNlat}aS3a@w%nH0}(b)ON*R58GMAHr#WRFywTUva(^43y>OD`8iS-skcxO zqxEH>fgr+BEA++!^$T!-9r!RIbmw)tFu(9%nsQ_saQuAph*vNbrSCKkEM-R_=a6X$ z4>>LL{3QPtskrv+=hg#_1{2T_L^pcjk(Sv5VX?K_=cjUuyn|J8GRE$Cf$FnAbzX2s6*2Or z%4i0u2&Tp{&lr|hRi|4P6=j9#_ztAq>J8QNqAoZ(xyi z_PiB1W!?cOJS-Jh=!3wHTlol5Vi-JZ3#e zY!@c2_c7|rj|up@tKa}HqCEY*xu)hUm-_=J2GDkKZp2Ubfy{p9>?ArfP`7l8ooD4} zWk0sS={1$$*j!%+Ms?7A8Gw%N1v-xB)dcox(?;2ux0l3Vapq+sJHQ0P#QU9=q0x8@kYl(u=2mX8Z!HCrfC4OC%|sD

    ak)W!hUsT}OV40M{TA$&NbE6T?y^T2yq+Qo z!?)xTl@SM$*OzlnJ#U9$8XZo#)g{oi(L5`X`c|3EO8@0se4yqNMXyeouaqWDq3RS7 zgTp7+1jh+jx4SQQ2g1ZDD2()!wJd&lxV1>;fWG;l3FC>o7zb^f!n*G84Z~a}OjrM< zB9#^HTJ;1k7489D-+XQHas|pbsld*B3D2p)Icw5+xWFB#!un>9@c4lKn8c-iokD4S zlXM-GQdMrX37g06Ky|e8Y{HG?+OSH3ca2@)#+-k>hy=9@D7<37mPM4BP2IfC*Rve< z#FqeP?ZrN?77(jT9KW9tgG!B#t0IG%7T-id}!~o0U`S&}J6=P@~YgS|5d6^|?lEdyiJoS1jyyVd@lDVYwz{B8g z3;LTn2b^rJrwL}guf%!P^~PHnwSeU|H^+RqEUomnX2y$7jrBD*IY z$&6VZc5J8KqPxGkjkpG_qBbAA&~PAYLyPbrkXa%hNwc0j)e}f;-5Q~IBWAuqvj{&v zMp>tNR!bweo21c9kASY|fjh}JuWBdnN7T?*%m6%rot?3rS&$Q4)4smGLL@JowSI{+ z<@Gqv-W_QFR!rX4@`8Eofwky(J^fR61|OzAfmS-Dia8M;(PUOp9^$z15N5mN> ziu8QC``uheVK&==jbiz)?TC?I><) zG{&W~5+{86oRo5Xt6PUTpjTLqV^I{7$ulZpVX~FcUwuu_t5fttQV-DOK4K z;8!(a4>Cuc5!p2<2amsMXW{{tDnf|T;V|JO**g9;>^asSJd9zxV@v0YnN2leYs9pl zH^p~lXrpi`ALCAFFt1QRY;Wki?OgEX(dN>>#z{U6(s=Lm?1jXNKYy3Y8BtYHQ>v7Z z%u_$I*`J+$5@z%jhHmt6ijEejpI~0q=Uk9(Eg%OA_O_C8bHt;Qtn!Uh6bI1+zRuOx zY+GBV(6z$v+2D4a7Mcy3sw8f;#i+N%KN*kLvmCd6$ETFS>_oI<>>R`}FJSsmF2ZY8 z31wIbl+SvKO?_LWeJxv72Npw|bLr517W5v7PRiW*^$c~e@wxf;vN4+X}NAe<@?EUr(F%p>1j$tS+ zy+bRRR2_b)8q(h_w56(_l20X==09-S3~z8wGIL0^#UN|FDH!`TXR=PPQymvzYomZ5 zL~g8e9E+~DUV7L+_Z!GfFIs!_4Nec^&DhwPer&034fXk^mp2#_MHCXro_c^0pN`JZ zAmOxv8_auU+rc-t|5oawt-%Yf?G2B79E#;4*K=xo*j5rG$-~SqcXr{MtGt~lrEMK4 zzaa`G{)@rt&|EF6_q)U%rfE$F-EXEjbiVenDkyBBGR5GEenJ`A0vJe>y(b%uuu

    lb{GB3Kr`o%Uzz#^YBTloy5U!0}i;nW9=VT8XGa zH*;-}Q`M5=b)#DOgU0eC8PtG1K!W{Zewgwb$?@ic)yA4uAq7Fo$M)X975f*JHNjm= zgy|HJ?BsO~W9|@N?vL8<(AOtc?78OZ<+O*li})67 zRn^oclCjc;Y1DYbDl&~J3?qLkzV?@rHnK$NM??WAO5WU65wbTHBbVgqSV4{S?3V{Gkj)g}uhH{w2sFDvB)cKCfakK#XtF2blWS1l0U@y>@ zb<0P1zHO+B@jp>CrD#|&PGpX6z4xOzAiM8T7h{6aXy3hgL7wu*qil;25sumAr-Ued zT2`o%@U0rL(BjPWHEe33P`jc}J%q%+4qZuo%+&k2Od1mR(K$4J?6bKPsVL&_gO@7hlY&{0Ne+uezofjf&{Xs5u&8^u(YWpQ9qu*-qNxAA7Q&B*mm{YhD-J?(mjKz z{8)>hV5=QPo{p|Au$V*b9Ul~MR zB<^bzIuu8^vl6U%c8=*_O6Eq&O-C6Fw1D9}>Y!ymFHGXt+|a6dOEFZ+5>y$XqmhMu3;*hC{qm_6#!Zz&)9}rMlS}ok{}@5{2Rs4YtZ;P?+h`5*NOUS* zJR+1KcYkWIDl85&bRsrEAIC#2jq6>-Xk{<=E$1Rj5zDlR1MLd#maK3b;G4S_s{V^s z5dZw${|p}S-~aost2+#Wbtr+Apj7t#Z)s24$)7NbwsvmBMoF9&nx8EgI}rRJmhhwEWQS*^&QJd(&H`Zv}bu(VRKEf{~Kt z7dMQ^CwHLCV5B%e8W5WO`*>Qy0{IeO`wAAiOJ7A1nB)|e4e`@x`%la95r16+7kbL1 zE3)#Y)(o}YbEu?7my&JWs@%+-B|A|&9Hc?W-vDw+;NJMr+<^$B|NN#O_t<3$N?7!N zp6gh?O$z_3D*J-BNUueQFHLW9;39%ci-+)2%U9iL4xxG*UdRVv=lI~bw`gL=XwVyZSAC4> zBbNrx$_X-)xP>hI`wwNTVdCousP^&hkq^KRJn$Yl3*A49dYVeaJI0fxV*6D-?Tk=a z|HICo3L}4w^!|VPo7xmkhm;b6UY#1;V($sHt7z(24SMD;@zhq!{|b*W75#!t+ml;+ z8@oo~N_)Wj9p&uF{Eeg|(cXLAi8$f!(TRoT#%nfU1vqt3jht}eh3m16j=BI2=H8`K zM#orr<-4ldphG#nag(HrQAN(|t_af!dun{#s{Gsn5B*l~knMmf|%g=qJvXIURbJ zKE}GA&#!qLJf*=@_DyBT8S^;EkPF3hXs7ReGtA9qdvP*K4m2$DDhZL5MA>X8i}^Km zSEA>yL#&*E9CU}kb&P-+4-F1UHnZRZvZ*#Ft0bN6bdG|iZ;LR!7cWX}!??q_zhmw#n(b0RvcqDKA%AURbcuKP7n*liJr`s$B@#7+nSz7KI}RpZyUoT&lb zg2OPH8A0jWdq1EsGBE&%`S(xc4YICnMjHj6aQi^@)4yO6`5s0H6kLFl7 zuJSlZ7j#CQT`Q;b*|^H~;HP{c(L*Umccyzy$&Gtju;}pkHHJG-$PE{3X!-G=%{!0K zPciJA_JvV}yFQ=N?m#6HMJ>f!r0|+>=Ve@oMwejI2L6s4zIVp^pv9-AFJvD*H$@BT zc0>UYp`y|&d#|$*^}zr?MU<^(f_!T9Fj3?wO+}~c8-|Cvo3ixB0!%GcHnTU}4W$SX z&sFZmn)Iuqi%5FkAJ3nd@>3G{)dWOrr{een0tpjAzkAUIw^zq$_qbCvHA*gimaKc2 zDDY5`U*vn9jm;+F=t2k1D8hEDW7m8K@*CRKzK9gYrjGmpCf_GVCgjUfa2&ADZ3d#00Tew|h^bss4)+8#~}n=KJ@itS@7cQ zTo|sQpUY_~IbnQBlny()5VAoRAHfmZMn#|#h;(Tj=^<;vEAGyZq%O8HXs6Vl zRA=WKu3HKN#X=2wPcrasOsecqXV`*y77)Il?+;&*A&-tkudMfW5E?ecUb17AA}V@3 z)-LkuhjVPaJKXaIeEa>rjvFx;7$UcDH7}i&G&y~Q-8Y2K5LmUL@>=vFVMfZUs;%vz z;&@m=0ff1W92!>TZO}GduRmx;B$t2ogwe>5Bz?a{INC7P?oev8<^ zcq1Y^Q@br&{-OR-&yT{C_m=3#Z|zl!U)%O|gM_Cn5!0)qMi)#9Q~n)%@y_1{%tp3B zy>1^I0crxV=WCgz2tBu)MTF44Td6#HF#VXGa6@bkC*@5T3Z?T#Scwy?M++cccsWkI z)hSYia(v8Q{a~6X$SD+S=TRoYtVdq+gKyhoT&jkK?qYYF+&-Iz z&~62b@(N@_xK7DlS)p>tjknl!_|05S{xniF2H2fx^LgZAR*H9!qn4p z(Z^J3dV2U+xW8SW2c8rr{2H1K@$4a$l*)*Y1(&^lyFX~x}9m=PCmQ?#jULmu3szDsZp7iXP0mv`c17|O^DpVYJjU*- zF%>7Hr^Xxv=W!k&6%~gMrL>QmuY8yd8cz6cIj`54BD^%#ne}kHLvwq+E^`xCk6FgN zEs48Ut<%lF6dxiz(Ti@+pwNeGw{tF8O~EDdwHBLldxM{leL{`F#V_z1RNSAY?}AWhfAK3ig>TTx+eae zzp3|pb%7&i_U2xY@RUglUdfHziVdqC_vRVF@Wna&Ry!)U~Ii1q2d!cbFDY9uNRzG}JHiigTt4>>u)Ys87 z7l#uP+Ygk9{&2nnopLnL@O^2ZcbYMeLdl&*vRDY>H1U3Vd8VhJqj|q;quwSjjVi~) z+&P#I6^+st!qW=LSP2f}DSF-MDDAR(9ep6z=7fE_J77Ed8tVQ65}})Td4_J*Acbe>jtAc|!Sh!Tpp8!airief1Hr@w8a$^A zrdl@KXkZP(0=G$729K!p;`O*#avbKF`|5e;RKM2=n6)a^4SA}Cvqhwn)WfRtigj;m z6r<%b_NRH1V(!=CXVm+l2**9x>&CH12Ztcsfn8s#8FtZOW6XOF`*q;rApE}KWmH_R z)6@?7Lq>JiA<#a>?n#l=!xqmh&xQC)_I@R#kWO0Vu zWeGs{;*hF$hOEB>wO$Vu$|CAAtw`Jw>l4z8GIi&)dG=lx&VJUHQNkh-t|Nc z_&D0kVsTZS;P!pNQgACiS9{(&XO}!ODG9Bx7DQ)iw$Pal+S7zVaJ#kh!pz``;jF?U zyt&|SvZi^nI?~MAy}Im-Hg_Q2<9gi5NHdDAmp zMdf3#A)3KWvLpz{2&^h1QH4u(s#qi!XVlBssPoFO-b25nG$|D}-lE&$@4c6ACCUhI z%J6meR^Qg(O`2QE4<`@T)YO@;wJG#(JhxSF@Jp{60R!MS+Y(XYoo2#u+m&Q6S?tYq z7bb!P}!|FN0%5Rbh(&g*;B{AI=ld zXB;$MN!tRT_4I()2+wW#NmuleLM22$d(gE)os2x6j1_-EJQm5c~mt z3D0;1tIk>zG4GOF8)QLy%2!?)MfSmrp2(t&^t#u_2v;A&TaRhYEVd9lYAI{_U3<{! 
zk)kETWJzMuTXd+~kdChme_M}!!ZtM{jNvHIhZ0e!GRaUTm!@EWJIg&`Qu7g0;!>ig zniVykmv_>AQ+PU`uCHThxc$=J4E9ke%jWR%85BID4hFd zs806RF(m!Y#noALyn7DVLu7Eq*xE&ojlqn$PfaMA|3>6kT%h#E3brw#vp_+mFQip0 zIxwwhC7f?Cy1+pt(Aw8J#Ej9=f?2i5tOL8<96raA&HcG$Y{qb6j)ui0g!`o;T~rA} zp_W)|RUHh4eGd-unasPbv@!sn2=^SHdzhO!A$+*UG^VenZvqVVfJ9HLAdB(Zx5#WT zdHNT|41mww@_V@gi_OP?4@z>|ZVyq&SfZ*t#@C1=y$dYVT??G(z@^;hV`zH$g!pYp zYk%I@R^cfl1ceGU1KcNTd(K^ZGo+?k@1TyNe?I9FLUuZ6)B&6Ctk)~~cvu;}$b9|& zRhUH%K$08SQ9TX1?@kfuSE2wG_3iq}}i7(!-g5(=th4)pGoj2Y3Hc zJd%%0M0z-~(S@GIDoGxZ25{d5Wm$5&%{k5Jq~w9Gq0eL=AbH4MNDcoi-`2BZ%58z`3hlo8SQop<+~)dMzZ~?NlLb<8>=dG`u?s=Th_rMKW@XXk$0b{ zkE~l!riDb8yr7wPs}x#fD>2wC!!L9-pjSB?E8gK=7*zIif^e6V;4`1|8X z%nC_D6;cg6xOt~wMvxai+o?`Vx$ftK_YV4Cz1m|s2D*rKUsYcn{uO(Nxq2$xwr0ANH7p-y zxXn~zg^u=ZGAVaeJUF0%p+5rNuq^D{BPdD?3!DmGw<7@aVOtqa=1SM#5j24o8(dXU z;v`m*N40lvTL@3NrW1gZq{wGIar{WwX)OaE@_ktbZe-b)v&79iBAeIov{}yPes3=g zD_4;4v>I%=tHy^N)|n*uiKD|C?08-g;x{vpN$D%Q-_fG1d#aBMKa4C!L~VDtC1WY= zX|S-zZRT2jAhUja@1S>a`Dy(!8bpfbKEm0uOt)&GgjrFfqj;RTn$saZ3PdIUwuZGS zX?dc+%Yg3!s?M+X)__{m>(y8zr?69_6}v2ZEwb0wUFS8W&C!L&hD*@~B0Fa%E>iSn zSKGHDmW9ow_Q$KaPy}{u!12ip>U6|ZRN<=J-ccAiQ|Ob#1+AU)G07KPe4o1lP%4c{ zFN0N>KPIiDZRkI(PZaVdct3u~+PKPabD_NOl%r4SsbNl(y`q8br}a^>J%eLR+c{+A zG+gjNV{IxlXW1$-&ho0NGg&n(iE(aLPh8;g!nRe(o^xg+!aP-L%!kRsV>qO!GF+1f z4cr@1UBfj`x>^e|*{9!F#{zoaD2i~!@^us`4@aT75r6O!k1hJPL-V{wY)@2Q@O)6w zW<^8=+q13aNF*yNSdKB&7IKt|mFl{q?Qzk$Nbzc7tBg1~F_)q86*zDFpmC!56(?3? ztY4Jp^k)Qf`w|1(t-~otYuU2;il{qt&~Z#^Ytyxqo>f$ZA0w3#o~jNc^nI`wmmuG^ zRmQT%fL4E>#6L{t*1mr9BJlDZy=^bmm2;2Bw;y+);A(g1C1(J8~KnJqk z_6&UQ>cpN{KMq6rmAQ?|?}8&SOQP6N{k(LRQw8Gm=nExWVs8{=r*>-TnE48^bEuV@H< zjqhyDf!|a%h8_8wiMN(TTeR_`lJO5Z;=(ez`d5b$(qAVkUB{nHG|xX`PcZe#-r|1( z0Z4!1+}A#_e1!t;FwRxgttHaj4iJXvogL8v!iP#oE&XuHCCvRvH7=pyS?p)Np8*j7 zzZfnLaT;9X)x;SXEMw!sUEPp6KewwgcTknC;HP$y8Ykp;6y_Sd(ojf4h^R&)HL;5u z%03+HUWPPi@;YXsp5T0(rdllt(5Pn$VWDrV9yzjMvGp2t-`2|=1#QYm+P!@2&_lQP z=H=Yhe#=Uf#&lBMs2n`IW2A?>=)(_Eo5RSW&VH+huUdV{KJ$WvS6nU?sknaO^_F*+ zOSQ$eni@iAFG^U&uv{__8a}imqT|NrMT+nF>{~ux6Y)Lip!$NbDS~n}ojW5wRc}+( z_>m-xL5g`K2DBCk+iT89YzYwym#?}xy?F#A3;zv8`(LJPq3ZMg4m9j82IQ?Cg5tuW ze?l*hm^NBgr9a()PL>q=pzXf&U$c)hke+>)YzXB|v73wdV*^+w=Ck}8AfuUn>?4OH zbXNqxH5)fcH_z_9eIfSz4z%5v5eHn-p#Gm6`^CTIOaJ!9T$ytYqk&FePKR63O3?4u z(;xOV0eHqLk=F-Ti2>jAmK>0=BB_qDJ&KV^ahw4 zPpL-5U~&KqfkFAd$M_fe(0G!$0NfyeEu|Df!M|d>o5z?ln6ICj%{HOh49`h9q?+HW z6zy5Zy~6pGfnZ3v1I6W|`h>M!(?p2<68Z{VNwh=0&EEH62kNtjp?4sEt(MCdzI?xa z;FcjekOB2fNKAlRcjNni$`Bhod(os8J`@vGb_d@0&{AzVr%kBIyN2^8Hg(ss-3}2e zQ^CJk49IR&^Ea#cv&AT(-xB%#*=nT!9~na}qevIhAyXQ)eQoe2#e1=;dU-!IiQ0LG zc>gx%bUD?n8{pNT^8SBEIMx3dCFyRaZ8cf0&FisgHjbCj)|b`Rs#lf8EyjM0yWhsb z<^SOGIn{k<=l@;H#VCH}zz#zP&>NP@2P2~zi>_KLfaMVj#EUbG9RQ)UzWVKd+_3I9 z4kJUzBcyCgP|H36kn;ViUbBbVYH%!n19-)TDF(OrRW0K8iPn~>Z)mclGh0IOd&sN0 zHt^(}wt-@V>$>ehb|4jo)#9jW6->Hwo(2^GmancHv)k8E{#?;d-)uL~$Rz+W;#ntT zQCmJUh4!idX+*t)%k(djkUB)inixWyg{QXiR#0d5Z2a3L3jKp#5;_{u*96_})RQIo zi2j~>yKrtrtfsu+Z<9RCTDfMg*#pVk4unD%eF^b-;}pc{1aOw8fyN^-OBvH?(aK*N zc|nMo@^eV2)&M?a6*l^8|td`o^GUIWkXBtgjr3!KVyBJ_n2HIf1lWR z=~|Ojiq4}#y$D=t;0)k{v)%gJww%hT-x@+L0F{P9_X%78m1@`Q0Qmn#<~62{CtBnDe*AjfG?)yQ~mg_GXD+rE#6=@AqsTCygbf;sG4Jnv7S72j7YH!RG_%?!3H=~T~FA?@;4tZe2xtryd&VtnF+b?vErEO%CQMgv1|yL68q# z{Cd-Nuz~DQ0&xth%JKmxoN<$)#%^&WohxjLhM1=x8YmbY^0Z<+a5AdsHiOVdH{BqS zcO|xkp>8juJ-I?|_~zg;sHWzu>JCIV`>cdVln3sU4Odi#RR>yT-zIiHV~X=^d-j3i z1qsb_0prGXU;b^Lfdwx^c!c2n^ZI)Iuq)TJ!m5Z_&QN;Yi2D>F7-anfH?JSH{nVZZ zhQ10FDiSMVyQE%~bUz9dwpkNe4!LM1fr|vC+4dW;W%4TQE6cIs&DI(Rfnq?tA`*FC z8nE7J=XvlaSYtXv@{CM@Ee}ok4~ihKg?o=u^C`S{?6KDs!DPKleX2 z8GxItRQ?WxjH)QQiu3=w$oB7}-2du-V?7ePd`>=m>u)V~>auA!{ltN{jHEO2{WWoS 
z_mQA{bjo?dbj|$rUZb(KitW)fY3p7(x9BY3dZ1WjAOr!0#9I8t^)LrqkNbe@v1^Bv z%sxo}!}TBrT#tZ)mP_KlxE|Gz4y3O(6A}lo&JFGV9^yaL1qeoR6A#6P&M;2jf!-9^ zO=GSUwh*!TdWlxo^G4I7zN^M-Om55{XjWNCAnrtsn&m&^sXHGA-10|%xaAYeH{s$q zaOg*CVQ^M)()WHRtVf*h$Y&FD9JeXUW23xZ1c&-iS?V)QzjO&b;$eQY#~;DWo8hui z85@5=x{~U+?1r%=eA>1A&nFCh8K?^fA2bS>f9-NB@J<|7A^&7;`f5v@qcSbSE&AQ{ z)a}Jbpz1`v)6A3%PG}m>f*3`Ty|E^J?1o)8Jm7&R`)84*=Y@eNLYxJNA~4=Vk6DCT z_QsZDDv`}6*v)trEUQA%xD-v&`|-Opb67}gAH#UkRv`|6d~&$SP25Ha0JSY9QS~b!v)wKpN{Q(IIFEE`S?;;tawiwp!8uAt>7HAFE39};kpo4 z62FXA0eh7GW|Y@|Z_}BA3&NgvAo}IbmUB#?fNp?SmH|OeQ5lemNPojRCA((2M zsPagd;y=~albSEg1>!PDd|=52mFx=Xydds+}(3RiC~`wveL5<>gr=bx;F_ z@zJzc@f=xKc$;lUUbUUExi0497qOj@nfs@i`hE9_8=-DUIrCdryadfdXLJDT82k0? zPoMZ9M!bk9sMK+A$hLszou{WuL~A7U*;T);G@QqIWJr*X!h}IeOSK5N`F~nwzuPS6 zrT*j6@E#{fnOPm*iS{F@i1JGb6bxFMAs^*sSh32~Alyu!RF8vdsScCZOtCcpF_ zx7(BRk-!M3yoOTBt>q+dEX$g`957A#IrBiEdjWV6{6Gnk-S!yq^^hWH>a6JbdPwK; ztXsNxg6t5@ra+I_mjdKJ`7+t|+Sb}IfP-Zi^Ouq(`AbG-3r8Qbg`u88jzb{-t7&Mc zUS#;^r8D|x-OE=m&cw+XHvZw2f9)Gc1yZ-aivC1YL^%}TXC|gw;+q=(;zqD_L z=1o;vIHP>g(Wqrn>gE@kkrFnI!;W7YMOm1-NHQJ)d^&GVj=rmTnW#^wspxNLhCR~&prWQq@yGVc>A z|5HYIbPV5?jT8hJ*~Z&)#Q^px6~6z+*4$xmjtTgoqs%&|AZ1j$5@WY+*!00gWah+xQHw7zzQ6{&v`%0!VvZ zvve1*L0H_JmEgeih+M9$V~jhPQt1wq1Iki$l+nc=kQF|~AantufsWjvp|Inl8iNef zj`i#U1Mj+{M7xUHT;veT9F)25*s0=>rYUm1Zcq`*g3Qf?6n6G~H_*}B;UmHPK*+}$ z(_P(@{RU^2FTrNMZPk~wHQVOO4gt;BqOxu&0*?37@64az3WYAb;a??;xf5-Q^f#~B;LJg*G z*q4G0G^x3PSRA|NHn4K}men7~CUj>Uqf`SyL^a?>=Z*kLuD_KdXX$0KEg4MTh--a` zy5~0$uUa|iCOb7HHCiRdB4I=;W5bHC^zn3P%wc5YZxqq&Y+@TZ7R%?DEx;0(a%s^d z$v9Tww!21K2Jm%zg3Es^y!}@XNq(0PhfQPb=nP5%POO(IVU2dbP5IVGwBZoSOH|vy z|4at!0or58cFWt`9Irp9w(7SdT)N>@2a{RlTZ1eyZ#u9ZW(n}G3mY?)kdD<(*j54n z1M<9CZt%f-pLpX2pe5)fB>9N!#6+R)AU&9WU62MAYn5@z2!>u<^#G@f1+qn)zY8H$ zAe~baoDCnmLg?l6jW_qvq*Mqz-`=J*zfJX2Z*GBAkDaf}Hp9Ae#e!YrsflW;>imQ! 
zkAvSh8}Ye_C<+J++CtkUze7j90QS4QM}w5jM&h%)M}8c%Ljnih9q3qIM7$G<21(zR zeQW?hULN@@QuQ_XPa2}r>$(f!m7*}wp9-6h9~Pqi{<+@;#sf0$|LR>&Qh>z$4ZU{W zEoSoaEwT)V$&$rZpH7PXdN*|kIy#F6_EW%FCGrFAFcKHsfnJ%_05@QMKrb1i5i(VZ zR|GsQ*SE&N-p>6;e*a&+?Jo)5-_#1YbK$CYpe!yO;P%aw*iHOjY6<_(i9~%5m|VNn zl54a%V0|PMU9Acel~}POFgoDUt0G3L1n;WVk_D{Wd(q@hu*U-erekpPiR%7A9lk79 z^-SmKCD{hv**}r}b?)cZi(7}G>)lR0%R}xvTFJp5OBg121f0|KtZmK$m7xUMmnAU` zk2>dlrJjK<6S~Fu_fo|Em2M>cZ{LsNlv>(NW&x_@mqJH%_-jeexo0DE`Bn`dtG{RcU3 z2}k<*$h_^R3gZF1{>J+nJ+y^*iEU)0P5)qJzb`r#u@c2&ap~^}{XI2;P#$CQE(h$P z#6s)ZGRQRL$#s`;YCsewC<JX6G66bEZ4WAcOzH+PVD zwH1>^CP}JVm3tJShea%s)w9I1ZsuMoQJd7pEBD{`_VI5G{|hya&6oi>;&G%n@A5hE zIP|ps;qO*6MHB9FAf-pVXwdG_No(cdX&a}*iAfR@)ADj-Bi3)8-o!5sBvjA*FSp4* z(pLX+L4cOOmi{imKkDZ%39jEv>9;*G6Z`64iv;?2!kMecsVq31+_iqBckcd}UG;6z ziX9i1wmG$G^f(oK!dQ5{c1tw=+nl0+l%iZ@;DZQ_Gw?-_Gi zmxr8CtaOwgN2rkRNN?WlP7qJ=0RN|u~d5D+Bij3QYqaw?Ld z0s@xgOd&`Rkfegdk_CrzJIvAV1 z_TFo*wdQ>0^UUd;nZ$bN;bi2cho;*2B1*_-U@q1^Ni2lHL_^pjU>X-gQ{PW4JYJQ~V>*2hL>LT{t%tl2hI z>2qlbm)k#@k3Ke^POq-5nV?xneURERtomF&k;7yR<&{21!`PMqM5<`} z^>x=9`d6u&H;KDvl4K!)^hvvcEPn-I)ugTklRLTwgf z?<3W6qoY#v^$6u26}BG-8EfoeYhaPwumVXROEZPn@uJNWxJGEFa{) z7BoxJtsa5Ap>z)?=?C`R_Npx;9;Hg2QkUwKm*O9d(AJH$K2_cj6T+F$G;Vn}ch27A zbIY6SVaV!c2QqxorftwG_-$ypZi0&|sRzjOgh6xLr#2^o*3GqHln6b%G{CsQv!N&K z5+L~;b zxI|bjpd=;}#AuqaO+HpXi&hxy*WRW=UiqASUXptgYve1)5|*vuUE$iIs5NK|^#s78 z1v+?0I>0D*Q&p$XDVI=8$pFiKjQ*x{cZ3b#V){vj`EjBeH-n>=YY9tEr^t<0jY?BI zz3)-=C{-*j&iH!|UU#pP?nY@{5kg@rhSf0uB9mON;GSQ;P3XCa+dT^-L=NUv+?h5} zQ>{ViL`CwE(EDB0_AjH9`LbKLg79+8m~_kUm-FK)EwAcFYlJ}A4;5MZ7rOv0NvUHq z6$}ly0|j!m`@2QFzem$mr{MDN?wO!~7x3oyGXvPxHx`726jI5mq@YhUrTm8)mmK%< zivikoz-BmDB3VniV5RFg z`1=>DW~v;5_ zb%OR|?MVAt>6~GG(dB%_O`Yv4s3vdDhFMRcz5<{Ah5GBy)1NTBl$S(#H=TXO<8Oa8 z2vFavAG9n^b~k?GtVS+m59AZ#k)=m}B~ZLiKPP+e6V~l65l~%s(~v+@rntp6 z#iE#aqCHLejUdhTnbvu}LY}iO4;}c_LRE|DN>g~hv4INKq*m=VGO-Rog{@}3 z4S|FE)VcF{dQ(GWi;Vq5nm_7}RNfPjF|h6O==Wlu^#*I8OFlyk?X=V?-QoI$e2VZ9 zt$e;RU1GS1wr#+>miyEFrKP_Fa*+jqb>_ntJ-8{m+=8@g`lOV;{3w2tVbV3EuSvY$l}jT4r-oEblHnf%aJ25TBm+&%G03mRvPiKE&*cMw`Ur{9b@#B z7>}L1$Sc<7{ZvhNQs^BQ@-S4-of%3;Bm$>#La1ud`@>8C>@vQ|bs?J9vs-9Eq`GCv zqa^l56JENKD@ngCd=L^vFcMa@F1Jb=z$_>tZy{d)mD(d|{^UhU`s#=C%r(_*;slz+ zuQ@U7m8fWe2f&otiGdi)Gm+S8*&j^tFteecFF@4;-(N-Z{`sfw568jW6xOcX3s4te zKS61EYd*gJO858wir0u$n{qptQQP=8ps6BXs%5SqXsflJyyoFKeH$H#5-DwKfSCdS@OWpPDB z(x?52bQizyq~ijM-P#oG5+ji@yg6QYuqx(Igs!RQdj@Mbm}K%>MU)=_-?X2kmi5S3 z!D^n+zP=QyzUJDtXlT`~>AeAGA^}+<>m9_p4Y^o_L*B4rcSqR#GqU>j%Y83pFIR!x z?=0IOF1aRXQ~PUtRzE1$=iVtMeQ=#@CtF+^Ot~tO%-&OSs*JG}>NFafycIkX)QHLs zmcA7HniG2TXs`Bo9|*{|d;H}q&qgEO5#OqV=b|>H+u^c?cl3d_u`4zAJRFk(U-r7gf86IAUtb~zJHKk zfTfV;#d($wgC)45O7GA(7>(hi4K2)XmgBUCLheQCo6&BWP5huN7^SRN0fWR z1$*-YDd_NA+vcQ{=XE#+n9EFOxc!Vf_f)nznk(Kfg+h^`7{-nLa|y|3r~IE-!tWRR z5q>DKYjIFndy`0f`ZvR7*m3WiaUb8B0v-f1wJRcLc2z>c1-=|8&yq$@VbmOYviM-js5_NfFUd?kO7-kF0$sMU*Yw92eR+YLBeY*esv= ze)r?CJFWD#2~b1I!|dz?s1k4s5P8iB!5S=qy{$VR095?wH?e{$R-5O}KS6xo78Z^H zwW>P+P~o2`PEwZjM<1$1&74ABOA9lK`Mm4BbS|+lOTOE^hcHV`_2rh64--g zE6^iUyzDA{NOaJ~?EAJ)pN*9yvai=&sn>FBahvN|l%*&8a;CkN^=_jLD0gZiGW%l2 z-12hE56?>~w9EW|`>wh!qtL7~(RnV`_|0=4k7VyB(-H4HT;DtMQsiMNs|R?MVj|oQ z?{Uv{U!~EdSLBmSme_H`50xQVtGT^R=jNx|!&~qQp~;KjWvvJ&SkTq1U?C&EkDKOn z*v%9LgssW3KS44TQ&<9M?NKN5h1niB*kRT?P5cCJlw9MbID%*(>AVq{QJEc3d+3dR zkvowxUe(jl3Gkx?v3v*Un~FRvZtvbo{@w_Zk!Qo&E6&vH+c;}jdagi#vn=luO!EHf z#Swl*hQB?6G!(Yx8G3Sd&pkYACTk1akfaT?UVIb;*(Pc*G8$bG99J%$_a$a~TIXvV zEp}R7@UX80O1=`m5l6o2f|wbTzRR7C-1!Oe*v)R)vhN4MsAnb-7Y*p5fo&XOOHs!^ zLCW0yCzFdxfeEI72E=uyZ7+d{=U`Nwe9#2?D*bKXPN18?xIk36*s{k`sD$g=jrA(Q 
zEVmc4YLIH?^?v#6=w~5H6W{Y7;O~irH{q4Ol7q}w+)$58w@8AfqsI(*sp5b>^u4^S zS`Sslo(BgqO-Fj_E&Xom!ZPw}jv^$W`sZ^dlDglLw+u%{+DT$Xa-3P9JOOgo=WQEG z^~Kns_zX6TRMSVt&Yj!dpREJCUpziv)HrV847>iDwy@
    c&!z zioV(-rIn&2-}^C)>{yTe_ox1Ih34iGAGmV5aW$Z8m?5!)8&50X&sgySA-|_&gS67&4P*7^l2a*Y%zVaTCZgJ4h z!qQZ;hs`HUxfD$|`}-E2>#;k!horcKS)jyk7got{4K@4(fjUQhW*jIuEUVZhx?5$d>cl=OvF61y%je?` z24BBE`;dr|TC9+zwDtgs*9e^}JycU?SV6VjhU6?owi8AQOoC%{;~vk}0vRWgM~+Ez zW>hO^8Jz9QWMeis3;jHrkK*slzr6Y`=w2@0GlLEc_SpY2(f;RvkO* zC`~=M>^k-0Jh9ut0TbQOeC^iqKuTX8!imx@+CUX^r*-43Fh!N)9f4%XhR=iW5yZta z+Uk#YOq%#>e;4^hsMxu81XsjPZA~mDhqELe zT=-hfd_|SpJC44}X8~>aRy3MXy$bSwXFq*LB#ttR9AA#!d0J-^l}_I5>!UGF#EWKO zJ8G?I$0d0VRYh3LsK0LfVqKAD{S!2TW&ZR$aP}%gr<28ls8Nt?k=*>=wu?xu@61UqbL}H(3<)zbJ$*6e95FR&*=Mmri#{^&8kNzUW{r`2<|1Bgqm|yS<5@gly zJvLH5*hCJ4?Gih!!`!ngD_pw7x;0I_|7DKz|5NR>|5d*?Hw2PO%0mD__z4iW6ma=e z9kwvth2p(TW<#?Si$*}xSL&Zmyoo8R)G#=}mF9 z1UU)IfObSPFCK4j!UJyyp#eR|SG`gKobPbzO%`1C=MVrErmt5%)NAaMwGF8^lAajM z&&X$`80nl>r$v4IRx1U%F>gQcj97eLHwiK;`?f?pB~)kV>6@$)>L_OJk!Nq0uG8xH zC%%?|tD5l0luCGT#hC>sQ+ktk=&KtVwVccwLB1EZxvF)y>A%w19w!;fs@m=jvgJiW zB&+2+;K~7()!^Im3oM}fZN{RL4A+ybhDC>`Qp@dFX7%4XV_tHzs|=-<*TnO)+7nE; z5>*z`WYg9!I7azL)qiz0qmjb5bak|o4SqsCCNgsAK3CWh^IhFr<=74Dk;yuj5?yI+ zy>GoG?)&PhP`0i~Sph>AK|3>JkjmK=$E(|(brGz=QcAC`zrGeY!Qg8XjR1SH9P39lJ6h3;Z2K;e)CD^R8R^3S0jZRg=6Cnp_fBcs1|&qVlR7rC*E-2Be~(9 zQ>pM|wPNrG6Kk*2d+{f()z)lf);CbGP`VvOf_k2|FNxt`yrfwoqtzB~fkrh0qf^Za zvRiR#_w2vMMt%3Y+7N~l)xiz2(=@&@DLfdxYBLDI$*M=>M-Oq+pXcnF`lbSnmj#y_t z6>FsI=hzvYxptdb>x-ZLN)%jksJv6Q$>XDb&8@=0(eI1uOavQO2GiBa`J7%1w{?vf z*mY0N3nX(wzS;R5A}n%K%b72`1)es=+q!;Ri>Lb1X&bF(nbn|+Nw<&8$2uj6GdnR! z7il%-+Kjq}lvHy;VjovUXotMees0IgjjKo*?y7t`uteLi8!rCUI_m}NM!vI<9#t{I zWJTsqFZZZectUpjosw^nuB{>yy}piNB4|d5xp;`(%f;PR>I&V8<^%d`%9FDXvS9FA zcXSx$aH!eC&KjZzJn28`-5W&?Bn$W}6&ZMawo@P63r{b+uG$jn2VxBr7oXt|ci>ny zWf?>=Q@^Cs%gt!aQi57jHYv03KEE{|(d)Z(| zRHiF@Rmq$%T8&k=y@^Cc-SJ(wB&trot$;(o{?fUdkqdBf4a)~g)N;AiZ$p6AtrFf< zEtH(mcdJlt<3Q-EXe#jf{$D6vCvN`kGK0II#{xgQY7>^UbvhpsH0 zeSA+3P_BN0P9(Ze%b?VMA0pyEKPzq!8mvR^ze^aDrAV?S7&LOywp>Y z;4P@S&Gzm}(zo>xS&Z_SRhLI`w8x>aE0$skA1G^%22>C&%vjD6hr{DoPpsK|*nsz# z%+wOrcACDjx+cH}&7y$0&LH_E7}6U}Eg+>$2k15L0$cI@)D>8=7g zsQWj<31+rhveFH%nPK?vWM(nWDx=dsMDPf=cacm!l`-Z2U?vA^@C3vLCvS; z2y${xoYrcjzJc7D)9u?EmRjow1@myAu8fb{N6Rt4%japWi*GXJJaTpAiKQv+E$L=E zuWIZ5z#C)V;?U~er7yK>g1-31%v;Gvve>|CfSQg62wt{zJ`EK7+V=Sk9UP7!fCt0U z4xd4V)XrQ@0IRc86V29Qu7nvY`~)$b^4Cfh9XTH$=B7=o_j%trwr8Zl?CL1gU7`hE zNJ7GEc{@>?7GL)AH~((J^$y*=5Jz`TD7~Reu;$tUOW%^pPU;zKMCR)2V``V7T!x|z zB4o9|Rs1dCMJ8q_rQsRRP?g~Hnyjc<@n9yh?6%XpFYn($gY0-2qz7sasK~3VnmvM+tk;QA1$_J4wenweeh^1$cOtM)Y%ynN^o;GU9X#QXirsV$*la zN1OPm=}5t&r(W#qO_N2sw*!y!hl$J1NN>T;i9Fb`*MSgWHru1L;royjT=~d;w$kkS z(c?aEF544W@O#X6?^KqB#!tuJ=%&?=fK)Wd4-?;TBmiqw-C-E&9o{n==;-f20|)d?3L+yE*kd>uVnW*%JMO zXgnqM%R4WT1))+`ELe98ZLkcbo^etmqx5a0fMj#>DajA-P9PMz@GKdwHZTb4BxDqj zVMd=5JE{h}!ZNxUU8}{f4ZdYUH#i$bH^tY!zyUO7&d+$vA7~8E6d&u=Z}Q*?$S^Pk z?q_3s%{_i~r2sZqcAOR#OAi+xt1KN)M#fGT&>iXZ*U2!WDz8bvhI5ioic!qBBBJJ=2C=TEcPJdNsXgGj2~C@qq#C+-@{w6(l>R#YIi0ude3Se0<@{4~1hx5yTvu&+nMvLhVv z_xl%HexBsWc;}YLyF9~A>8W?5+yhkG)}^f;rAN2hcO94o);Y8f!Q# zw#QYme_{5mo%2%M$>+9{C_UKkmid*wmga~KJAv4&QkYr$Y~c4a{sdp^{$&QLzuIta zj8$SQL)y7z^IQ2{XAm#trPNXd*DyZM{F`Roe8VZ&X4+xGau2k>6YA^;+-IxkK!_Zi zwjPLnXnrsFxHxLYdyd5M)~FAl`jB&P-HtcAxNI-36C+5+S)M4OOS`4>0BFa)^8i=r z$;{p9xK?Xc;u&z6PM7Q`6EbIxNM=*trtDtTU6$3Svg}~AmfP)9nQ0jqpIF?bVi8@P zdE(RHugTooV1InyDs{Zr{ADw%s=^8jM&!KY(x=oXiFZ5rr#+InfL0zCjklL~t;|fX z)!2A72kLHZ|0PLllH-=#tlOKKiJc!J;y5nhubc3?1i086M_;97zNAB9;VCtrvgmcx zq)EGJcG*mRbWzf#28-yrNktm^ZUVH_zdMtpq4q#r4XMvtmEi)DDtzh4Vw6=+z7eu$ zScEZbNKy z287yJeUl>=q(Y`WBlt;r1`bA_&&!hTOKW}sIFSkJs6cVtf*;E}F~*iGfu>-F0Y|si zIy5yYST?vy`rgpG5iQfnrCxGA)#!2Zs|+qVwZ9v5HsE9cQS)bM=n}MHYW}I4gS5eF 
+{% endblock bodytab1 %}
 [further hunks for shelly/webif/templates/index.html: the bodytab2 title now includes the number of
 discovered devices and the tab renders them as a device table with the columns Shelly ID, API,
 Devicetype, Online, Mac Adresse, IP Adresse, Firmware Version, neue Firmware, rssi and
 Item(s) konfiguriert; bodytab3 shows the broker information (version, active clients, subscriptions,
 stored and retained messages, uptime) and, if broker_monitoring is enabled, the average messages per
 minute received and sent; bodytab4 lists the items with mqtt_topic_in/mqtt_topic_out together with
 type, value, last update and last change]
    {% endblock %} From 3594db9710ea1d2929747f088a403c0d0ab43c3d Mon Sep 17 00:00:00 2001 From: msinn Date: Wed, 23 Aug 2023 21:06:31 +0200 Subject: [PATCH 333/775] shelly: Status input support for shellyrgbw2; Bump to v1.6.1 --- shelly/__init__.py | 21 +++++++++++++++++++-- shelly/locale.yaml | 23 ++++++++++++----------- shelly/plugin.yaml | 2 +- shelly/webif/templates/index.html | 6 +++--- 4 files changed, 35 insertions(+), 17 deletions(-) diff --git a/shelly/__init__.py b/shelly/__init__.py index 8dbc14b70..6dbb452f1 100755 --- a/shelly/__init__.py +++ b/shelly/__init__.py @@ -39,7 +39,7 @@ class Shelly(MqttPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.6.0' + PLUGIN_VERSION = '1.6.1' def __init__(self, sh): @@ -557,7 +557,7 @@ def on_mqtt_announce(self, topic, payload, qos=None, retain=None): self.shelly_devices[shelly_id]['conf_id'] = config_data['shelly_conf_id'] if config_data['shelly_list_attrs']: self.shelly_devices[shelly_id]['list_attrs'] = config_data['shelly_list_attrs'] - self.update_items_from_status(shelly_id, '', 'online', config_data.get('online', True)) + self.update_items_from_status(shelly_id, '', 'online', self.shelly_devices.get('online', True)) except Exception as e: self.logger.exception(f"{inspect.stack()[0][3]}: Exception {e.__class__.__name__}: {e}\n- mqtt-topic={topic}\n- mqtt-payload={payload}") @@ -979,6 +979,23 @@ def handle_gen1_status(self, shelly_id: str, property, topic, payload, group=Non elif property == 'has_update': pass + elif property in ['mode']: # for SHRGBW2 + self.update_items_from_status(shelly_id, '', property, sub_status) + elif property == 'lights': # for SHRGBW2 + if len(sub_status) > 0: + light = sub_status[0] + self.update_items_from_status(shelly_id, 'light', 'on', light.get('ison', False)) + self.update_items_from_status(shelly_id, 'light', 'mode', light.get('mode', '')) + self.update_items_from_status(shelly_id, 'light', 'red', light.get('mode', 0)) + self.update_items_from_status(shelly_id, 'light', 'green', light.get('mode', 0)) + self.update_items_from_status(shelly_id, 'light', 'blue', light.get('mode', 0)) + self.update_items_from_status(shelly_id, 'light', 'white', light.get('mode', 0)) + self.update_items_from_status(shelly_id, 'light', 'gain', light.get('gain', 0)) + self.update_items_from_status(shelly_id, 'light', 'effect', light.get('effect', 0)) + self.update_items_from_status(shelly_id, 'light', 'transition', light.get('transition', 0)) + self.update_items_from_status(shelly_id, 'light', 'power', light.get('power', 0)) + self.update_items_from_status(shelly_id, 'light', 'overpower', light.get('overpower', False)) + elif property == 'sensor': self.update_items_from_status(shelly_id, 'sensor', 'state', sub_status['state'], 'info') diff --git a/shelly/locale.yaml b/shelly/locale.yaml index b2fa61f81..8eb3e27a6 100755 --- a/shelly/locale.yaml +++ b/shelly/locale.yaml @@ -1,19 +1,20 @@ # translations for the web interface plugin_translations: # Translations for the plugin specially for the web interface - 'Mac Adresse': {'de': '=', 'en': 'Mac Address'} - 'IP Adresse': {'de': '=', 'en': 'IP Address'} - 'Firmware Version': {'de': '=', 'en': '='} - 'neue Firmware': {'de': 'neue FW
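
The shellyrgbw2 support added in the next patch reads the Gen1 'lights' status array and forwards the
individual values per property. As a rough, standalone sketch (not plugin code) of how such a payload
can be flattened: the key names follow the fields the patch reads (ison, mode, red, green, blue, white,
gain, effect, transition, power, overpower), the sample values are invented, and the helper name
flatten_rgbw2_status is purely illustrative.

status = {
    'mode': 'color',
    'lights': [{
        'ison': True, 'mode': 'color',
        'red': 255, 'green': 128, 'blue': 0, 'white': 0,
        'gain': 75, 'effect': 0, 'transition': 0,
        'power': 9.4, 'overpower': False,
    }],
}

def flatten_rgbw2_status(payload: dict) -> dict:
    """Flatten the first light channel of a Gen1 shellyrgbw2 status into {property: value}."""
    result = {'mode': payload.get('mode', '')}
    lights = payload.get('lights', [])
    if lights:
        light = lights[0]  # the patch below only evaluates the first entry of 'lights'
        for key, default in [('ison', False), ('red', 0), ('green', 0), ('blue', 0),
                             ('white', 0), ('gain', 0), ('effect', 0),
                             ('transition', 0), ('power', 0), ('overpower', False)]:
            result[key] = light.get(key, default)
    return result

print(flatten_rgbw2_status(status))

In the plugin the same fields are passed on one by one via update_items_from_status(), as the diff to
handle_gen1_status() below shows.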
    verfügbar', 'en': 'new FW
    available'} - 'Item(s) konfiguriert': {'de': 'Item(s)
    konfiguriert', 'en': 'item(s)
    configured'} + 'Mac Adresse': {'de': '=', 'en': 'Mac Address'} + 'IP Adresse': {'de': '=', 'en': 'IP Address'} + 'Firmware Version': {'de': '=', 'en': '='} + 'neue Firmware': {'de': 'neue FW
    verfügbar', 'en': 'new FW
    available'} + 'Item(s) konfiguriert': {'de': 'Item(s)
    konfiguriert', 'en': 'item(s)
    configured'} -# 'Message Durchsatz': {'de': '=', 'en': 'Message throughput'} -# 'letzte Minute': {'de': '=', 'en': 'last minute'} -# 'letzte 5 Min.': {'de': '=', 'en': 'last 5 min'} -# 'letzte 15 Min.': {'de': '=', 'en': 'last 15 min'} +# 'Message Durchsatz': {'de': '=', 'en': 'Message throughput'} +# 'letzte Minute': {'de': '=', 'en': 'last minute'} +# 'letzte 5 Min.': {'de': '=', 'en': 'last 5 min'} +# 'letzte 15 Min.': {'de': '=', 'en': 'last 15 min'} - 'Unbehandelter Status': {'de': '=', 'en': 'Unhandled status'} - 'Unbehandelter Substatus': {'de': '=', 'en': 'Unhandled sub-status'} + 'Unbehandelter Status': {'de': '=', 'en': 'Unhandled status'} + 'Unbehandelter Substatus': {'de': '=', 'en': 'Unhandled sub-status'} + 'Unbehandelter {gen} Status': {'de': '=', 'en': 'Unhandled {gen} status'} # Alternative format for translations of longer texts: 'Durchschnittlich Messages je Minute empfangen': diff --git a/shelly/plugin.yaml b/shelly/plugin.yaml index d3624cc41..c5ad270c7 100755 --- a/shelly/plugin.yaml +++ b/shelly/plugin.yaml @@ -12,7 +12,7 @@ plugin: # documentation: http://smarthomeng.de/user/plugins/mqtt2/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1451853-support-thread-für-das-shelly-plugin - version: 1.6.0 # Plugin version + version: 1.6.1 # Plugin version sh_minversion: 1.9.5.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance diff --git a/shelly/webif/templates/index.html b/shelly/webif/templates/index.html index 4b6476291..b9f636d75 100755 --- a/shelly/webif/templates/index.html +++ b/shelly/webif/templates/index.html @@ -113,15 +113,15 @@ targets: [2], "className": "devicetype" }, { - title: '{{ _('API') }}', + title: "{{ _('API') }}", targets: [3], "className": "deviceapi" }, { - title: '{{ _('Online') }}', + title: "{{ _('Online') }}", targets: [4], "className": "devicetype" }, { - title: '{{ _('Mac Adresse') }}', + title: "{{ _('Mac Adresse') }}", targets: [5], "className": "devicemac" }, { From cb38351a1592fbf61cb1e76ecfccabaae73fd665 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 23 Aug 2023 21:54:39 +0200 Subject: [PATCH 334/775] stateengine plugin: initialize suspendduration with negative value in struct. As soon as suspend time setting is negative, the default value from etc/plugin.yaml is used. (before default suspend time was ignored when using state_suspend struct --- stateengine/StateEngineEval.py | 2 +- stateengine/StateEngineItem.py | 13 +++++++++++-- stateengine/__init__.py | 5 ++++- stateengine/plugin.yaml | 17 +++++------------ 4 files changed, 21 insertions(+), 16 deletions(-) diff --git a/stateengine/StateEngineEval.py b/stateengine/StateEngineEval.py index a05af27e4..a3f8f1583 100755 --- a/stateengine/StateEngineEval.py +++ b/stateengine/StateEngineEval.py @@ -259,7 +259,7 @@ def insert_suspend_time(self, suspend_item_id, suspend_text="Ausgesetzt bis %X") suspend_remaining = suspend_time - suspend_over self._log_debug("Remaining suspend time: {0}", suspend_remaining) if suspend_remaining < 0: - self._log_debug("Eval-Method 'insert_suspend_time': Suspend should already be finished!") + self._log_debug("Eval-Method 'insert_suspend_time': Suspend time already over.") self._eval_lock.release() return "Suspend already over." 
suspend_until = self._abitem.shtime.now() + datetime.timedelta(seconds=suspend_remaining) diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py index 768421fc7..48643a55e 100755 --- a/stateengine/StateEngineItem.py +++ b/stateengine/StateEngineItem.py @@ -225,7 +225,7 @@ def __init__(self, smarthome, item, se_plugin): # Init suspend settings self.__suspend_time = StateEngineValue.SeValue(self, "Suspension time on manual changes", False, "num") - self.__suspend_time.set_from_attr(self.__item, "se_suspend_time", StateEngineDefaults.suspend_time) + self.__suspend_time.set_from_attr(self.__item, "se_suspend_time", StateEngineDefaults.suspend_time.get()) # Init laststate and previousstate items/values self.__config_issues = {} @@ -307,6 +307,7 @@ def __init__(self, smarthome, item, se_plugin): self.__update_original_caller = None self.__update_original_source = None self.__using_default_instant_leaveaction = False + self.__using_default_suspendtime = False # Check item configuration self.__check_item_config() @@ -453,6 +454,13 @@ def run_queue(self): self.__logger.debug("Current instant leave action {}, default {}, currently using default {}", self.__instant_leaveaction, self.__default_instant_leaveaction, self.__using_default_instant_leaveaction) + if self.__suspend_time.get() < 0: + self.__using_default_suspendtime = True + else: + self.__using_default_suspendtime = False + self.__logger.debug("Current suspend time {}, default {}, currently using default {}", + self.__suspend_time, StateEngineDefaults.suspend_time, + self.__using_default_suspendtime) self.update_lock.acquire(True, 10) while not self.__queue.empty() and self.__ab_alive: job = self.__queue.get() @@ -501,7 +509,8 @@ def run_queue(self): # Update current values StateEngineCurrent.update() - self.__variables["item.suspend_time"] = self.__suspend_time.get() + self.__variables["item.suspend_time"] = StateEngineDefaults.suspend_time.get() \ + if self.__using_default_suspendtime is True else self.__suspend_time.get() self.__variables["item.suspend_remaining"] = -1 self.__variables["item.instant_leaveaction"] = self.__default_instant_leaveaction.get() \ if self.__using_default_instant_leaveaction is True else self.__instant_leaveaction.get() diff --git a/stateengine/__init__.py b/stateengine/__init__.py index 143a8313f..ebbe6419d 100755 --- a/stateengine/__init__.py +++ b/stateengine/__init__.py @@ -85,7 +85,10 @@ def __init__(self, sh): self.logger.info("Set default log level to {}, Startup log level to {}.".format(SeLogger.log_level, SeLogger.startup_log_level)) self.logger.info("Init StateEngine (log_level={0}, log_directory={1})".format(log_level, log_directory)) StateEngineDefaults.startup_delay = self.get_parameter_value("startup_delay_default") - StateEngineDefaults.suspend_time = self.get_parameter_value("suspend_time_default") + suspend_time = self.get_parameter_value("suspend_time_default") + suspend_time_value = StateEngineValue.SeValue(self, "Default Suspend Time", False, "num") + suspend_time_value.set(suspend_time) + StateEngineDefaults.suspend_time = suspend_time_value default_instant_leaveaction = self.get_parameter_value("instant_leaveaction") self.__default_instant_leaveaction = StateEngineValue.SeValue(self, "Default Instant Leave Action", False, "bool") self.__default_instant_leaveaction.set(default_instant_leaveaction) diff --git a/stateengine/plugin.yaml b/stateengine/plugin.yaml index 086192832..e1acd55b1 100755 --- a/stateengine/plugin.yaml +++ b/stateengine/plugin.yaml @@ -555,7 +555,7 @@ 
item_attributes: se_suspend_time: type: foo - valid_min: 1 + valid_min: -60 valid_max: 86400 description: de: 'Dauer der Unterbrechung der automatischen Steuerung nach manuellen Aktionen in Sekunden' @@ -747,13 +747,6 @@ item_structs: cache: True initial_value: -1 - settings_edited: - type: bool - name: settings editiert - eval_trigger: ...settings.* - eval: not sh..self() - on_update: ...retrigger = True if sh..self.property.prev_update_age > 0.1 else None - rules: name: Zustandsautomat remark: configure your se_item_* and eval_triggers here @@ -1075,7 +1068,7 @@ item_structs: visu_acl: rw cache: True enforce_updates: True - initial_value: 60 + initial_value: -1 on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None @@ -1101,7 +1094,7 @@ item_structs: visu_acl: rw cache: True enforce_updates: True - initial_value: 60 + initial_value: -1 on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None @@ -1127,7 +1120,7 @@ item_structs: visu_acl: rw cache: True enforce_updates: True - initial_value: 60 + initial_value: -1 on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None @@ -1174,7 +1167,7 @@ item_structs: visu_acl: rw cache: True enforce_updates: True - initial_value: 60 + initial_value: -1 on_change: .seconds = value * 60 if not sh..self.property.last_change_by in ["On_Change:{}".format(sh..seconds.property.path), "On_Change:{}".format(sh..duration_format.property.path)] else None on_update: .seconds = value * 60 if "Init" in sh..self.property.last_update_by else None From f0079dc85b3825b9e972f413c7159b04493c5900 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 23 Aug 2023 21:58:48 +0200 Subject: [PATCH 335/775] oppo plugin: update plugin.yaml with default retry and suspend settings --- oppo/plugin.yaml | 113 +++++++++++++++++++++++++++++------------------ 1 file changed, 69 insertions(+), 44 deletions(-) diff --git a/oppo/plugin.yaml b/oppo/plugin.yaml index b4b4da9b8..c0c504e24 100755 --- a/oppo/plugin.yaml +++ b/oppo/plugin.yaml @@ -15,6 +15,60 @@ plugin: parameters: + host: + type: str + mandatory: false + + description: + de: Netzwerkziel/-host + en: network host + + port: + type: int + default: 23 + + description: + de: Port für Netzwerkverbindung + en: network port + + serialport: + type: str + mandatory: false + + description: + de: Serieller Anschluss (z.B. /dev/ttyUSB0 oder COM1) + en: serial port (e.g. 
/dev/ttyUSB0 or COM1) + + conn_type: + type: str + mandatory: false + valid_list: + - '' + - net_tcp_request + - net_tcp_client + - net_tcp_jsonrpc + - net_udp_server + - serial + - serial_async + + description: + de: Verbindungstyp + en: connection type + + command_class: + type: str + default: SDPCommandParseStr + valid_list: + - SDPCommand + - SDPCommandStr + - SDPCommandParseStr + - SDPCommandJSON + - SDPCommandViessmann + + description: + de: Klasse für Verarbeitung von Kommandos + en: class for command processing + model: type: str mandatory: false @@ -60,7 +114,7 @@ parameters: autoconnect: type: bool - default: true + mandatory: false description: de: Automatisches Verbinden bei Senden @@ -82,59 +136,30 @@ parameters: de: Pause zwischen Verbindungsversuchen en: wait time between connect retries - host: - type: str - mandatory: false - - description: - de: Netzwerkziel/-host - en: network host - - port: - type: int - default: 23 + retry_cycle: + type: num + default: 30 description: - de: Port für Netzwerkverbindung - en: network port + de: Pause zwischen Durchgängen von Verbindungsversuchen + en: wait time between connect retry rounds - serialport: - type: str - mandatory: false + retry_suspend: + type: num + default: 3 description: - de: Serieller Anschluss (z.B. /dev/ttyUSB0 oder COM1) - en: serial port (e.g. /dev/ttyUSB0 or COM1) + de: Anzahl von Durchgängen vor Verbindungsabbruch oder Suspend-Modus + en: number of connect rounds before giving up / entering suspend mode - conn_type: + suspend_item: type: str - mandatory: false - valid_list: - - '' - - net_tcp_request - - net_tcp_client - - net_tcp_jsonrpc - - net_udp_server - - serial - - serial_async + default: '' description: - de: Verbindungstyp - en: connection type + de: Item-Pfad für das Standby-Item + en: item path for standby switch item - command_class: - type: str - default: SDPCommandParseStr - valid_list: - - SDPCommand - - SDPCommandStr - - SDPCommandParseStr - - SDPCommandJSON - - SDPCommandViessmann - - description: - de: Klasse für Verarbeitung von Kommandos - en: class for command processing item_attributes: From 2efbe28aaee64222928028fd2385d29c55be6303 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 23 Aug 2023 22:01:22 +0200 Subject: [PATCH 336/775] lms plugin: update plugin.yaml with default retry and suspend attributes --- lms/plugin.yaml | 97 +++++++++++++++++++++++++++++-------------------- 1 file changed, 57 insertions(+), 40 deletions(-) diff --git a/lms/plugin.yaml b/lms/plugin.yaml index 81a482357..0f76d7b2c 100755 --- a/lms/plugin.yaml +++ b/lms/plugin.yaml @@ -15,14 +15,6 @@ plugin: parameters: - standby_item_path: - type: str - default: '' - - description: - de: Item-Pfad für das Standby-Item - en: item path for standby switch item - host: type: str mandatory: true @@ -55,38 +47,6 @@ parameters: de: Zeilen-/Antwortbegrenzer en: line or reply terminator - autoreconnect: - type: bool - default: true - - description: - de: Automatisches Neuverbinden bei Abbruch - en: automatic reconnect on disconnect - - autoconnect: - type: bool - default: true - - description: - de: Automatisches Verbinden bei Senden - en: automatic connect on send - - connect_retries: - type: num - default: 5 - - description: - de: Anzahl Verbindungsversuche - en: number of connect retries - - connect_cycle: - type: num - default: 3 - - description: - de: Pause zwischen Verbindungsversuchen - en: wait time between connect retries - web_port: type: int default: 9000 @@ -133,6 +93,63 @@ parameters: de: Klasse für Verarbeitung von 
Kommandos en: class for command processing + autoreconnect: + type: bool + default: true + + description: + de: Automatisches Neuverbinden bei Abbruch + en: automatic reconnect on disconnect + + autoconnect: + type: bool + mandatory: false + + description: + de: Automatisches Verbinden bei Senden + en: automatic connect on send + + connect_retries: + type: num + default: 5 + + description: + de: Anzahl Verbindungsversuche + en: number of connect retries + + connect_cycle: + type: num + default: 3 + + description: + de: Pause zwischen Verbindungsversuchen + en: wait time between connect retries + + retry_cycle: + type: num + default: 30 + + description: + de: Pause zwischen Durchgängen von Verbindungsversuchen + en: wait time between connect retry rounds + + retry_suspend: + type: num + default: 3 + + description: + de: Anzahl von Durchgängen vor Verbindungsabbruch oder Suspend-Modus + en: number of connect rounds before giving up / entering suspend mode + + suspend_item: + type: str + default: '' + + description: + de: Item-Pfad für das Standby-Item + en: item path for standby switch item + + item_attributes: sqb_command: From f9358749eb32408cdb9f32b83cc259d751169c03 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 23 Aug 2023 22:04:07 +0200 Subject: [PATCH 337/775] denon plugin: update plugin.yaml with default reconnect and suspend attributes --- denon/plugin.yaml | 96 ++++++++++++++++++++++++++++------------------- 1 file changed, 57 insertions(+), 39 deletions(-) diff --git a/denon/plugin.yaml b/denon/plugin.yaml index f6d3a79b0..e73b38f68 100755 --- a/denon/plugin.yaml +++ b/denon/plugin.yaml @@ -15,13 +15,6 @@ plugin: parameters: - standby_item_path: - type: str - default: '' - description: - de: 'Item-Pfad für das Standby-Item' - en: 'item path for standby switch item' - model: type: str mandatory: false @@ -61,38 +54,6 @@ parameters: de: Binärer Übertragungsmodus en: binary communication mode - autoreconnect: - type: bool - default: true - - description: - de: Automatisches Neuverbinden bei Abbruch - en: automatic reconnect on disconnect - - autoconnect: - type: bool - default: true - - description: - de: Automatisches Verbinden bei Senden - en: automatic connect on send - - connect_retries: - type: num - default: 5 - - description: - de: Anzahl Verbindungsversuche - en: number of connect retries - - connect_cycle: - type: num - default: 3 - - description: - de: Pause zwischen Verbindungsversuchen - en: wait time between connect retries - host: type: str mandatory: false @@ -140,6 +101,63 @@ parameters: de: Klasse für Verarbeitung von Kommandos en: class for command processing + autoreconnect: + type: bool + default: true + + description: + de: Automatisches Neuverbinden bei Abbruch + en: automatic reconnect on disconnect + + autoconnect: + type: bool + mandatory: false + + description: + de: Automatisches Verbinden bei Senden + en: automatic connect on send + + connect_retries: + type: num + default: 5 + + description: + de: Anzahl Verbindungsversuche + en: number of connect retries + + connect_cycle: + type: num + default: 3 + + description: + de: Pause zwischen Verbindungsversuchen + en: wait time between connect retries + + retry_cycle: + type: num + default: 30 + + description: + de: Pause zwischen Durchgängen von Verbindungsversuchen + en: wait time between connect retry rounds + + retry_suspend: + type: num + default: 3 + + description: + de: Anzahl von Durchgängen vor Verbindungsabbruch oder Suspend-Modus + en: number of connect rounds before giving up / entering suspend 
mode + + suspend_item: + type: str + default: '' + + description: + de: Item-Pfad für das Standby-Item + en: item path for standby switch item + + item_attributes: denon_command: From 88dbe653eed0873d3d6ccd3aeb4b90baac7f7216 Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Wed, 23 Aug 2023 22:14:56 +0200 Subject: [PATCH 338/775] NEW Epson Plugin: based on SDP control of Epson projectors are supported (minimal setup for now) --- epson/__init__.py | 115 ++++++++++ epson/commands.py | 42 ++++ epson/datatypes.py | 11 + epson/plugin.yaml | 297 +++++++++++++++++++++++++ epson/user_doc.rst | 87 ++++++++ epson/webif/static/img/plugin_logo.png | Bin 0 -> 72038 bytes 6 files changed, 552 insertions(+) create mode 100755 epson/__init__.py create mode 100755 epson/commands.py create mode 100755 epson/datatypes.py create mode 100755 epson/plugin.yaml create mode 100755 epson/user_doc.rst create mode 100644 epson/webif/static/img/plugin_logo.png diff --git a/epson/__init__.py b/epson/__init__.py new file mode 100755 index 000000000..89fa6676a --- /dev/null +++ b/epson/__init__.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2016 +######################################################################### +# This file is part of SmartHomeNG +# +# Denon AV plugin for SmartDevicePlugin class +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG If not, see . +######################################################################### + +import builtins +import os +import sys + +if __name__ == '__main__': + builtins.SDP_standalone = True + + class SmartPlugin(): + pass + + class SmartPluginWebIf(): + pass + + BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-3]) + sys.path.insert(0, BASE) + +else: + builtins.SDP_standalone = False + +from lib.model.sdp.globals import (PLUGIN_ATTR_CONNECTION, PLUGIN_ATTR_SERIAL_PORT, PLUGIN_ATTR_CONN_TERMINATOR, CONN_NULL, CONN_SER_ASYNC) +from lib.model.smartdeviceplugin import SmartDevicePlugin, Standalone + + +class epson(SmartDevicePlugin): + """ Device class for Epson projectors. """ + + PLUGIN_VERSION = '1.0.0' + + def _set_device_defaults(self): + + if PLUGIN_ATTR_SERIAL_PORT in self._parameters and self._parameters[PLUGIN_ATTR_SERIAL_PORT]: + self._parameters[PLUGIN_ATTR_CONNECTION] = CONN_SER_ASYNC + else: + self.logger.error('No serialport set, connection not possible. 
Using dummy connection, plugin will not work') + self._parameters[PLUGIN_ATTR_CONNECTION] = CONN_NULL + + b = self._parameters[PLUGIN_ATTR_CONN_TERMINATOR].encode() + b = b.decode('unicode-escape').encode() + self._parameters[PLUGIN_ATTR_CONN_TERMINATOR] = b + + def _transform_send_data(self, data=None, **kwargs): + if isinstance(data, dict): + data['limit_response'] = self._parameters[PLUGIN_ATTR_CONN_TERMINATOR] + data['payload'] = f'{data.get("payload", "")}{data["limit_response"].decode("unicode-escape")}' + return data + + def on_data_received(self, by, data, command=None): + + commands = None + if command is not None: + self.logger.debug(f'received data "{data}" from {by} for command {command}') + commands = [command] + else: + # command == None means that we got raw data from a callback and + # don't know yet to which command this belongs to. So find out... + self.logger.debug(f'received data "{data}" from {by} without command specification') + + # command can be a string (classic single command) or + # - new - a list of strings if multiple commands are identified + # in that case, work on all strings + commands = self._commands.get_command_from_reply(data) + if not commands: + if self._discard_unknown_command: + self.logger.debug(f'data "{data}" did not identify a known command, ignoring it') + return + else: + self.logger.debug(f'data "{data}" did not identify a known command, forwarding it anyway for {self._unknown_command}') + self._dispatch_callback(self._unknown_command, data, by) + + # TODO: remove later? + assert(isinstance(commands, list)) + + # process all commands + for command in commands: + custom = None + if self.custom_commands: + custom = self._get_custom_value(command, data) + + base_command = command + value = None + try: + value = self._commands.get_shng_data(command, data) + except OSError as e: + self.logger.warning(f'received data "{data}" for command {command}, error {e} occurred while converting. Discarding data.') + else: + self.logger.debug(f'received data "{data}" for command {command} converted to value {value}') + self._dispatch_callback(command, value, by) + + self._process_additional_data(base_command, data, value, custom, by) + +if __name__ == '__main__': + s = Standalone(epson, sys.argv[0]) diff --git a/epson/commands.py b/epson/commands.py new file mode 100755 index 000000000..6e7cf1260 --- /dev/null +++ b/epson/commands.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +""" commands for dev epson + +Most commands send a string (fixed for reading, attached data for writing) +while parsing the response works by extracting the needed string part by +regex. Some commands translate the device data into readable values via +lookups. 
+""" + +models = { + 'ALL': ['power', 'source'] +} + +commands = { + 'power': {'read': True, 'write': True, 'read_cmd': 'PWR?', 'write_cmd': 'PWR {VALUE}', 'item_type': 'bool', 'dev_datatype': 'onoff', 'reply_pattern': ['^(?:\:+)?\s?PWR=0(0|1)', '^:+WR=0(0|1)'], 'item_attrs': {'cycle': '60', 'initial': True}}, + 'source': {'read': True, 'write': True, 'write_cmd': 'SOURCE {RAW_VALUE_UPPER}', 'item_type': 'str', 'dev_datatype': 'raw', 'reply_pattern': 'SOURCE {LOOKUP}', 'lookup': 'SOURCE'} +} + +lookups = { + 'ALL': { + 'SOURCE': { + '11': 'Analog', + '12': 'Digital', + '13': 'Video', + '14': 'YCbCr (4 Component)', + '15': 'YPbPr (4 Component)', + '1F': 'Auto', + '21': 'Analog', + '22': 'Video', + '23': 'YCbCr', + '24': 'YPbPr (5 Component)', + '25': 'YPbPr', + '2F': 'Auto', + 'A0': 'HDMI', + '41': 'Video', + '42': 'S-Video', + '43': 'YCbCr', + '44': 'YPbPr' + } + } +} diff --git a/epson/datatypes.py b/epson/datatypes.py new file mode 100755 index 000000000..8dce04e3e --- /dev/null +++ b/epson/datatypes.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab + +import lib.model.sdp.datatypes as DT + +class DT_onoff(DT.Datatype): + def get_send_data(self, data, **kwargs): + return 'ON' if data else 'OFF' + + def get_shng_data(self, data, type=None, **kwargs): + return False if data == '0' else True if data == '1' else None diff --git a/epson/plugin.yaml b/epson/plugin.yaml new file mode 100755 index 000000000..947df4d6c --- /dev/null +++ b/epson/plugin.yaml @@ -0,0 +1,297 @@ + +plugin: + type: interface + description: Epson Projectors + maintainer: OnkelAndy + tester: Morg + state: develop + keywords: iot device av epson sdp + version: 1.0.0 + sh_minversion: 1.9.5 + py_minversion: 3.7 + multi_instance: false + restartable: true + classname: epson + +parameters: + + model: + type: str + mandatory: false + valid_list: + - '' + - TW-5000 + + description: + de: Modellauswahl + en: model selection + + timeout: + type: num + default: 3 + + description: + de: Timeout für Geräteantwort + en: timeout for device replies + + terminator: + type: str + default: "\r" + + description: + de: Zeilen-/Antwortbegrenzer + en: line or reply terminator + + binary: + type: bool + default: false + + description: + de: Binärer Übertragungsmodus + en: binary communication mode + + baudrate: + type: num + default: 9600 + + description: + de: Serielle Übertragungsgeschwindigkeit + en: serial transmission speed + + bytesize: + type: num + default: 8 + + description: + de: Anzahl Datenbits + en: number of data bits + + parity: + type: str + default: N + valid_list: + - N + - E + - O + - M + - S + + description: + de: Parität + en: parity + + stopbits: + type: num + default: 1 + + description: + de: Anzahl Stopbits + en: number of stop bits + + host: + type: str + mandatory: false + + description: + de: Netzwerkziel/-host + en: network host + + port: + type: int + default: 23 + + description: + de: Port für Netzwerkverbindung + en: network port + + serialport: + type: str + mandatory: false + + description: + de: Serieller Anschluss (z.B. /dev/ttyUSB0 oder COM1) + en: serial port (e.g. 
/dev/ttyUSB0 or COM1) + + conn_type: + type: str + default: serial_async + valid_list: + - '' + - net_tcp_client + - serial_async + + description: + de: Verbindungstyp + en: connection type + + command_class: + type: str + default: SDPCommandParseStr + valid_list: + - SDPCommand + - SDPCommandParseStr + + description: + de: Klasse für Verarbeitung von Kommandos + en: class for command processing + + autoreconnect: + type: bool + default: true + + description: + de: Automatisches Neuverbinden bei Abbruch + en: automatic reconnect on disconnect + + autoconnect: + type: bool + mandatory: false + + description: + de: Automatisches Verbinden bei Senden + en: automatic connect on send + + connect_retries: + type: num + default: 5 + + description: + de: Anzahl Verbindungsversuche + en: number of connect retries + + connect_cycle: + type: num + default: 3 + + description: + de: Pause zwischen Verbindungsversuchen + en: wait time between connect retries + + retry_cycle: + type: num + default: 30 + + description: + de: Pause zwischen Durchgängen von Verbindungsversuchen + en: wait time between connect retry rounds + + retry_suspend: + type: num + default: 3 + + description: + de: Anzahl von Durchgängen vor Verbindungsabbruch oder Suspend-Modus + en: number of connect rounds before giving up / entering suspend mode + + suspend_item: + type: str + default: '' + + description: + de: Item-Pfad für das Standby-Item + en: item path for standby switch item + + +item_attributes: + + epson_command: + type: str + + description: + de: Legt das angegebene Kommando für das Item fest + en: Assigns the given command to the item + + epson_read: + type: bool + + description: + de: Item liest/erhält Werte vom Gerät + en: Item reads/receives data from the device + + epson_read_group: + type: list(str) + + description: + de: Weist das Item der angegebenen Gruppe zum gesammelten Lesen zu. Mehrere Gruppen können als Liste angegeben werden. + en: Assigns the item to the given group for collective reading. Multiple groups can be provided as a list. + + epson_read_cycle: + type: num + + description: + de: Konfiguriert ein Intervall in Sekunden für regelmäßiges Lesen + en: Configures a interval in seconds for cyclic read actions + + epson_read_initial: + type: bool + + description: + de: Legt fest, dass der Wert beim Start vom Gerät gelesen wird + en: Sets item value to be read from the device on startup + + epson_write: + type: bool + + description: + de: Änderung des Items werden an das Gerät gesendet + en: Changes to this item will be sent to the device + + epson_read_group_trigger: + type: str + + description: + de: Wenn diesem Item ein beliebiger Wert zugewiesen wird, werden alle zum Lesen konfigurierten Items der angegebenen Gruppe neu vom Gerät gelesen, bei Gruppe 0 werden alle zum Lesen konfigurierten Items neu gelesen. Das Item kann nicht gleichzeitig mit epson_command belegt werden. + en: When set to any value, all items configured for reading for the given group will update their value from the device, if group is 0, all items configured for reading will update. The item cannot be used with epson_command in parallel. + + epson_lookup: + type: str + + description: + de: Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim Start einmalig als dict oder list in das Item geschrieben. + en: The lookup table with the given name will be assigned to the item in dict or list format once on startup. 
+ + description_long: + de: "Der Inhalt der Lookup-Tabelle mit dem angegebenen Namen wird beim\nStart einmalig als dict oder list in das Item geschrieben.\n\n\nDurch Anhängen von \"#\" an den Namen der Tabelle kann die Art\nder Tabelle ausgewählt werden:\n- fwd liefert die Tabelle Gerät -> SmartHomeNG (Standard)\n- rev liefert die Tabelle SmartHomeNG -> Gerät\n- rci liefert die Tabelle SmarthomeNG -> Gerät in Kleinbuchstaben\n- list liefert die Liste der Namen für SmartHomeNG (z.B. für Auswahllisten in der Visu)" + en: "The lookup table with the given name will be assigned to the item\nin dict or list format once on startup.\n\n\nBy appending \"#\" to the tables name the type of table can\nbe selected:\n- fwd returns the table device -> SmartHomeNG (default)\n- rev returns the table SmartHomeNG -> device\n- rci returns the table SmartHomeNG -> device in lower case\n- list return the list of names for SmartHomeNG (e.g. for selection dropdowns in visu applications)" + +item_structs: + + power: + type: bool + epson_command: power + epson_read: true + epson_write: true + epson_read_group: [] + epson_read_initial: true + epson_read_cycle: '60' + + source: + type: str + epson_command: source + epson_read: true + epson_write: true + + ALL: + + read: + type: bool + enforce_updates: true + epson_read_group_trigger: ALL + + power: + type: bool + epson_command: power + epson_read: true + epson_write: true + epson_read_group: + - ALL + epson_read_initial: true + epson_read_cycle: '60' + + source: + type: str + epson_command: source + epson_read: true + epson_write: true +plugin_functions: NONE +logic_parameters: NONE diff --git a/epson/user_doc.rst b/epson/user_doc.rst new file mode 100755 index 000000000..3f2859a59 --- /dev/null +++ b/epson/user_doc.rst @@ -0,0 +1,87 @@ +.. index:: Plugins; epson +.. index:: epson + +===== +epson +===== + +.. image:: webif/static/img/plugin_logo.png + :alt: plugin logo + :width: 768px + :height: 249px + :scale: 25 % + :align: center + +Steuerung eines Epson Projektors über RS232 Schnittstelle. Theoretisch klappt auch +die Verbindung via TCP - wurde aber nicht getestet! + +Das Plugin unterstützt eine Reihe von Epson Projektoren. Folgendes Modell wurde +konkret berücksichtigt, andere Modelle funktionieren aber mit hoher Wahrscheinlichkeit +auch. + +- TW-5000 + + +Konfiguration +============= + +Diese Plugin Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind +unter :doc:`/plugins_doc/config/epson` beschrieben. + + +plugin.yaml +----------- + +.. code-block:: yaml + + # etc/plugin.yaml + epson: + plugin_name: epson + model: TW-5000 + timeout: 3 + terminator: "\r" + binary: false + autoreconnect: true + autoconnect: true + connect_retries: 5 + connect_cycle: 3 + serialport: /dev/ttyUSB0 + conn_type: serial_async + command_class: SDPCommandParseStr + + +Struct Vorlagen +=============== + +Der Itembaum sollte jedenfalls über die structs Funktion eingebunden werden. Hierzu gibt es vier +Varianten, wobei die letzte die optimale Lösung darstellt: + +- einzelne Struct-Teile wie epson.power +- epson.ALL: Hierbei werden sämtliche Kommandos eingebunden, die vom Plugin vorgesehen sind +- epson.TW-5000 bzw. die anderen unterstützten Modelle, um nur die relevanten Items einzubinden +- epson.MODEL: Es wird automatisch der Itembaum für das Modell geladen, das im plugin.yaml angegeben ist. + +Sollte das selbst verwendete Modell nicht im Plugin vorhanden sein, kann der Plugin Maintainer +angeschrieben werden, um das Modell aufzunehmen. + +.. 
code-block:: yaml
+
+    # items/my.yaml
+    Epson:
+        type: foo
+        struct: epson.MODEL
+
+
+Kommandos
+=========
+
+Die RS232 oder IP-Befehle des Geräts sind in der Datei `commands.py` hinterlegt. Etwaige
+Anpassungen und Ergänzungen sollten als Pull Request oder durch Rücksprache mit dem Maintainer
+direkt ins Plugin einfließen, damit diese auch von anderen Nutzer:innen eingesetzt werden können.
+
+
+Web Interface
+=============
+
+Aktuell ist kein Web Interface integriert. In naher Zukunft soll dies über die
+SmartDevicePlugin Bibliothek automatisch zur Verfügung gestellt werden.
diff --git a/epson/webif/static/img/plugin_logo.png b/epson/webif/static/img/plugin_logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..f9c47f41bf0814557ea8c41422bcbdda371dec0a
GIT binary patch
literal 72038
[base85-encoded PNG image data omitted]
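
In addition to the struct-based configuration shown in epson/user_doc.rst above, the epson_* item attributes defined in epson/plugin.yaml can also be assigned to items directly. A minimal sketch of such a manually configured item tree, based on the power and source commands from the item_structs; the item names (Beamer, update) and the read group name Beamer are illustrative assumptions only:

    # items/beamer.yaml -- hypothetical, roughly equivalent to parts of epson.ALL
    Beamer:

        update:
            type: bool
            enforce_updates: true
            # writing any value re-reads all items assigned to the group 'Beamer'
            epson_read_group_trigger: Beamer

        power:
            type: bool
            epson_command: power
            epson_read: true
            epson_write: true
            epson_read_initial: true
            epson_read_group:
              - Beamer

        source:
            type: str
            epson_command: source
            epson_read: true
            epson_write: true
            epson_read_group:
              - Beamer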
Date: Wed, 23 Aug 2023 22:15:48 +0200
Subject: [PATCH 339/775] denon plugin: add serial connection attributes to
 plugin.yaml

---
 denon/plugin.yaml | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/denon/plugin.yaml b/denon/plugin.yaml
index e73b38f68..3ab797598 100755
--- a/denon/plugin.yaml
+++ b/denon/plugin.yaml
@@ -54,6 +54,44 @@ parameters:
             de: Binärer Übertragungsmodus
             en: binary communication mode
 
+    baudrate:
+        type: num
+        default: 9600
+
+        description:
+            de: Serielle Übertragungsgeschwindigkeit
+            en: serial transmission speed
+
+    bytesize:
+        type: num
+        default: 8
+
+        description:
+            de: Anzahl Datenbits
+            en: number of data bits
+
+    parity:
+        type: str
+        default: N
+        valid_list:
+          - N
+          - E
+          - O
+          - M
+          - S
+
+        description:
+            de: Parität
+            en: parity
+
+    stopbits:
+        type: num
+        default: 1
+
+        description:
+            de: Anzahl Stopbits
+            en: number of stop bits
+
     host:
         type: str
         mandatory: false

From 579cdeaf16ac648ec4df7d49c55e48a8c90efa5d Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Wed, 23 Aug 2023 22:16:36 +0200
Subject: [PATCH 340/775] oppo plugin: add serial connection attributes to
 plugin.yaml

---
 oppo/plugin.yaml | 40 +++++++++++++++++++++++++++++++++++++++-
 1 file changed, 39 insertions(+), 1 deletion(-)

diff --git a/oppo/plugin.yaml b/oppo/plugin.yaml
index c0c504e24..225667609 100755
--- a/oppo/plugin.yaml
+++ b/oppo/plugin.yaml
@@ -68,7 +68,7 @@ parameters:
         description:
             de: Klasse für Verarbeitung von Kommandos
             en: class for command processing
- 
+
     model:
         type: str
         mandatory: false
@@ -104,6 +104,44 @@ parameters:
             de: Binärer Übertragungsmodus
             en: binary communication mode
 
+    baudrate:
+        type: num
+        default: 9600
+
+        description:
+            de: Serielle Übertragungsgeschwindigkeit
+            en: serial transmission speed
+
+    bytesize:
+        type: num
+        default: 8
+
+        description:
+            de: Anzahl Datenbits
+            en: number of data bits
+
+    parity:
+        type: str
+        default: N
+        valid_list:
+          - N
+          - E
+          - O
+          - M
+          - S
+
+        description:
+            de: Parität
+            en: parity
+
+    stopbits:
+        type: num
+        default: 1
+
+        description:
+            de: Anzahl Stopbits
+            en: number of stop bits
+
     autoreconnect:
         type: bool
         default: true

From 1cffcbf57fb0dab0e77d75b9fb62f1e336f6f03b Mon Sep 17 00:00:00 2001
From: Onkel Andy
Date: Wed, 23 Aug 2023 22:48:39 +0200
Subject: [PATCH 341/775] stateengine plugin: improve logging for set_by_attr
 value handling

---
 stateengine/StateEngineValue.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py
index 3bef3f02d..3675f41f4 100755
--- a/stateengine/StateEngineValue.py
+++ b/stateengine/StateEngineValue.py
@@ -91,6 +91,8 @@ def set_from_attr(self, item, attribute_name, default_value=None, reset=True, at
         if value is not None:
             _using_default = False
             self._log_develop("Processing value {0} from attribute name {1}, reset {2}", value, attribute_name, reset)
+        elif default_value is None:
+            return None, None, False
         else:
             value = default_value
             _using_default = True

From 15965f84876e6b230fe4293cca24aa21045ee038 Mon Sep 17 00:00:00 2001
From: msinn
Date: Thu, 24 Aug 2023 11:47:13 +0200
Subject: [PATCH 342/775] shelly: Test implementation for setting shellyrgbw2
 attributes; Bump to v1.6.2

---
 shelly/__init__.py | 39 +++++++++++++++++++++++++--------------
 shelly/plugin.yaml |  2 +-
 2 files 
changed, 26 insertions(+), 15 deletions(-) diff --git a/shelly/__init__.py b/shelly/__init__.py index 6dbb452f1..15e7ed6b3 100755 --- a/shelly/__init__.py +++ b/shelly/__init__.py @@ -39,7 +39,7 @@ class Shelly(MqttPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.6.1' + PLUGIN_VERSION = '1.6.2' def __init__(self, sh): @@ -312,19 +312,27 @@ def update_item(self, item, caller=None, source=None, dest=None): shelly_id = self.get_iattr_value(item.conf, 'shelly_id').upper() shelly_type = self.get_iattr_value(item.conf, 'shelly_type', '').lower() if shelly_type != '': - # old configuration type + # old configuration mode shelly_relay = self.get_iattr_value(item.conf, 'shelly_relay') if not shelly_relay: shelly_relay = '0' topic = 'shellies/' + shelly_type + '-' + shelly_id + '/relay/' + shelly_relay + '/command' self.publish_topic(topic, item(), item, bool_values=['off', 'on']) else: - # new configuration type + # new configuration mode for Gen1 device topic = 'shellies/' + config_data.get('shelly_id', '') shelly_group = config_data['shelly_group'] if shelly_group.startswith('switch:'): topic += '/relay/' + shelly_group.split(':')[1] + '/command' self.publish_topic(topic, item(), bool_values=['off', 'on']) + elif shelly_group.startswith('lights:'): + topic += '/color/' + shelly_group.split(':')[1] + '/set' + shelly_attr = config_data['shelly_attr'] + if shelly_attr == 'on': + shelly_attr = 'turn' + payload = {shelly_attr: item()} + + pass else: self.logger.warning(f"update_item: Output to group {shelly_group} is not supported") elif config_data.get('gen', None) == '2': @@ -984,17 +992,20 @@ def handle_gen1_status(self, shelly_id: str, property, topic, payload, group=Non elif property == 'lights': # for SHRGBW2 if len(sub_status) > 0: light = sub_status[0] - self.update_items_from_status(shelly_id, 'light', 'on', light.get('ison', False)) - self.update_items_from_status(shelly_id, 'light', 'mode', light.get('mode', '')) - self.update_items_from_status(shelly_id, 'light', 'red', light.get('mode', 0)) - self.update_items_from_status(shelly_id, 'light', 'green', light.get('mode', 0)) - self.update_items_from_status(shelly_id, 'light', 'blue', light.get('mode', 0)) - self.update_items_from_status(shelly_id, 'light', 'white', light.get('mode', 0)) - self.update_items_from_status(shelly_id, 'light', 'gain', light.get('gain', 0)) - self.update_items_from_status(shelly_id, 'light', 'effect', light.get('effect', 0)) - self.update_items_from_status(shelly_id, 'light', 'transition', light.get('transition', 0)) - self.update_items_from_status(shelly_id, 'light', 'power', light.get('power', 0)) - self.update_items_from_status(shelly_id, 'light', 'overpower', light.get('overpower', False)) + for property in light: + if property == 'ison': + self.update_items_from_status(shelly_id, 'lights:0', 'on', light.get(property, False)) + elif property == 'mode': + self.update_items_from_status(shelly_id, 'lights:0', property, light.get(property, '')) + elif property in ['red', 'green', 'blue', 'white', 'gain', 'effect', 'transition', 'power']: + self.update_items_from_status(shelly_id, 'lights:0', property, light.get(property, 0)) + elif property == 'overpower': + self.update_items_from_status(shelly_id, 'lights:0', property, light.get(property, False)) + else: + self.log_unhandled_status(shelly_id, property, light.get(property), topic=topic, payload=payload, group='lights:0', position='*l1') + if len(sub_status) > 1: + self.log_unhandled_status(shelly_id, 'light[1]', light[1], topic=topic, 
payload=payload, group='lights:'+str(len(sub_status)), position='*l1') + elif property == 'sensor': self.update_items_from_status(shelly_id, 'sensor', 'state', sub_status['state'], 'info') diff --git a/shelly/plugin.yaml b/shelly/plugin.yaml index c5ad270c7..4ddab247b 100755 --- a/shelly/plugin.yaml +++ b/shelly/plugin.yaml @@ -12,7 +12,7 @@ plugin: # documentation: http://smarthomeng.de/user/plugins/mqtt2/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1451853-support-thread-für-das-shelly-plugin - version: 1.6.1 # Plugin version + version: 1.6.2 # Plugin version sh_minversion: 1.9.5.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance From 35f3d2556b6659dbe8bf9c3705fa955c4355648c Mon Sep 17 00:00:00 2001 From: Onkel Andy Date: Thu, 24 Aug 2023 12:19:09 +0200 Subject: [PATCH 343/775] stateengine plugin: minor logging improvements and fixes --- stateengine/StateEngineEval.py | 3 ++- stateengine/StateEngineItem.py | 22 +++++++++++++++++++--- stateengine/StateEngineValue.py | 3 +++ 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/stateengine/StateEngineEval.py b/stateengine/StateEngineEval.py index a3f8f1583..ffbc18f70 100755 --- a/stateengine/StateEngineEval.py +++ b/stateengine/StateEngineEval.py @@ -217,7 +217,8 @@ def get_attributevalue(self, item, attrib): self._log_debug("Executing method 'get_attributevalue({0}, {1})'", item, attrib) if ":" in item: var_type, item = StateEngineTools.partition_strip(item, ":") - item, issue = self._abitem.return_item(self._abitem.get_variable(item)) if var_type == "var" else item + if var_type == "var": + item, issue = self._abitem.return_item(self._abitem.get_variable(item)) else: item, issue = self._abitem.return_item(item) try: diff --git a/stateengine/StateEngineItem.py b/stateengine/StateEngineItem.py index 48643a55e..156d5f0a4 100755 --- a/stateengine/StateEngineItem.py +++ b/stateengine/StateEngineItem.py @@ -844,6 +844,16 @@ def update_releasedby(self, state): ''' def __log_issues(self, issue_type): + def list_issues(v): + if isinstance(v.get('issue'), list) and len(v.get('issue')) > 1: + self.__logger.info("has the following issues:") + self.__logger.increase_indent() + for e in v.get('issue'): + self.__logger.info("- {}", e) + self.__logger.decrease_indent() + else: + self.__logger.info("has the following issue: {}", v.get('issue')) + if issue_type == 'actions': to_check = self.__action_status.items() warn = ', '.join(key for key in self.__action_status.keys()) @@ -873,7 +883,14 @@ def __log_issues(self, issue_type): if issue_type == 'states': self.__logger.info("State {} is ignored because", entry) elif issue_type == 'config entries': - self.__logger.info("Attribute {} has an issue: {}", entry, value.get('issue')) + if value.get('attribute'): + self.__logger.info("Attribute {}", value.get('attribute')) + self.__logger.increase_indent() + self.__logger.info("defined in state {}", entry) + self.__logger.decrease_indent() + list_issues(value) + else: + self.__logger.info("Attribute {} has an issue: {}", entry, value.get('issue')) continue else: additional = " used in" if origin_list else "" @@ -891,8 +908,7 @@ def __log_issues(self, issue_type): origin.get('conditionset')) self.__logger.info("{}", origin_text) self.__logger.decrease_indent() - text = "has an issue: {}".format(value.get('issue')) - self.__logger.info("{}", text) + list_issues(value) self.__logger.info("") 
for entry, value in to_check: if 'issue' not in value: diff --git a/stateengine/StateEngineValue.py b/stateengine/StateEngineValue.py index 3675f41f4..feb0ee6e3 100755 --- a/stateengine/StateEngineValue.py +++ b/stateengine/StateEngineValue.py @@ -105,6 +105,9 @@ def set_from_attr(self, item, attribute_name, default_value=None, reset=True, at else: value_list.append("{}:{}".format(attr_type, entry)) value = value_list + elif value is not None and attr_type is not None: + # update value type correctly based on attr_type + value = "{}:{}".format(attr_type, value) # Convert weird string representation of OrderedDict correctly if isinstance(value, str) and value.startswith("["): value = re.split('(, (?![^(]*\)))', value.strip('][')) From a9497bcc949dea61e0f985bd9e0f6306d0bdd21a Mon Sep 17 00:00:00 2001 From: msinn Date: Thu, 24 Aug 2023 12:27:14 +0200 Subject: [PATCH 344/775] executor: Removed duplicate scroolbars from webinterface; Bump to v1.2.1 --- executor/__init__.py | 2 +- executor/plugin.yaml | 2 +- executor/webif/templates/index.html | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/executor/__init__.py b/executor/__init__.py index 4e02a5cf0..740b18b69 100755 --- a/executor/__init__.py +++ b/executor/__init__.py @@ -40,7 +40,7 @@ class Executor(SmartPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.2.0' + PLUGIN_VERSION = '1.2.1' def __init__(self, sh): """ diff --git a/executor/plugin.yaml b/executor/plugin.yaml index 3f26ce297..351405348 100755 --- a/executor/plugin.yaml +++ b/executor/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: https://www.smarthomeng.de/user/plugins/executor/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1425152-support-thread-plugin-executor - version: 1.2.0 # Plugin version + version: 1.2.1 # Plugin version sh_minversion: 1.9 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin, use f-strings for debug diff --git a/executor/webif/templates/index.html b/executor/webif/templates/index.html index bda07578b..22967b243 100755 --- a/executor/webif/templates/index.html +++ b/executor/webif/templates/index.html @@ -501,13 +501,13 @@
    {{ _('Plugin') }}     : {% if p.aliv
    -
    +