1
0
mirror of https://github.com/pommi/python-itho-wpu.git synced 2024-12-03 15:45:09 +01:00

style: apply flake8 and black formatting

To check for errors:
$ pre-commit run --all-files

To install as pre-commit hook:
$ pre-commit install
This commit is contained in:
Pim van den Berg 2021-05-30 14:09:27 +02:00
parent 8c7d0ed947
commit 751182b70d
8 changed files with 171 additions and 89 deletions

2
.flake8 Normal file
View File

@@ -0,0 +1,2 @@
[flake8]
max-line-length = 99

15
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,15 @@
repos:
- repo: local
hooks:
- id: flake8
name: flake8
language: system
entry: flake8
types: [python]
stages: [commit]
- id: black
name: black
language: system
entry: black
types: [python]
stages: [commit]

View File

@@ -14,27 +14,31 @@ import sys
def parse_args(): def parse_args():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, parser = argparse.ArgumentParser(
description='Convert Itho Servicetool database to SQLite') formatter_class=argparse.ArgumentDefaultsHelpFormatter,
parser.add_argument('--itho-db', nargs='?', required=True, help="Itho Database file") description="Convert Itho Servicetool database to SQLite",
parser.add_argument('--sqlite-db', nargs='?', default='heatpump.sqlite', help="Itho Database file") )
parser.add_argument('--force', action='store_true', help="Force overwrite SQLite database") parser.add_argument("--itho-db", nargs="?", required=True, help="Itho Database file")
parser.add_argument(
"--sqlite-db", nargs="?", default="heatpump.sqlite", help="Itho Database file"
)
parser.add_argument("--force", action="store_true", help="Force overwrite SQLite database")
args = parser.parse_args() args = parser.parse_args()
return args return args
def convert(par_file, sqlite_db): def convert(par_file, sqlite_db):
par_file = par_file.replace("$", "\\$") par_file = par_file.replace("$", "\\$")
par_conn = pyodbc.connect(f'DRIVER={{MDBTools}};DBQ={par_file};') par_conn = pyodbc.connect(f"DRIVER={{MDBTools}};DBQ={par_file};")
par_conn.setencoding('UTF-8') par_conn.setencoding("UTF-8")
par_conn.setdecoding(pyodbc.SQL_CHAR, encoding='UTF-8') par_conn.setdecoding(pyodbc.SQL_CHAR, encoding="UTF-8")
par_cur = par_conn.cursor() par_cur = par_conn.cursor()
sqlite_db = db.sqlite(sqlite_db) sqlite_db = db.sqlite(sqlite_db)
tables = [] tables = []
for table_info in par_cur.tables(tableType='TABLE'): for table_info in par_cur.tables(tableType="TABLE"):
if re.match('^(VersieBeheer|Data[Ll]abel|Parameterlijst)', table_info.table_name): if re.match("^(VersieBeheer|Data[Ll]abel|Parameterlijst)", table_info.table_name):
tables.append(table_info.table_name) tables.append(table_info.table_name)
for t in sorted(tables): for t in sorted(tables):
@@ -46,10 +50,25 @@ def convert(par_file, sqlite_db):
for r in sorted(rows): for r in sorted(rows):
data.append((r.Index, r.Naam, r.Tekst_NL, r.Tooltip_NL, r.Eenheid_NL)) data.append((r.Index, r.Naam, r.Tekst_NL, r.Tooltip_NL, r.Eenheid_NL))
if re.match("^Parameterlijst", t): if re.match("^Parameterlijst", t):
par_cur.execute(f"select Index, Naam, Naam_fabriek, Min, Max, Default, Tekst_NL, Omschrijving_NL, Eenheid_NL from {t}") par_cur.execute(
"select Index, Naam, Naam_fabriek, Min, Max, Default, "
f"Tekst_NL, Omschrijving_NL, Eenheid_NL from {t}"
)
rows = par_cur.fetchall() rows = par_cur.fetchall()
for r in sorted(rows): for r in sorted(rows):
data.append((r.Index, r.Naam, r.Naam_fabriek, r.Min, r.Max, r.Default, r.Tekst_NL, r.Omschrijving_NL, r.Eenheid_NL)) data.append(
(
r.Index,
r.Naam,
r.Naam_fabriek,
r.Min,
r.Max,
r.Default,
r.Tekst_NL,
r.Omschrijving_NL,
r.Eenheid_NL,
)
)
if re.match("^VersieBeheer", t): if re.match("^VersieBeheer", t):
par_cur.execute(f"select VersieNummer, DataLabel, ParameterLijst from {t}") par_cur.execute(f"select VersieNummer, DataLabel, ParameterLijst from {t}")
rows = par_cur.fetchall() rows = par_cur.fetchall()

38
db.py
View File

@@ -2,7 +2,7 @@ import sqlite3
from sqlite3 import Error from sqlite3 import Error
class sqlite(): class sqlite:
def __init__(self, db_file): def __init__(self, db_file):
self.conn = self.connect(db_file) self.conn = self.connect(db_file)
@@ -34,15 +34,17 @@ class sqlite():
print("Error:", e) print("Error:", e)
def create_table(self, t): def create_table(self, t):
if t.startswith('datalabel'): if t.startswith("datalabel"):
query = """CREATE TABLE {} ( query = """CREATE TABLE {} (
id real, id real,
name text, name text,
title text, title text,
tooltip text, tooltip text,
unit text unit text
);""".format(t) );""".format(
elif t.startswith('parameterlijst'): t
)
elif t.startswith("parameterlijst"):
query = """ query = """
CREATE TABLE {} ( CREATE TABLE {} (
id real, id real,
@@ -54,32 +56,42 @@ class sqlite():
title text, title text,
description text, description text,
unit text unit text
);""".format(t) );""".format(
elif t.startswith('versiebeheer'): t
)
elif t.startswith("versiebeheer"):
query = """ query = """
CREATE TABLE {} ( CREATE TABLE {} (
version integer primary key, version integer primary key,
datalabel integer, datalabel integer,
parameterlist integer parameterlist integer
);""".format(t) );""".format(
t
)
self.execute(query) self.execute(query)
self.conn.commit() self.conn.commit()
def insert(self, t, data): def insert(self, t, data):
if t.startswith('datalabel'): if t.startswith("datalabel"):
query = """ query = """
INSERT INTO {} (id, name, title, tooltip, unit) INSERT INTO {} (id, name, title, tooltip, unit)
VALUES (?, ?, ?, ?, ?); VALUES (?, ?, ?, ?, ?);
""".format(t) """.format(
elif t.startswith('parameterlijst'): t
)
elif t.startswith("parameterlijst"):
query = """ query = """
INSERT INTO {} (id, name, name_factory, min, max, def, title, description, unit) INSERT INTO {} (id, name, name_factory, min, max, def, title, description, unit)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?); VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);
""".format(t) """.format(
elif t.startswith('versiebeheer'): t
)
elif t.startswith("versiebeheer"):
query = """ query = """
INSERT INTO {} (version, datalabel, parameterlist) INSERT INTO {} (version, datalabel, parameterlist)
VALUES (?, ?, ?); VALUES (?, ?, ?);
""".format(t) """.format(
t
)
self.executemany(query, data) self.executemany(query, data)
self.conn.commit() self.conn.commit()

View File

@@ -11,7 +11,7 @@ import db
from collections import namedtuple from collections import namedtuple
from itho_i2c import I2CMaster, I2CSlave from itho_i2c import I2CMaster, I2CSlave
logger = logging.getLogger('stdout') logger = logging.getLogger("stdout")
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)
stdout_log_handler = logging.StreamHandler(sys.stdout) stdout_log_handler = logging.StreamHandler(sys.stdout)
stdout_log_handler.setFormatter(logging.Formatter("%(message)s")) stdout_log_handler.setFormatter(logging.Formatter("%(message)s"))
@@ -27,28 +27,43 @@ actions = {
def parse_args(): def parse_args():
parser = argparse.ArgumentParser(description='Itho WPU i2c master') parser = argparse.ArgumentParser(description="Itho WPU i2c master")
parser.add_argument('--action', nargs='?', required=True, parser.add_argument(
choices=actions.keys(), help="Execute an action") "--action",
parser.add_argument('--loglevel', nargs='?', nargs="?",
choices=["debug", "info", "warning", "error", "critical"], required=True,
help="Loglevel") choices=actions.keys(),
parser.add_argument('--timestamp', action='store_true', help="Show timestamp in output") help="Execute an action",
parser.add_argument('--master-only', action='store_true', help="Only run I2C master") )
parser.add_argument('--slave-only', action='store_true', help="Only run I2C slave") parser.add_argument(
parser.add_argument('--slave-timeout', nargs='?', type=int, default=60, "--loglevel",
help="Slave timeout in seconds when --slave-only") nargs="?",
parser.add_argument('--no-cache', action='store_true', choices=["debug", "info", "warning", "error", "critical"],
help="Don't use local cache") help="Loglevel",
parser.add_argument('--export-to-influxdb', action='store_true', )
help="Export results to InfluxDB") parser.add_argument("--timestamp", action="store_true", help="Show timestamp in output")
parser.add_argument("--master-only", action="store_true", help="Only run I2C master")
parser.add_argument("--slave-only", action="store_true", help="Only run I2C slave")
parser.add_argument(
"--slave-timeout",
nargs="?",
type=int,
default=60,
help="Slave timeout in seconds when --slave-only",
)
parser.add_argument("--no-cache", action="store_true", help="Don't use local cache")
parser.add_argument(
"--export-to-influxdb",
action="store_true",
help="Export results to InfluxDB",
)
args = parser.parse_args() args = parser.parse_args()
return args return args
class IthoWPU(): class IthoWPU:
def __init__(self, master_only, slave_only, slave_timeout, no_cache): def __init__(self, master_only, slave_only, slave_timeout, no_cache):
self.master_only = master_only self.master_only = master_only
self.slave_only = slave_only self.slave_only = slave_only
@@ -56,13 +71,13 @@ class IthoWPU():
self._q = queue.Queue() self._q = queue.Queue()
self.no_cache = no_cache self.no_cache = no_cache
self.cache = IthoWPUCache() self.cache = IthoWPUCache()
self.nodeid = self.get('getnodeid') self.nodeid = self.get("getnodeid")
self.datatype = self.get('getdatatype') self.datatype = self.get("getdatatype")
self.heatpump_db = db.sqlite('heatpump.sqlite') self.heatpump_db = db.sqlite("heatpump.sqlite")
def get(self, action): def get(self, action):
if not self.no_cache: if not self.no_cache:
response = self.cache.get(action.replace('get', '')) response = self.cache.get(action.replace("get", ""))
if response is not None: if response is not None:
logger.debug(f"Response (from cache): {response}") logger.debug(f"Response (from cache): {response}")
return response return response
@@ -85,7 +100,7 @@ class IthoWPU():
if not self.master_only: if not self.master_only:
slave.close() slave.close()
self.cache.set(action.replace('get', ''), response) self.cache.set(action.replace("get", ""), response)
return response return response
@@ -97,32 +112,37 @@ class IthoWPU():
def get_datalog_structure(self): def get_datalog_structure(self):
listversion = self.get_listversion_from_nodeid() listversion = self.get_listversion_from_nodeid()
datalabel_version = self.heatpump_db.execute( datalabel_version = self.heatpump_db.execute(
f"SELECT datalabel FROM versiebeheer WHERE version = {listversion}")[0]['datalabel'] f"SELECT datalabel FROM versiebeheer WHERE version = {listversion}"
)[0]["datalabel"]
if datalabel_version is None or not type(datalabel_version) == int: if datalabel_version is None or not type(datalabel_version) == int:
logger.error(f"Datalabel not found in database for version {listversion}") logger.error(f"Datalabel not found in database for version {listversion}")
return None return None
datalabel = self.heatpump_db.execute( datalabel = self.heatpump_db.execute(
f"SELECT name, title, tooltip, unit FROM datalabel_v{datalabel_version} order by id") f"SELECT name, title, tooltip, unit FROM datalabel_v{datalabel_version} order by id"
)
if len(self.datatype[5:-1]) != len(datalabel): if len(self.datatype[5:-1]) != len(datalabel):
logger.warning(f"Number of datatype items ({len(self.datatype[5:-1])}) is not equal to the number of datalabels ({len(datalabel)}) in the database.") logger.warning(
f"Number of datatype items ({len(self.datatype[5:-1])}) is not equal to "
f"the number of datalabels ({len(datalabel)}) in the database."
)
Field = namedtuple('Field', 'index type label description') Field = namedtuple("Field", "index type label description")
datalog = [] datalog = []
index = 0 index = 0
for dl, dt in zip(datalabel, self.datatype[5:-1]): for dl, dt in zip(datalabel, self.datatype[5:-1]):
description = dl['title'].title() description = dl["title"].title()
if dl['unit'] is not None: if dl["unit"] is not None:
description = f"{description} ({dl['unit']})" description = f"{description} ({dl['unit']})"
description = f"{description} ({dl['name'].lower()})" description = f"{description} ({dl['name'].lower()})"
datalog.append(Field(index, int(dt, 0), dl['name'].lower(), description)) datalog.append(Field(index, int(dt, 0), dl["name"].lower(), description))
if dt in ['0x0', '0xc']: if dt in ["0x0", "0xc"]:
index = index + 1 index = index + 1
elif dt in ['0x10', '0x12', '0x92']: elif dt in ["0x10", "0x12", "0x92"]:
index = index + 2 index = index + 2
elif dt in ['0x20']: elif dt in ["0x20"]:
index = index + 4 index = index + 4
else: else:
logger.error(f"Unknown data type for label {dl['name']}: {dt}") logger.error(f"Unknown data type for label {dl['name']}: {dt}")
@@ -134,10 +154,10 @@ class IthoWPUCache:
def __init__(self): def __init__(self):
self._cache_file = "itho-wpu-cache.json" self._cache_file = "itho-wpu-cache.json"
self._cache_data = { self._cache_data = {
'nodeid': None, "nodeid": None,
'serial': None, "serial": None,
'datatype': None, "datatype": None,
'schema_version': '1', "schema_version": "1",
} }
self._read_cache() self._read_cache()
@@ -148,17 +168,17 @@ class IthoWPUCache:
with open(self._cache_file) as cache_file: with open(self._cache_file) as cache_file:
cache_data = json.load(cache_file) cache_data = json.load(cache_file)
logger.debug(f"Loading local cache: {json.dumps(cache_data)}") logger.debug(f"Loading local cache: {json.dumps(cache_data)}")
for key in ['nodeid', 'serial', 'datatype']: for key in ["nodeid", "serial", "datatype"]:
if key in cache_data: if key in cache_data:
self._cache_data[key] = cache_data[key] self._cache_data[key] = cache_data[key]
def _write_cache(self): def _write_cache(self):
with open(self._cache_file, 'w') as cache_file: with open(self._cache_file, "w") as cache_file:
logger.debug(f"Writing to local cache: {json.dumps(self._cache_data)}") logger.debug(f"Writing to local cache: {json.dumps(self._cache_data)}")
json.dump(self._cache_data, cache_file) json.dump(self._cache_data, cache_file)
def get(self, action): def get(self, action):
if action not in ['nodeid', 'serial', 'datatype']: if action not in ["nodeid", "serial", "datatype"]:
logger.debug(f"Cache for '{action}' is not supported") logger.debug(f"Cache for '{action}' is not supported")
return None return None
logger.debug(f"Reading '{action}' from local cache") logger.debug(f"Reading '{action}' from local cache")
@@ -167,7 +187,7 @@ class IthoWPUCache:
return self._cache_data[action] return self._cache_data[action]
def set(self, action, value): def set(self, action, value):
if action not in ['nodeid', 'serial', 'datatype']: if action not in ["nodeid", "serial", "datatype"]:
logger.debug(f"Cache for '{action}' is not supported") logger.debug(f"Cache for '{action}' is not supported")
return None return None
logger.debug(f"Writing '{action}' to local cache: {value}") logger.debug(f"Writing '{action}' to local cache: {value}")
@@ -177,8 +197,10 @@ class IthoWPUCache:
def is_messageclass_valid(action, response): def is_messageclass_valid(action, response):
if int(response[1], 0) != actions[action][0] and int(response[2], 0) != actions[action][1]: if int(response[1], 0) != actions[action][0] and int(response[2], 0) != actions[action][1]:
logger.error(f"Response MessageClass != {actions[action][0]} {actions[action][1]} " logger.error(
f"({action}), but {response[1]} {response[2]}") f"Response MessageClass != {actions[action][0]} {actions[action][1]} "
f"({action}), but {response[1]} {response[2]}"
)
return False return False
return True return True
@@ -194,6 +216,7 @@ def process_response(action, response, args, wpu):
measurements = process_datalog(response, wpu) measurements = process_datalog(response, wpu)
if args.export_to_influxdb: if args.export_to_influxdb:
from itho_export import export_to_influxdb from itho_export import export_to_influxdb
export_to_influxdb(action, measurements) export_to_influxdb(action, measurements)
elif action == "getnodeid": elif action == "getnodeid":
process_nodeid(response) process_nodeid(response)
@@ -208,18 +231,20 @@ def process_nodeid(response):
"type": { "type": {
13: "WPU", 13: "WPU",
15: "Autotemp", 15: "Autotemp",
} },
} }
} }
manufacturergroup = ((int(response[5], 0) << 8) + int(response[6], 0)) manufacturergroup = (int(response[5], 0) << 8) + int(response[6], 0)
manufacturer = hardware_info[int(response[7], 0)]["name"] manufacturer = hardware_info[int(response[7], 0)]["name"]
hardwaretype = hardware_info[int(response[7], 0)]["type"][int(response[8], 0)] hardwaretype = hardware_info[int(response[7], 0)]["type"][int(response[8], 0)]
productversion = int(response[9], 0) productversion = int(response[9], 0)
listversion = int(response[10], 0) listversion = int(response[10], 0)
logger.info(f"ManufacturerGroup: {manufacturergroup}, Manufacturer: {manufacturer}, " logger.info(
f"HardwareType: {hardwaretype}, ProductVersion: {productversion}, " f"ManufacturerGroup: {manufacturergroup}, Manufacturer: {manufacturer}, "
f"ListVersion: {listversion}") f"HardwareType: {hardwaretype}, ProductVersion: {productversion}, "
f"ListVersion: {listversion}"
)
def process_serial(response): def process_serial(response):
@@ -232,24 +257,24 @@ def process_datalog(response, wpu):
message = response[5:] message = response[5:]
measurements = {} measurements = {}
for d in datalog: for d in datalog:
if d.type == 0x0 or d.type == 0xc: if d.type == 0x0 or d.type == 0xC:
m = message[d.index:d.index+1] m = message[d.index : d.index + 1] # noqa: E203
num = int(m[0], 0) num = int(m[0], 0)
elif d.type == 0x10: elif d.type == 0x10:
m = message[d.index:d.index+2] m = message[d.index : d.index + 2] # noqa: E203
num = ((int(m[0], 0) << 8) + int(m[1], 0)) num = (int(m[0], 0) << 8) + int(m[1], 0)
elif d.type == 0x12: elif d.type == 0x12:
m = message[d.index:d.index+2] m = message[d.index : d.index + 2] # noqa: E203
num = round((int(m[0], 0) << 8) + int(m[1], 0) / 100, 2) num = round((int(m[0], 0) << 8) + int(m[1], 0) / 100, 2)
elif d.type == 0x92: elif d.type == 0x92:
m = message[d.index:d.index+2] m = message[d.index : d.index + 2] # noqa: E203
num = ((int(m[0], 0) << 8) + int(m[1], 0)) num = (int(m[0], 0) << 8) + int(m[1], 0)
if num >= 32768: if num >= 32768:
num -= 65536 num -= 65536
num = round(num / 100, 2) num = round(num / 100, 2)
elif d.type == 0x20: elif d.type == 0x20:
m = message[d.index:d.index+4] m = message[d.index : d.index + 4] # noqa: E203
num = ((int(m[0], 0) << 24) + (int(m[1], 0) << 16) + (int(m[2], 0) << 8) + int(m[3], 0)) num = (int(m[0], 0) << 24) + (int(m[1], 0) << 16) + (int(m[2], 0) << 8) + int(m[3], 0)
else: else:
logger.error(f"Unknown message type for datalog {d.name}: {d.type}") logger.error(f"Unknown message type for datalog {d.name}: {d.type}")
logger.info(f"{d.description}: {num}") logger.info(f"{d.description}: {num}")

View File

@@ -6,11 +6,11 @@ def export_to_influxdb(action, measurements):
from influxdb import InfluxDBClient from influxdb import InfluxDBClient
influx_client = InfluxDBClient( influx_client = InfluxDBClient(
host=os.getenv('INFLUXDB_HOST', 'localhost'), host=os.getenv("INFLUXDB_HOST", "localhost"),
port=os.getenv('INFLUXDB_PORT', 8086), port=os.getenv("INFLUXDB_PORT", 8086),
username=os.getenv('INFLUXDB_USERNAME', 'root'), username=os.getenv("INFLUXDB_USERNAME", "root"),
password=os.getenv('INFLUXDB_PASSWORD', 'root'), password=os.getenv("INFLUXDB_PASSWORD", "root"),
database=os.getenv('INFLUXDB_DATABASE') database=os.getenv("INFLUXDB_DATABASE"),
) )
json_body = [ json_body = [
{ {
@@ -22,4 +22,4 @@ def export_to_influxdb(action, measurements):
try: try:
influx_client.write_points(json_body) influx_client.write_points(json_body)
except Exception as e: except Exception as e:
print('Failed to write to influxdb: ', e) print("Failed to write to influxdb: ", e)

View File

@@ -32,7 +32,7 @@ class I2CRaw:
def read_i2c_block_data(self, n_bytes): def read_i2c_block_data(self, n_bytes):
data_raw = self.fr.read(n_bytes) data_raw = self.fr.read(n_bytes)
unpack_format = 'B'*n_bytes unpack_format = "B" * n_bytes
return list(struct.unpack(unpack_format, data_raw)) return list(struct.unpack(unpack_format, data_raw))
def close(self): def close(self):
@@ -80,7 +80,7 @@ class I2CMaster:
self.i.close() self.i.close()
class I2CSlave(): class I2CSlave:
def __init__(self, address, queue): def __init__(self, address, queue):
self.address = address self.address = address
self.queue = queue self.queue = queue

9
pyproject.toml Normal file
View File

@@ -0,0 +1,9 @@
[tool.black]
line-length = 99
include = '\.pyi?$'
exclude = '''
/(
\.git
| \.venv
)/
'''