homecontrol: major rewrite, add ws handler
parent 2ba8a1ec08
commit 7e6ee25a6d
7 changed files with 560 additions and 328 deletions
1  .gitignore  (vendored)
@@ -5,3 +5,4 @@ db.sqlite
 __pycache__
 *.swp
 db.sqlite
+poetry.lock
@@ -1,3 +1,3 @@
-import homecontrol
+import core

-homecontrol.main()
+core.main()
402  homecontrol/core.py  (new executable file)
@@ -0,0 +1,402 @@
#!/usr/bin/env python3
import argparse
import asyncio
import dataset
from flask import Flask, request, json, jsonify, abort, make_response
import logging
from math import inf
import os
import sys
import threading
import time
from xdg import XDG_CONFIG_HOME
# local
from homecontrol.core_ws import Core_WS

CONFIG_FILE = f"{XDG_CONFIG_HOME}/homecontrol.json"
DB_FILE = f"{XDG_CONFIG_HOME}/homecontrol.sqlite"
LIMIT = 100
PORT_WS = 8100
HOST_WS = "0.0.0.0"
PORT_RT = 8200
HOST_RT = "0.0.0.0"
# TIMEOUT_CLIENT = 10
core = None

# sensors
NUM_VALUES = 1000


def setup():
    # arguments
    parser = argparse.ArgumentParser(description='homecontrol')
    parser.add_argument("--hw", "--host_ws", dest="host_ws", type=str,
                        help="websocket host")
    parser.add_argument("--pw", "--port_ws", dest="port_ws", type=int,
                        help="websocket port")
    parser.add_argument("--hr", "--host_rt", dest="host_rt", type=str,
                        help="REST host")
    parser.add_argument("--pr", "--port_rt", dest="port_rt", type=int,
                        help="REST port")
    parser.add_argument("-c", "--config", dest="config", type=str, help="config file",
                        default=CONFIG_FILE)
    parser.add_argument("-f", "--dbfile", dest="dbfile", type=str, help="database file",
                        default=DB_FILE)
    parser.add_argument("-d", "--debug", dest="debug", action="store_true",
                        help="debug mode")
    parser.add_argument("-D", "--debugflask", dest="debugflask", action="store_true",
                        help="flask debug mode")

    # parse arguments
    args = parser.parse_args()

    # initialize config
    if not os.path.exists(XDG_CONFIG_HOME):
        os.makedirs(XDG_CONFIG_HOME)
    config = {}
    try:
        config_file = open(args.config, "r")
        config = json.load(config_file)
    except:
        pass

    # fill new keys with defaults
    if not config.get("host_ws"):
        config["host_ws"] = HOST_WS
    if not config.get("port_ws"):
        config["port_ws"] = PORT_WS
    if not config.get("host_rt"):
        config["host_rt"] = HOST_RT
    if not config.get("port_rt"):
        config["port_rt"] = PORT_RT

    if not config.get('dbfile'):
        config['dbfile'] = DB_FILE

    # overwrite with arguments
    if args.host_ws:
        config["host_ws"] = args.host_ws
    if args.port_ws:
        config["port_ws"] = args.port_ws
    if args.host_rt:
        config["host_rt"] = args.host_rt
    if args.port_rt:
        config["port_rt"] = args.port_rt
    if args.dbfile:
        config['dbfile'] = args.dbfile

    # save to file
    with open(args.config, 'w') as config_file:
        json.dump(config, config_file)

    # temporary option
    if args.debug:
        config["debug"] = args.debug
    else:
        config["debug"] = False
    if args.debugflask:
        config["debugflask"] = args.debugflask
    else:
        config["debugflask"] = False

    return config


class Core:
    def __init__(self, config):
        self.config = config
        # logging
        FORMAT = "[%(asctime)13s :: %(name)18s :: %(levelname)7s] %(message)s"
        DATEFMT = "%Y%m%d %H:%M:%S"
        logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt=DATEFMT)
        self.logger = logging.getLogger("Core")
        if config["debug"]:
            self.logger.setLevel(logging.DEBUG)
        self.logger.info("initialization starting...")
        # runtime
        self.actor_queue = {}
        self.db = dataset.connect(f"sqlite:///{config['dbfile']}?check_same_thread=False")
        self.cws = Core_WS(self.config["debug"], self.config["host_ws"],
                           self.config["port_ws"])
        self.flask_thread = threading.Thread(target=self.flask_thread_fn)
        self.flask_thread.setDaemon(True)
        self.flask_thread.start()
        self.logger.info("initialization complete.")

    def flask_thread_fn(self):
        app = Flask(__name__)

        @app.route("/", methods=["GET"])
        def root_get():
            ret = {}
            return make_response(jsonify(ret), 200)

        @app.route("/actor/command", methods=['POST'])
        def actor_command():
            ret = {}
            try:
                content = request.json
                actorId = content.get("id")
                command = content.get("command")
                data = content.get("data")
                self.actor_add_queue(actorId, command, data)
            except Exception as ex:
                self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
                abort(400)

            return make_response(jsonify(ret), 200)

        @app.route("/actor/get", methods=['GET'])
        def actor_get():
            ret = self.actor_get_actors()
            return make_response(jsonify(ret), 200)

        @app.route("/actor/get_levels/<actorId>", methods=['GET'])
        def actor_get_level(actorId):
            ret = {}
            if request.args.get("limit"):
                limit = int(request.args.get("limit"))
            else:
                limit = None
            self.logger.debug(f"actor/get_levels/{actorId}, limit: {limit}")
            ret[actorId] = self.actor_get_levels(actorId, limit=limit)
            return make_response(jsonify(ret), 200)

        @app.route("/actor/update", methods=['POST'])
        def actor_update():
            ret = {}
            try:
                content = request.json
                actorId = content.get("id")
                actorType = content.get("type")
                level = content.get("level")
                maxLevel = content.get("maxLevel")
                self.actor_add_level(actorId, actorType, maxLevel, level)
                ret = self.actor_get_queue(actorId)
            except Exception as ex:
                self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
                abort(400)

            return make_response(jsonify(ret), 200)

        @app.route("/sensor/get", methods=['GET'])
        def sensor_get_sensors():
            ret = self.sensor_get_sensors()
            return make_response(jsonify(ret), 200)

        @app.route("/sensor/get_values/<sensorId>", methods=['GET'])
        def sensor_get_values(sensorId):
            ret = {}
            min_ts = 0
            if request.args.get("min_ts"):
                min_ts = int(request.args.get("min_ts"))
            max_ts = inf
            limit = None
            if request.args.get("limit"):
                limit = int(request.args.get("limit"))
            if request.args.get("max_ts"):
                max_ts = int(request.args.get("max_ts"))
            self.logger.debug(f"sensor/get_values/{sensorId}, [{min_ts}, {max_ts}], limit: {limit}")
            ret[sensorId] = self.sensor_get_values(sensorId, min_ts=min_ts, max_ts=max_ts, limit=limit)
            return make_response(jsonify(ret), 200)

        @app.route("/sensor/update", methods=['POST'])
        def sensor_add_value():
            ret = {}
            try:
                content = request.json
                sensorId = content.get("id")
                sensorType = content.get("type")
                value = content.get("value")
                ret = self.sensor_add_value(sensorId, sensorType, value)
            except Exception as ex:
                self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
                abort(400)
            return make_response(jsonify(ret), 200)

        ## run app
        app.run(use_reloader=False, debug=self.config["debug"],
                host=self.config["host_rt"], port=self.config["port_rt"])

    async def status_thread(self):
        while True:
            for n in self.cws.nodes:
                await self.cws.node_get_status(n)

                status = self.cws.nodes[n].get_status()
                if status and len(status) > 0:
                    self.actor_add_level(n, status["level"], status["ts"])
            await asyncio.sleep(1)

    def actor_add_level(self, actorId, level, ts):
        ret = {}
        try:
            if not self.db["actors"].find_one(actorId=actorId):
                info = self.cws.nodes[actorId].get_info()
                q = {"actorId": info["nodeid"], "actorType": info["nodetype"], "maxLevel":
                     info["maxlevel"]}
                self.db['actors'].insert(q)
            q = {"actorId": actorId, "ts": ts, "level": level}
            self.db['actor_levels'].insert(q)
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def actor_add_queue(self, actorId, command, data):
        try:
            if self.db["actors"].find_one(actorId=actorId):
                self.actor_queue[actorId] = {"command": command, "data": data}
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")

    def actor_get_actors(self):
        ret = {}
        try:
            for s in self.db["actors"]:
                actorId = s["actorId"]
                ret[actorId] = self.actor_get_info(actorId)
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def actor_get_info(self, actorId):
        ret = {}
        try:
            q = self.db["actors"].find_one(actorId=actorId)
            ret["actorId"] = actorId
            ret["maxLevel"] = f"0x{q['maxLevel']}"
            ret["actorType"] = q["actorType"]
            q = self.db["actor_levels"].find_one(actorId=actorId, order_by="-ts")
            ret["ts"] = q["ts"]
            ret["level"] = f"0x{q['level']}"
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def actor_get_levels(self, actorId, limit=None):
        ret = {}
        try:
            if self.db["actors"].find_one(actorId=actorId):
                ret = self.actor_get_info(actorId)
                if limit:
                    query = self.db["actor_levels"].find(actorId=actorId, _limit=limit)
                else:
                    query = self.db["actor_levels"].find(actorId=actorId)
                levels = []
                for q in query:
                    levels.append({"ts": q["ts"], "value": f"0x{q['level']}"})
                ret["levels"] = levels
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def actor_get_queue(self, actorId):
        ret = {}
        try:
            if actorId in self.actor_queue:
                ret = self.actor_queue.pop(actorId)
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_get_info(self, sensorId):
        ret = {}
        try:
            q = self.db["sensors"].find_one(sensorId=sensorId)
            ret["sensorId"] = sensorId
            ret["sensorType"] = q["sensorType"]
            q = self.db["sensor_values"].find_one(sensorId=sensorId, order_by="-ts")
            ret["ts"] = q["ts"]
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_get_values(self, sensorId, min_ts, max_ts, limit=None):
        ret = {}
        try:
            if self.db["sensors"].find_one(sensorId=sensorId):
                ret = self.sensor_get_info(sensorId)
                if limit:
                    query = self.db["sensor_values"].find(sensorId=sensorId,
                                                          ts={"between": [min_ts, max_ts]}, _limit=limit)
                else:
                    query = self.db["sensor_values"].find(sensorId=sensorId,
                                                          ts={"between": [min_ts, max_ts]})
                values = []
                for q in query:
                    values.append({"ts": float(q["ts"]), "value": float(q["value"])})
                ret["values"] = values
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def sensor_get_sensors(self):
        ret = {}
        try:
            for s in self.db["sensors"]:
                sensorId = s["sensorId"]
                ret[sensorId] = self.sensor_get_info(sensorId)
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_add_value(self, sensorId, sensorType, value):
        ret = {}
        try:
            if not self.db["sensors"].find_one(sensorId=sensorId):
                self.db['sensors'].insert({"sensorId": sensorId, "sensorType": sensorType})
            self.db['sensor_values'].insert({"sensorId": sensorId, "ts": time.time(), "value":
                                             value})
        except Exception as ex:
            self.logger.error(f"Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret


def main():
    config = setup()
    FORMAT = "[%(asctime)13s :: %(name)18s :: %(levelname)7s] %(message)s"
    DATEFMT = "%Y%m%d %H:%M:%S"
    logging.basicConfig(level=logging.INFO, format=FORMAT,
                        datefmt=DATEFMT)
    logger = logging.getLogger("main")
    core = Core(config)
    if config["debug"]:
        logger.setLevel(logging.DEBUG)

    tasks = [asyncio.ensure_future(core.cws.ws_server), asyncio.ensure_future(core.status_thread())]
    loop = asyncio.get_event_loop()

    try:
        loop.run_until_complete(asyncio.gather(*tasks))
        loop.run_forever()
    except KeyboardInterrupt:
        print("\nCTRL-C received, stopping...")
        time.sleep(1)
        asyncio.gather(*tasks).cancel()
    finally:
        if loop.is_running():
            loop.stop()
        print("done.")


if __name__ == "__main__":
    main()
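A minimal usage sketch, not part of this commit: one way a client might exercise the new REST endpoints defined above, assuming the default REST host/port (0.0.0.0:8200) and using the requests library already listed in pyproject.toml. The ids, types, command name and values below are placeholders.

    # rest_client_sketch.py -- illustrative only, not part of this commit
    import requests

    BASE = "http://localhost:8200"  # default HOST_RT / PORT_RT from core.py

    # push a sensor reading; /sensor/update reads "id", "type" and "value" from the JSON body
    requests.post(f"{BASE}/sensor/update",
                  json={"id": "sensor-1", "type": "temperature", "value": 21.5})

    # list known sensors and their latest timestamps
    print(requests.get(f"{BASE}/sensor/get").json())

    # queue a command for an actor; it is handed back as the response to the
    # actor's next POST to /actor/update (see actor_get_queue above)
    requests.post(f"{BASE}/actor/command",
                  json={"id": "actor-1", "command": "set_level", "data": "80"})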
120  homecontrol/core_ws.py  (new file)
@@ -0,0 +1,120 @@
import asyncio
import json
import logging
import time
import websockets

class Node_WS:
    def __init__(self, debug, nodeid, nodetype, name, maxlevel, ws):
        self.debug = debug
        self.nodeid = nodeid
        self.nodetype = nodetype
        self.name = name
        self.maxlevel = maxlevel
        self.ws = ws
        self.status = {}
        ## logging
        FORMAT = '[%(asctime)13s :: %(name)18s :: %(levelname)7s] %(message)s'
        DATEFMT = '%Y%m%d %H:%M:%S'
        logging.basicConfig(level=logging.INFO, format=FORMAT,
                            datefmt=DATEFMT)
        self.logger = logging.getLogger(f"Node_WS:{self.nodeid.split('-')[0]}")
        if self.debug:
            self.logger.setLevel(logging.DEBUG)

    def get_info(self):
        ret = {}
        ret["nodeid"] = self.nodeid
        ret["nodetype"] = self.nodetype
        ret["name"] = self.name
        ret["maxlevel"] = self.maxlevel
        return ret

    def get_status(self):
        ret = self.status
        return ret

class Core_WS:
    def __init__(self, debug, host, port):
        self.debug = debug
        self.host = host
        self.port = port
        self.nodes = {}
        ## logging
        FORMAT = '[%(asctime)13s :: %(name)18s :: %(levelname)7s] %(message)s'
        DATEFMT = '%Y%m%d %H:%M:%S'
        logging.basicConfig(level=logging.INFO, format=FORMAT,
                            datefmt=DATEFMT)
        self.logger = logging.getLogger("Core_WS")
        if self.debug:
            self.logger.setLevel(logging.DEBUG)
        self.ws_server = websockets.serve(self.handler, self.host, self.port)

    def get_nodes(self):
        ret = {}
        for n in self.nodes:
            ret[n] = self.nodes[n].get_info()
        return ret

    def get_node(self, nodeid):
        ret = None
        if nodeid in self.nodes:
            ret = self.nodes[nodeid].get_info()
        return ret

    async def node_get_status(self, nodeid):
        await self.node_send_command("get_status", nodeid, {})

    async def node_register(self, nodeid, nodetype, name, maxlevel, websocket):
        self.nodes[nodeid] = Node_WS(self.debug, nodeid, nodetype, name, maxlevel, websocket)
        self.logger.info(f"node registered: {nodeid} :: {name}")

    async def node_unregister(self, nodeid):
        if nodeid and nodeid in self.nodes:
            self.nodes.pop(nodeid)
            self.logger.info(f"node unregistered: {nodeid.split('-')[0]}")

    async def node_send_command(self, command, nodeid, args):
        if nodeid in self.nodes:
            m = {"command": command, "args": args}
            message = json.dumps(m)
            self.logger.debug(f"> {nodeid} {command}")
            if self.nodes[nodeid].ws.open:
                await asyncio.wait([self.nodes[nodeid].ws.send(message)])
            else:
self.logger.warning(f"send_command({nodeid}): ws not open")
|
||||
self.node_unregister(nodeid)

    async def handler(self, websocket, path):
        nodeid = None
        try:
            async for message in websocket:
                m = json.loads(message)
                # self.logger.debug(f"< {m}")

                nodeid = m.get("nodeid")
                cmd = m.get("command")
                if nodeid and cmd:
                    self.logger.debug(f"< {nodeid} {cmd}")
                    if cmd == "register":
                        if not nodeid in self.nodes:
                            nodetype = m.get("nodetype")
                            name = m.get("name")
                            maxlevel = m.get("maxlevel")
                            await self.node_register(nodeid, nodetype, name, maxlevel,
                                                     websocket)
                        ret = {"command": "registered"}
                        await websocket.send(json.dumps(ret))
                    elif cmd == "status":
                        status = m.get("status")
                        if status:
                            self.nodes[nodeid].status = status
                            self.nodes[nodeid].status["ts"] = time.time()
                    else:
                        self.logger.warning(f"unknown command: {m}")

        except websockets.ConnectionClosed:
            pass

        finally:
            await self.node_unregister(nodeid)
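A minimal sketch of the node side of this protocol, not part of the commit: a node connects to the websocket server (default 0.0.0.0:8100), sends a register message, and answers get_status requests with status messages, matching what Core_WS.handler above expects. The node id, name, type and level are placeholder values.

    # node_client_sketch.py -- illustrative only, not part of this commit
    import asyncio
    import json
    import websockets

    async def node():
        async with websockets.connect("ws://localhost:8100") as ws:
            # register with the core; handler() reads nodeid, nodetype, name, maxlevel
            await ws.send(json.dumps({"nodeid": "node-1", "command": "register",
                                      "nodetype": "dimmer", "name": "demo lamp",
                                      "maxlevel": "ff"}))
            async for message in ws:
                m = json.loads(message)
                if m.get("command") == "get_status":
                    # report the current level; the core stamps a ts on it and
                    # status_thread() stores it via actor_add_level()
                    await ws.send(json.dumps({"nodeid": "node-1", "command": "status",
                                              "status": {"level": "80"}}))

    asyncio.run(node())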
@@ -1,324 +0,0 @@
#!/usr/bin/env python3
from flask import Flask, request, json, jsonify, abort, make_response
import logging
import argparse
import time
import dataset
import os
import sys
from math import inf
from xdg import XDG_CONFIG_HOME

CONFIG_FILE = f"{XDG_CONFIG_HOME}/homecontrol.json"
DB_FILE = f"{XDG_CONFIG_HOME}/homecontrol.sqlite"
LIMIT=100
PORT = 5000
# TIMEOUT_CLIENT = 10
core = None

# sensors
NUM_VALUES = 1000

def setup():
    # arguments
    parser = argparse.ArgumentParser(description='homecontrol')
    parser.add_argument('-p', '--port', dest='port', type=int,
                        help='listening port')
    parser.add_argument('-c', '--config', dest='config', type=str,
                        help='config file', default=CONFIG_FILE)
    parser.add_argument('-f', '--dbfile', dest='dbfile', type=str,
                        help='database file', default=DB_FILE)
    parser.add_argument('-d', '--debug', dest='debug', action='store_true',
                        help='debug mode')
    parser.add_argument('-D', '--debugflask', dest='debugflask', action='store_true',
                        help='flask debug mode')

    # parse arguments
    args = parser.parse_args()

    # initialize config
    if not os.path.exists(XDG_CONFIG_HOME):
        os.makedirs(XDG_CONFIG_HOME)
    config = {}
    try:
        config_file = open(args.config, "r")
        config = json.load(config_file)
    except:
        pass

    # fill new keys with defaults
    if not config.get('port'):
        config['port'] = PORT

    if not config.get('dbfile'):
        config['dbfile'] = DB_FILE

    # overwrite with arguments
    if args.port:
        config['port'] = args.port
    if args.dbfile:
        config['dbfile'] = args.dbfile

    # save to file
    with open(args.config, 'w') as config_file:
        json.dump(config, config_file)

    return args, config

class Logger:
    def __init__(self, name, debug):
        FORMAT="[%(asctime)13s :: %(name)18s :: %(levelname)7s] %(message)s"
        DATEFMT="%Y%m%d %H:%M:%S"
        logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt=DATEFMT)
        self.logger = logging.getLogger(name)
        if debug:
            self.logger.setLevel(logging.DEBUG)

    def getLogger(self):
        return self.logger


class Core:
    def __init__(self, dbfile, debug):
        self.logger = Logger("Core", debug).getLogger()
        self.logger.info("initialization starting...")
        self.actor_queue = {}
        self.db = dataset.connect(f"sqlite:///{dbfile}?check_same_thread=False")
        self.logger.info("initialization complete.")

    def actor_add_level(self, actorId, actorType, maxLevel, level):
        ret = {}
        try:
            if not self.db["actors"].find_one(actorId=actorId):
                self.db['actors'].insert({"actorId": actorId, "actorType": actorType,
                                          "maxLevel": maxLevel})
            self.db['actor_levels'].insert({"actorId": actorId, "ts": time.time(), "level":
                                            level})
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def actor_add_queue(self, actorId, command, data):
        try:
            if self.db["actors"].find_one(actorId=actorId):
                self.actor_queue[actorId] = {"command": command, "data": data}
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")

    def actor_get_actors(self):
        ret = {}
        try:
            for s in self.db["actors"]:
                actorId = s["actorId"]
                ret[actorId] = self.actor_get_info(actorId)
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def actor_get_info(self, actorId):
        ret = {}
        try:
            q = self.db["actors"].find_one(actorId=actorId)
            ret["actorId"] = actorId
            ret["maxLevel"] = f"0x{q['maxLevel']}"
            ret["actorType"] = q["actorType"]
            q = self.db["actor_levels"].find_one(actorId=actorId, order_by="-ts")
            ret["ts"] = q["ts"]
            ret["level"] = f"0x{q['level']}"
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def actor_get_levels(self, actorId, limit=None):
        ret = {}
        try:
            if self.db["actors"].find_one(actorId=actorId):
                ret = self.actor_get_info(actorId)
                if limit:
                    query = self.db["actor_levels"].find(actorId=actorId, _limit=limit)
                else:
                    query = self.db["actor_levels"].find(actorId=actorId)
                levels = []
                for q in query:
                    levels.append({"ts": q["ts"], "value": f"0x{q['level']}"})
                ret["levels"] = levels
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def actor_get_queue(self, actorId):
        ret = {}
        try:
            if actorId in self.actor_queue:
                ret = self.actor_queue.pop(actorId)
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_get_info(self, sensorId):
        ret = {}
        try:
            q = self.db["sensors"].find_one(sensorId=sensorId)
            ret["sensorId"] = sensorId
            ret["sensorType"] = q["sensorType"]
            q = self.db["sensor_values"].find_one(sensorId=sensorId, order_by="-ts")
            ret["ts"] = q["ts"]
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_get_values(self, sensorId, min_ts, max_ts, limit=None):
        ret = {}
        try:
            if self.db["sensors"].find_one(sensorId=sensorId):
                ret = self.sensor_get_info(sensorId)
                if limit:
                    query = self.db["sensor_values"].find(sensorId=sensorId,
                                                          ts={"between": [min_ts, max_ts]}, _limit=limit)
                else:
                    query = self.db["sensor_values"].find(sensorId=sensorId,
                                                          ts={"between": [min_ts, max_ts]})
                values = []
                for q in query:
                    values.append({"ts": float(q["ts"]), "value": float(q["value"])})
                ret["values"] = values
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret

    def sensor_get_sensors(self):
        ret = {}
        try:
            for s in self.db["sensors"]:
                sensorId = s["sensorId"]
                ret[sensorId] = self.sensor_get_info(sensorId)
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        return ret

    def sensor_add_value(self, sensorId, sensorType, value):
        ret = {}
        try:
            if not self.db["sensors"].find_one(sensorId=sensorId):
                self.db['sensors'].insert({"sensorId": sensorId, "sensorType": sensorType})
            self.db['sensor_values'].insert({"sensorId": sensorId, "ts": time.time(), "value":
                                             value})
        except Exception as ex:
            self.logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")

        return ret


app = Flask(__name__)


@app.route("/actor/command", methods=['POST'])
def actor_command():
    ret = {}
    try:
        content = request.json
        actorId = content.get("id")
        command = content.get("command")
        data = content.get("data")
        core.actor_add_queue(actorId, command, data)
    except Exception as ex:
        logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        abort(400)

    return make_response(jsonify(ret), 200)


@app.route("/actor/get", methods=['GET'])
def actor_get():
    ret = core.actor_get_actors()
    return make_response(jsonify(ret), 200)


@app.route("/actor/get_levels/<actorId>", methods=['GET'])
def actor_get_level(actorId):
    ret = {}
    if request.args.get("limit"):
        limit = int(request.args.get("limit"))
    else:
        limit = None
    logger.debug(f"actor/get_levels/{actorId}, limit: {limit}")
    ret[actorId] = core.actor_get_levels(actorId, limit=limit)
    return make_response(jsonify(ret), 200)


@app.route("/actor/update", methods=['POST'])
def actor_update():
    ret = {}
    try:
        content = request.json
        actorId = content.get("id")
        actorType = content.get("type")
        level = content.get("level")
        maxLevel = content.get("maxLevel")
        core.actor_add_level(actorId, actorType, maxLevel, level)
        ret = core.actor_get_queue(actorId)
    except Exception as ex:
        logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        abort(400)

    return make_response(jsonify(ret), 200)


@app.route("/sensor/get", methods=['GET'])
def sensor_get_sensors():
    ret = core.sensor_get_sensors()
    return make_response(jsonify(ret), 200)


@app.route("/sensor/get_values/<sensorId>", methods=['GET'])
def sensor_get_values(sensorId):
    ret = {}
    min_ts = 0
    if request.args.get("min_ts"):
        min_ts = int(request.args.get("min_ts"))
    max_ts = inf
    limit = None
    if request.args.get("limit"):
        limit = int(request.args.get("limit"))
    if request.args.get("max_ts"):
        max_ts = int(request.args.get("max_ts"))
    logger.debug(f"sensor/get_values/{sensorId}, [{min_ts}, {max_ts}], limit: {limit}")
    ret[sensorId] = core.sensor_get_values(sensorId, min_ts=min_ts, max_ts=max_ts, limit=limit)
    return make_response(jsonify(ret), 200)


@app.route("/sensor/update", methods=['POST'])
def sensor_add_value():
    ret = {}
    try:
        content = request.json
        sensorId = content.get("id")
        sensorType = content.get("type")
        value = content.get("value")
        ret = core.sensor_add_value(sensorId, sensorType, value)
    except Exception as ex:
        logger.error("Exception Type:{type(ex).__name__}, args: {ex.args}")
        abort(400)
    return make_response(jsonify(ret), 200)


def main():
    args, config = setup()
    core = Core(config['dbfile'], args.debug)
    logger = Logger("main", args.debug).getLogger()

    app.run(host='0.0.0.0', port=config['port'], debug=args.debugflask)

if __name__ == "__main__":
    main()
34  poetry.lock  (generated)
@@ -307,6 +307,14 @@ optional = false
 python-versions = "*"
 version = "0.1.8"

+[[package]]
+category = "main"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+name = "websockets"
+optional = false
+python-versions = ">=3.6.1"
+version = "8.1"
+
 [[package]]
 category = "main"
 description = "The comprehensive WSGI web application library."

@@ -328,7 +336,7 @@ python-versions = ">=3.6,<4.0"
 version = "4.0.1"

 [metadata]
-content-hash = "16ab7be1164526511af6ed79ca5a18d81640a45f6b724a8c630738700b25eaef"
+content-hash = "7aff4b3329061d479efc285986d802cecef5bc4a051feffeef7dd90a74e98df9"
 python-versions = "^3.8"

 [metadata.files]

@@ -471,6 +479,30 @@ wcwidth = [
     {file = "wcwidth-0.1.8-py2.py3-none-any.whl", hash = "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603"},
     {file = "wcwidth-0.1.8.tar.gz", hash = "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"},
 ]
+websockets = [
+    {file = "websockets-8.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c"},
+    {file = "websockets-8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170"},
+    {file = "websockets-8.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8"},
+    {file = "websockets-8.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb"},
+    {file = "websockets-8.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5"},
+    {file = "websockets-8.1-cp36-cp36m-win32.whl", hash = "sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a"},
+    {file = "websockets-8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5"},
+    {file = "websockets-8.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989"},
+    {file = "websockets-8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d"},
+    {file = "websockets-8.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779"},
+    {file = "websockets-8.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8"},
+    {file = "websockets-8.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422"},
+    {file = "websockets-8.1-cp37-cp37m-win32.whl", hash = "sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc"},
+    {file = "websockets-8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308"},
+    {file = "websockets-8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092"},
+    {file = "websockets-8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485"},
+    {file = "websockets-8.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1"},
+    {file = "websockets-8.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55"},
+    {file = "websockets-8.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824"},
+    {file = "websockets-8.1-cp38-cp38-win32.whl", hash = "sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36"},
+    {file = "websockets-8.1-cp38-cp38-win_amd64.whl", hash = "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b"},
+    {file = "websockets-8.1.tar.gz", hash = "sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f"},
+]
 werkzeug = [
     {file = "Werkzeug-1.0.0-py2.py3-none-any.whl", hash = "sha256:6dc65cf9091cf750012f56f2cad759fa9e879f511b5ff8685e456b4e3bf90d16"},
     {file = "Werkzeug-1.0.0.tar.gz", hash = "sha256:169ba8a33788476292d04186ab33b01d6add475033dfc07215e6d219cc077096"},
@@ -11,12 +11,13 @@ requests = "^2.23.0"
 flask = "^1.1.1"
 dataset = "^1.2.2"
 xdg = "^4.0.1"
+websockets = "^8.1"

 [tool.poetry.dev-dependencies]
 pytest = "^5.2"

 [tool.poetry.scripts]
-homecontrol = "homecontrol.homecontrol:main"
+homecontrol = "homecontrol.core:main"

 [build-system]
 requires = ["poetry>=0.12"]
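For reference (not part of the commit), the updated [tool.poetry.scripts] entry means the installed homecontrol console script now resolves to core.main(); invoking it is equivalent to this sketch:

    # equivalent invocation of the new entry point (illustrative only)
    from homecontrol import core
    core.main()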