bettwaage-sidequest #2
10 changed files with 460 additions and 148 deletions
@@ -1,27 +0,0 @@
-import requests as r
-from time import sleep
-
-bett_ip = "http://192.168.178.110:80"
-mash_ip = "http://192.168.178.84:9587"
-
-while True:
-    try:
-        tl = r.get(f"{bett_ip}/sensor/tl/").json()["value"]
-        tr = r.get(f"{bett_ip}/sensor/tr/").json()["value"]
-        br = r.get(f"{bett_ip}/sensor/br/").json()["value"]
-
-        print(f"tl = {tl}")
-        print(f"tr = {tr}")
-        # print(f"tl = {tl}")
-        print(f"br = {br}")
-        print("==========")
-        print(f"total = {tl + tr + br * 2}")
-        print("==========")
-
-        s = r.post(f"{mash_ip}/bettwaage/add?tl={int(tl * 1000)}&tr={int(tr * 1000)}&bl={int(br * 1000)}&br={int(br * 1000)}")
-
-        sleep(1)
-    except KeyboardInterrupt:
-        exit()
-    except:
-        pass
bettwaage-plotter/data_analysis.ipynb (new file, 118 lines)
File diff suppressed because one or more lines are too long
@@ -2,72 +2,78 @@ import requests
 import matplotlib.pyplot as plt
 from datetime import datetime
 import json
+import os

+latest_history_path = "latest_history.json"

-file_path = "history.json"
+file_path = latest_history_path
 history_url = "http://192.168.178.84:9587/bettwaage/history"

+focus_on_latest_bed_data = False

 convert_time_to_seconds = True

-# Script
+# Get data
 data = None
-if file_path is None:
+if file_path is None or not os.path.exists(file_path):
+    print("Fetching data ...")
     data = requests.get(history_url)
     data = data.json()

+    print("Saving latest data ...")
+    with open(latest_history_path, "w", encoding="UTF-8") as fp:
+        json.dump(data, fp)
 else:
+    print("Reading data ...")
     with open(file_path, "r") as fp:
         data = json.load(fp)


-# Experiment: Solving for missing foot with known total weight
-bed_weight = 78290
-person_weight = 63000
-known_total_weight = bed_weight + person_weight
+print("Processing data ...")
+
+# Get rough value for empty bed weight per leg
+rough_bed_weight = 80
 bed_only_weight = {}
 for d in data:
-    if d["total"] == bed_weight:
+    if d["total"] < rough_bed_weight:
         bed_only_weight = {
             "tl": d["tl"],
             "tr": d["tr"],
-            "bl": bed_weight - (d["tl"] + d["tr"] + d["br"]),
+            "bl": d["bl"],
             "br": d["br"],
         }
+        total_bed_only_weight = sum(bed_only_weight.values())
         break

-in_bed_data = None
-threshhold = 100000
-min_length = 100
-for d in data:
-    t = d["total"]
-    if t >= threshhold:
-        if in_bed_data is None:
-            in_bed_data = []
-        in_bed_data.append(d)
-    elif in_bed_data is not None:
-        if len(in_bed_data) < min_length:
-            in_bed_data = []
-        else:
-            break
-
-# Calculate bottom left
-for d in data:
-    d["bl"] = known_total_weight - (d["br"] + d["tr"] + d["tl"])
-    # Set known total weight
-    d["total"] = known_total_weight
-
-data = in_bed_data
-
-# Array data
+if focus_on_latest_bed_data:
+    # Collect all coherent sequences of someone being in bed
+    in_bed_datas: list[list[dict]] = []
+    is_in_bed_sequence = False
+    threshhold = 100.0
+    for d in data:
+        t = d["total"]
+        if t >= threshhold:
+            if not is_in_bed_sequence:
+                in_bed_datas.append([])
+            in_bed_datas[-1].append(d)
+            is_in_bed_sequence = True
+        elif is_in_bed_sequence:
+            is_in_bed_sequence = False
+
+    # Pick latest with minimum length/duration
+    min_length = 100
+    for sequence in in_bed_datas:
+        if len(sequence) >= min_length:
+            data = sequence
+
+# Prepare data for plotting
 x = [d["timestamp"] for d in data]
-x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]
+# x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]

-if convert_time_to_seconds:
-    max_time = max(x)
-    x = [(d - max_time).total_seconds() for d in x]
+# if convert_time_to_seconds:
+#     max_time = max(x)
+#     x = [(d - max_time).total_seconds() for d in x]

 total = [d["total"] for d in data]
 tl = [d["tl"] for d in data]

@@ -80,20 +86,35 @@ left = [t + b for t, b in zip(tl, bl)]
 right = [t + b for t, b in zip(tr, br)]


+fig, ax = plt.subplots()
+
+person_weight = [t - total_bed_only_weight for t in total]
+
+ax.set_xlabel("Time (s)")
+ax.set_ylabel("Weight (kg)")
+
+ax.plot(x, person_weight, color="tab:blue")
+
+plt.show()
+exit()
+
 # Experiment: Calculate position over time
 bed_size = (140, 200)
 left_bed_only = bed_only_weight["tl"] + bed_only_weight["bl"]
 top_bed_only = bed_only_weight["tr"] + bed_only_weight["tl"]
+right_bed_only = bed_only_weight["tr"] + bed_only_weight["br"]
+bottom_bed_only = bed_only_weight["br"] + bed_only_weight["bl"]
 position_over_time = []
-for t, l in zip(top, left):
+for t, b, l, r in zip(top, bottom, left, right):
+    horizontal_weight = l - left_bed_only + r - right_bed_only
+    vertical_weight = t - top_bed_only + b - bottom_bed_only
     position_over_time.append(
         (
-            bed_size[0] * (l - left_bed_only) / person_weight,
-            bed_size[1] * (t - top_bed_only) / person_weight,
+            bed_size[0] * (l - left_bed_only) / horizontal_weight,
+            bed_size[1] * (t - top_bed_only) / vertical_weight,
         )
     )


 # Plot data
 fig, (ax0, ax1) = plt.subplots(nrows=2)

@@ -112,7 +133,6 @@ ax0.legend(
     ["Total", "Top Left", "Top Right", "Bottom Left", "Bottom Right", "Top", "Bottom"]
 )


 # Experiment: Plot position
 import math
 import colorsys
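For reference, a minimal standalone sketch of the position estimate introduced in this file. The leg readings below are made-up example values; bed_size, the *_bed_only sums, and the division by the horizontal/vertical excess weight follow the script:

# Hypothetical example values; the real script derives them from the logged history.
bed_size = (140, 200)  # width x length in cm
bed_only_weight = {"tl": 18.0, "tr": 19.0, "bl": 21.0, "br": 22.0}  # empty bed, kg per leg
sample = {"tl": 45.0, "tr": 30.0, "bl": 28.0, "br": 25.0}  # one reading with a person in bed

left = sample["tl"] + sample["bl"]
right = sample["tr"] + sample["br"]
top = sample["tl"] + sample["tr"]
bottom = sample["bl"] + sample["br"]

left_bed_only = bed_only_weight["tl"] + bed_only_weight["bl"]
right_bed_only = bed_only_weight["tr"] + bed_only_weight["br"]
top_bed_only = bed_only_weight["tr"] + bed_only_weight["tl"]
bottom_bed_only = bed_only_weight["br"] + bed_only_weight["bl"]

# Excess weight (above the empty bed) on each side of the frame.
horizontal_weight = left - left_bed_only + right - right_bed_only
vertical_weight = top - top_bed_only + bottom - bottom_bed_only

# Coordinates scale the bed dimensions by the share of excess weight per side.
x_cm = bed_size[0] * (left - left_bed_only) / horizontal_weight
y_cm = bed_size[1] * (top - top_bed_only) / vertical_weight
print(f"estimated position: ({x_cm:.1f} cm, {y_cm:.1f} cm)")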
@@ -1,3 +1,4 @@
 matplotlib
 requests
 numpy
+PyQt5
@@ -1,7 +1,11 @@
-# For Philips Hue Counter
+# For Philips Hue
 phue

+# For Fritz.Box API
+fritzconnection
+
 # API
+requests
 fastapi
 uvicorn[standard]
@@ -1,28 +1,14 @@
+import asyncio
 from fastapi.responses import HTMLResponse, JSONResponse
 from fastapi import APIRouter

-from datetime import datetime
 import os
 import csv
+from .handlers.bett import file_path, local_history, log_bed_weights


 router = APIRouter()
+asyncio.create_task(log_bed_weights())

-file_path = "bettwaage.csv"
-header = "timestamp;tl;tr;bl;br;total;"
-
-latest_values = []
-zero_values = [0, 0, 0, 0]
-scale_values = [1, 1, 1, 1]
-
-
-def add_line_to_history(line: str) -> None:
-    with open(file_path, "a") as fp:
-        fp.write(line + "\n")
-
-
-def convert_to_weight(value: int, zero_value: int, scale: float) -> float:
-    return (value - zero_value) * scale
-
-
 @router.get("/file", tags=["file"])

@@ -32,7 +18,8 @@ async def get_file():


 @router.get("/history")
-async def get_history(count: int = None) -> []:
+async def get_history(count: int = None) -> list[dict]:

     points = []
     with open(file_path, "r", encoding="UTF-8") as fp:
         reader = csv.DictReader(fp, delimiter=";")

@@ -57,61 +44,15 @@ async def get_history(count: int = None) -> []:
     return points


-@router.post("/add")
-async def add_weight(tl: int, tr: int, bl: int, br: int):
-    global latest_values
-    latest_values = [tl, tr, bl, br]
-
-    tl = convert_to_weight(tl, zero_values[0], scale_values[0])
-    tr = convert_to_weight(tr, zero_values[1], scale_values[1])
-    bl = convert_to_weight(bl, zero_values[2], scale_values[2])
-    br = convert_to_weight(br, zero_values[3], scale_values[3])
-
-    sum = tl + tr + bl + br
-    add_line_to_history(f"{str(datetime.now())};{tl};{tr};{bl};{br};{sum};")
-    return "Added data"
-
-
 @router.get("/latest")
 async def get_latest():
-    if not latest_values:
+    if len(local_history) == 0:
         return HTMLResponse(status_code=200, content="No data given yet")
-    total = sum(latest_values)
-    return JSONResponse(
-        {
-            "tl": latest_values[0],
-            "tr": latest_values[1],
-            "bl": latest_values[2],
-            "br": latest_values[3],
-            "total": total,
-        }
-    )
+    return JSONResponse(local_history[-1])


 @router.delete("/delete", tags=["file"])
 async def delete_file():
     os.remove(file_path)
-    add_line_to_history(header)
-    return "Deleted file and created new file with headers"
-
-
-@router.post("/zero", tags=["calibration"])
-async def set_zero():
-    if not latest_values:
-        return HTMLResponse(
-            status_code=400, content="Requiring data before setting zeros."
-        )
-    global zero_values
-    zero_values = latest_values
-    return "Set zeroes to: " + " | ".join(str(v) for v in zero_values)
-
-
-@router.post("/scales", tags=["calibration"])
-async def set_scales(tl: float, tr: float, bl: float, br: float):
-    global scale_values
-    scale_values = [tl, tr, bl, br]
-    return "Set scales to: " + " | ".join(str(v) for v in scale_values)
-
-
-if not os.path.exists(file_path):
-    add_line_to_history(header)
+    return "Deleted file"
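A small sketch of how the slimmed-down router can be queried after this change. The base URL is taken from the plotter script above; it assumes at least one sample has already been logged, otherwise /latest returns a plain "No data given yet" response:

import requests

base = "http://192.168.178.84:9587/bettwaage"

# Latest sample as kept in local_history: tl/tr/bl/br plus total.
latest = requests.get(f"{base}/latest").json()
print(latest["total"])

# Full history parsed from the CSV log; count limits the number of points returned.
history = requests.get(f"{base}/history", params={"count": 100}).json()
print(len(history), "points")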
src/endpoints/handlers/bett.py (new file, 171 lines)

@@ -0,0 +1,171 @@
+import asyncio
+from datetime import datetime
+import math
+import os
+from statistics import median
+from typing import Optional
+import requests as r
+from ..hue import hue
+import logging
+
+file_path: str = "bettwaage.csv"
+header: str = "timestamp;tl;tr;bl;br;total;\n"
+
+bett_ip: str = "http://192.168.178.110:80"
+matrix_clock_api: str = "http://192.168.178.84:8000"
+
+empty_weight: Optional[float] = None
+local_history = []
+history_max_length: int = 24 * 60 * 60  # 24 hours
+min_noticable_difference: float = 25  # In kg
+initial_scale_coutndown: int = (
+    0  # Number of updates for the scale, until return to clock, should be a multiple of 3
+)
+current_scale_countdown: int = 0
+
+average_person_weight: float = 75
+
+sexy_mode_detection: bool = False  # Turn lights "sexy" if two people are in bed
+
+is_warning_active: int = -1
+leg_capacity_limit_patterns = [
+    # {"limit": 80, "pattern": 110, "duration": 250},
+    # {"limit": 90, "pattern": 110, "duration": 100},
+    # {"limit": 100, "pattern": 10, "duration": 50},
+]
+
+
+def get_clusters(data: list[float], min_delta: float) -> dict:
+    clusters = {}
+    for point in data:
+        for known in clusters.keys():
+            if math.fabs(point - known) < min_delta:
+                clusters[known].append(point)
+                continue
+        clusters[point] = [point]
+    return clusters
+
+
+def show_time():
+    r.post(f"{matrix_clock_api}/time")
+
+
+def show_scale(weight: float):
+    r.post(f"{matrix_clock_api}/message", json={"message": f"{weight:3.1f}kg"})
+
+
+def is_capacity_reached() -> bool:
+    latest = local_history[-1]
+    highest_limit = None
+    for value in [latest["tl"], latest["tr"], latest["br"], latest["bl"]]:
+        for limit in leg_capacity_limit_patterns:
+            if value >= limit["limit"] and (
+                highest_limit is None or limit["limit"] > highest_limit["limit"]
+            ):
+                highest_limit = limit
+
+    global is_warning_active
+    if highest_limit is None:
+        if is_warning_active:
+            is_warning_active = 0
+            show_time()
+        return False
+
+    if is_warning_active != highest_limit["limit"]:
+        is_warning_active = highest_limit["limit"]
+        r.post(
+            f"{matrix_clock_api}/pattern?pattern={highest_limit['pattern']}&step_ms={highest_limit['duration']}"
+        )
+
+    return True
+
+
+def check_for_change():
+    # Check for capicity limits
+    if is_capacity_reached():
+        logging.info(f"Capacity reached")
+        return
+
+    if len(local_history) < 2:
+        return
+
+    global current_scale_countdown
+    global empty_weight
+    latest = local_history[-1]
+    if current_scale_countdown > 0:
+        if current_scale_countdown % 3 == 0:
+            show_scale(latest["total"] - empty_weight)
+        current_scale_countdown -= 1
+
+    # Is triggered?
+    delta = latest["total"] - local_history[-2]["total"]
+    if math.fabs(delta) < min_noticable_difference:
+        return
+
+    # Changed weight up or down?
+    weight_increased = delta > 0
+
+    # Make sure there is a bed_weight
+    if empty_weight is None:
+        clusters = get_clusters(
+            [d["total"] for d in local_history], min_noticable_difference
+        )
+        empty_weight = min([median(cluster) for cluster in clusters.values()])
+        logging.info(f"Empty weight: {empty_weight}")
+
+    # Determine number of people
+    number_of_people = round((latest["total"] - empty_weight) / average_person_weight)
+    logging.info(f"Number of people: {number_of_people}")
+
+    # Show scale?
+    if number_of_people == 1 and weight_increased and current_scale_countdown == 0:
+        current_scale_countdown = initial_scale_coutndown
+    else:
+        current_scale_countdown = 0
+
+    # Make room sexy
+    if sexy_mode_detection:
+        if number_of_people >= 2 and weight_increased:
+            hue.in_room_activate_scene("Max Zimmer", "Sexy")
+        elif number_of_people == 1 and not weight_increased:
+            hue.in_room_activate_scene("Max Zimmer", "Tageslicht")
+
+
+def add_line_to_bed_history(line: str) -> None:
+    exists = os.path.exists(file_path)
+    with open(file_path, "a") as fp:
+        if not exists:
+            fp.write(header)
+        fp.write(line + "\n")
+
+
+def add_weights_to_log(tl: float, tr: float, bl: float, br: float):
+    total = tl + tr + bl + br
+    timestamp = datetime.now()
+
+    global local_history
+    local_history.append({"tl": tl, "tr": tr, "bl": bl, "br": br, "total": total})
+    if len(local_history) > history_max_length:
+        local_history = local_history[len(local_history) - history_max_length :]
+
+    add_line_to_bed_history(f"{str(timestamp)};{tl};{tr};{bl};{br};{total};")
+
+
+async def log_bed_weights():
+    while True:
+        try:
+            tl = r.get(f"{bett_ip}/sensor/tl/").json()["value"]
+            tr = r.get(f"{bett_ip}/sensor/tr/").json()["value"]
+            bl = r.get(f"{bett_ip}/sensor/bl/").json()["value"]
+            br = r.get(f"{bett_ip}/sensor/br/").json()["value"]
+
+            # Sanity check
+            if min([tl, tr, bl, br]) <= 0:
+                continue
+
+            add_weights_to_log(tl, tr, bl, br)
+            check_for_change()
+        except Exception as ex:
+            logging.exception(ex)
+        finally:
+            await asyncio.sleep(1)
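Since the handler now owns the CSV log, a short sketch of reading it back outside the API, mirroring what get_history does; the file name and column layout come from file_path and header above:

import csv

with open("bettwaage.csv", "r", encoding="UTF-8") as fp:
    reader = csv.DictReader(fp, delimiter=";")  # columns: timestamp;tl;tr;bl;br;total;
    for row in reader:
        print(row["timestamp"], float(row["total"]))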
src/endpoints/handlers/fritz.py (new file, 78 lines)

@@ -0,0 +1,78 @@
+import asyncio
+import logging
+from fritzconnection import FritzConnection
+from datetime import datetime
+from ..hue import hue
+
+
+refresh_every_seconds: int = 60  # Every x seconds devices are polled again
+trigger_away_after_seconds: int = (
+    3 * 60
+)  # After all away-devices are gone for x seconds
+away_triggered = False
+away_devices = ["B2:06:77:EE:A9:0F"]  # Max' iPhone
+macaddresses_to_track = ["B2:06:77:EE:A9:0F"]  # Max' iPhone
+
+fritz_api = FritzConnection(address="192.168.178.1")
+
+# Referenced documentation: https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf
+
+devices_last_online = {}
+
+
+def get_all_devices() -> list:
+    numberOfDevices = fritz_api.call_action("Hosts", "GetHostNumberOfEntries")[
+        "NewHostNumberOfEntries"
+    ]
+    devices = []
+    for i in range(numberOfDevices):
+        devices.append(
+            fritz_api.call_action("Hosts", "GetGenericHostEntry", NewIndex=i)
+        )
+    return devices
+
+
+def get_specific_device(mac_address: str) -> dict:
+    return fritz_api.call_action(
+        "Hosts", "GetSpecificHostEntry", NewMACAddress=mac_address
+    )
+
+
+def check_for_change():
+    # Check if devices are away for away-mode
+    all_away = True
+    for device in away_devices:
+        last_online = devices_last_online[device]
+        if (datetime.now() - last_online).total_seconds() < trigger_away_after_seconds:
+            all_away = False
+            break
+
+    # Execute away mode
+    global away_triggered
+    if all_away:
+        if not away_triggered:
+            away_triggered = True
+            hue.in_room_deactivate_lights("Max Zimmer")
+    else:
+        away_triggered = False
+
+
+async def track_network_devices():
+    global devices_last_online
+
+    # Initial values to avoid None
+    for macaddress in macaddresses_to_track:
+        devices_last_online[macaddress] = datetime(1970, 1, 1, 0, 0, 0)
+
+    while True:
+        try:
+            for macaddress in macaddresses_to_track:
+                is_online = get_specific_device(macaddress)["NewActive"]
+                if is_online:
+                    devices_last_online[macaddress] = datetime.now()
+
+            check_for_change()
+        except Exception as ex:
+            logging.exception(ex)
+        finally:
+            await asyncio.sleep(refresh_every_seconds)
src/main.py (17 changed lines)

@@ -1,13 +1,14 @@
-from core.mash import MaSH
-from hue.hue_feature import HueModule
-from matrix_clock.matrix_clock_feature import MatrixClockModule
+import asyncio
+from fastapi import FastAPI
+from endpoints.hue import router as hue_router
+from endpoints.bettwaage import router as bettwaage_router
+from endpoints.handlers.fritz import track_network_devices

-mash: MaSH = MaSH("config.yaml")
-mash.add_module(HueModule())
-mash.add_module(MatrixClockModule())
+app = FastAPI()
+asyncio.create_task(track_network_devices())

+app.include_router(hue_router, prefix="/hue", tags=["hue"])
 app.include_router(bettwaage_router, prefix="/bettwaage", tags=["bett"])

 if __name__ == "__main__":
-    mash.run()
+    app.run()
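Note that asyncio.create_task only succeeds once an event loop is running. A sketch of an alternative wiring, assuming the module layout from this PR and FastAPI's startup event (not what the diff does; just one possible arrangement):

import asyncio

from fastapi import FastAPI

from endpoints.hue import router as hue_router
from endpoints.bettwaage import router as bettwaage_router
from endpoints.handlers.fritz import track_network_devices
from endpoints.handlers.bett import log_bed_weights

app = FastAPI()
app.include_router(hue_router, prefix="/hue", tags=["hue"])
app.include_router(bettwaage_router, prefix="/bettwaage", tags=["bett"])


@app.on_event("startup")
async def start_background_tasks():
    # Started here so a running event loop is guaranteed.
    asyncio.create_task(track_network_devices())
    asyncio.create_task(log_bed_weights())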
start.sh (new file, 5 lines)

@@ -0,0 +1,5 @@
+cd /home/pi/mash-server/src
+
+source ../.env/bin/activate
+
+python3 -m uvicorn main:app --reload --host 0.0.0.0 --port 9587