Merge pull request 'bettwaage-sidequest' (#2) from bettwaage-sidequest into master

Reviewed-on: #2
This commit is contained in:
Maximilian Giller 2024-06-07 23:38:55 +02:00
commit dac9d81777
10 changed files with 460 additions and 148 deletions

View file

@ -1,27 +0,0 @@
import requests as r
from time import sleep

# Endpoints: the bed-scale microcontroller and the MaSH home-automation server.
bett_ip = "http://192.168.178.110:80"
mash_ip = "http://192.168.178.84:9587"

# Poll three load cells once per second and forward the readings to MaSH.
# Only tl/tr/br are read; the br value is substituted for bl in the upload
# (and counted twice in the printed total) — presumably the bl sensor is
# unavailable; TODO confirm against the scale hardware.
while True:
    try:
        tl = r.get(f"{bett_ip}/sensor/tl/").json()["value"]
        tr = r.get(f"{bett_ip}/sensor/tr/").json()["value"]
        br = r.get(f"{bett_ip}/sensor/br/").json()["value"]

        print(f"tl = {tl}")
        print(f"tr = {tr}")
        print(f"br = {br}")
        print("==========")
        print(f"total = {tl + tr + br * 2}")
        print("==========")

        # Values are scaled by 1000 and truncated to ints for the upload —
        # presumably a kg -> g conversion; verify against the server's units.
        s = r.post(
            f"{mash_ip}/bettwaage/add?tl={int(tl * 1000)}&tr={int(tr * 1000)}&bl={int(br * 1000)}&br={int(br * 1000)}"
        )

        sleep(1)
    except KeyboardInterrupt:
        exit()
    except Exception:
        # Best-effort loop: ignore transient network/JSON errors and retry.
        # (Was a bare `except:`, which also hid programming errors.)
        pass

File diff suppressed because one or more lines are too long

View file

@ -2,72 +2,78 @@ import requests
import matplotlib.pyplot as plt
from datetime import datetime
import json
import os
latest_history_path = "latest_history.json"
file_path = "history.json"
file_path = latest_history_path
history_url = "http://192.168.178.84:9587/bettwaage/history"
focus_on_latest_bed_data = False
convert_time_to_seconds = True
# Script
# Get data
data = None
if file_path is None:
if file_path is None or not os.path.exists(file_path):
print("Fetching data ...")
data = requests.get(history_url)
data = data.json()
print("Saving latest data ...")
with open(latest_history_path, "w", encoding="UTF-8") as fp:
json.dump(data, fp)
else:
print("Reading data ...")
with open(file_path, "r") as fp:
data = json.load(fp)
# Experiment: Solving for missing foot with known total weight
bed_weight = 78290
person_weight = 63000
known_total_weight = bed_weight + person_weight
print("Processing data ...")
# Get rough value for empty bed weight per leg
rough_bed_weight = 80
bed_only_weight = {}
for d in data:
if d["total"] == bed_weight:
if d["total"] < rough_bed_weight:
bed_only_weight = {
"tl": d["tl"],
"tr": d["tr"],
"bl": bed_weight - (d["tl"] + d["tr"] + d["br"]),
"bl": d["bl"],
"br": d["br"],
}
total_bed_only_weight = sum(bed_only_weight.values())
break
in_bed_data = None
threshhold = 100000
min_length = 100
if focus_on_latest_bed_data:
# Collect all coherent sequences of someone being in bed
in_bed_datas: list[list[dict]] = []
is_in_bed_sequence = False
threshhold = 100.0
for d in data:
t = d["total"]
if t >= threshhold:
if in_bed_data is None:
in_bed_data = []
in_bed_data.append(d)
elif in_bed_data is not None:
if len(in_bed_data) < min_length:
in_bed_data = []
else:
break
if not is_in_bed_sequence:
in_bed_datas.append([])
is_in_bed_sequence = True
in_bed_datas[-1].append(d)
elif is_in_bed_sequence:
is_in_bed_sequence = False
# Pick latest with minimum length/duration
min_length = 100
for sequence in in_bed_datas:
if len(sequence) >= min_length:
data = sequence
# Calculate bottom left
for d in data:
d["bl"] = known_total_weight - (d["br"] + d["tr"] + d["tl"])
# Set known total weight
d["total"] = known_total_weight
data = in_bed_data
# Array data
# Prepare data for plotting
x = [d["timestamp"] for d in data]
x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]
# x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]
if convert_time_to_seconds:
max_time = max(x)
x = [(d - max_time).total_seconds() for d in x]
# if convert_time_to_seconds:
# max_time = max(x)
# x = [(d - max_time).total_seconds() for d in x]
total = [d["total"] for d in data]
tl = [d["tl"] for d in data]
@ -80,20 +86,35 @@ left = [t + b for t, b in zip(tl, bl)]
right = [t + b for t, b in zip(tr, br)]
fig, ax = plt.subplots()
person_weight = [t - total_bed_only_weight for t in total]
ax.set_xlabel("Time (s)")
ax.set_ylabel("Weight (kg)")
ax.plot(x, person_weight, color="tab:blue")
plt.show()
exit()
# Experiment: Calculate position over time
bed_size = (140, 200)
left_bed_only = bed_only_weight["tl"] + bed_only_weight["bl"]
top_bed_only = bed_only_weight["tr"] + bed_only_weight["tl"]
right_bed_only = bed_only_weight["tr"] + bed_only_weight["br"]
bottom_bed_only = bed_only_weight["br"] + bed_only_weight["bl"]
position_over_time = []
for t, l in zip(top, left):
for t, b, l, r in zip(top, bottom, left, right):
horizontal_weight = l - left_bed_only + r - right_bed_only
vertical_weight = t - top_bed_only + b - bottom_bed_only
position_over_time.append(
(
bed_size[0] * (l - left_bed_only) / person_weight,
bed_size[1] * (t - top_bed_only) / person_weight,
bed_size[0] * (l - left_bed_only) / horizontal_weight,
bed_size[1] * (t - top_bed_only) / vertical_weight,
)
)
# Plot data
fig, (ax0, ax1) = plt.subplots(nrows=2)
@ -112,7 +133,6 @@ ax0.legend(
["Total", "Top Left", "Top Right", "Bottom Left", "Bottom Right", "Top", "Bottom"]
)
# Experiment: Plot position
import math
import colorsys

View file

@ -1,3 +1,4 @@
matplotlib
requests
numpy
PyQt5

View file

@ -1,7 +1,11 @@
# For Philips Hue Counter
# For Philips Hue
phue
# For Fritz.Box API
fritzconnection
# API
requests
fastapi
uvicorn[standard]

View file

@ -1,28 +1,14 @@
import asyncio
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi import APIRouter
from datetime import datetime
import os
import csv
from .handlers.bett import file_path, local_history, log_bed_weights
router = APIRouter()
file_path = "bettwaage.csv"
header = "timestamp;tl;tr;bl;br;total;"
latest_values = []
zero_values = [0, 0, 0, 0]
scale_values = [1, 1, 1, 1]
def add_line_to_history(line: str) -> None:
with open(file_path, "a") as fp:
fp.write(line + "\n")
def convert_to_weight(value: int, zero_value: int, scale: float) -> float:
return (value - zero_value) * scale
asyncio.create_task(log_bed_weights())
@router.get("/file", tags=["file"])
@ -32,7 +18,8 @@ async def get_file():
@router.get("/history")
async def get_history(count: int = None) -> []:
async def get_history(count: int = None) -> list[dict]:
points = []
with open(file_path, "r", encoding="UTF-8") as fp:
reader = csv.DictReader(fp, delimiter=";")
@ -57,61 +44,15 @@ async def get_history(count: int = None) -> []:
return points
@router.post("/add")
async def add_weight(tl: int, tr: int, bl: int, br: int):
global latest_values
latest_values = [tl, tr, bl, br]
tl = convert_to_weight(tl, zero_values[0], scale_values[0])
tr = convert_to_weight(tr, zero_values[1], scale_values[1])
bl = convert_to_weight(bl, zero_values[2], scale_values[2])
br = convert_to_weight(br, zero_values[3], scale_values[3])
sum = tl + tr + bl + br
add_line_to_history(f"{str(datetime.now())};{tl};{tr};{bl};{br};{sum};")
return "Added data"
@router.get("/latest")
async def get_latest():
if not latest_values:
if len(local_history) == 0:
return HTMLResponse(status_code=200, content="No data given yet")
total = sum(latest_values)
return JSONResponse(
{
"tl": latest_values[0],
"tr": latest_values[1],
"bl": latest_values[2],
"br": latest_values[3],
"total": total,
}
)
return JSONResponse(local_history[-1])
@router.delete("/delete", tags=["file"])
async def delete_file():
os.remove(file_path)
add_line_to_history(header)
return "Deleted file and created new file with headers"
@router.post("/zero", tags=["calibration"])
async def set_zero():
if not latest_values:
return HTMLResponse(
status_code=400, content="Requiring data before setting zeros."
)
global zero_values
zero_values = latest_values
return "Set zeroes to: " + " | ".join(str(v) for v in zero_values)
@router.post("/scales", tags=["calibration"])
async def set_scales(tl: float, tr: float, bl: float, br: float):
global scale_values
scale_values = [tl, tr, bl, br]
return "Set scales to: " + " | ".join(str(v) for v in scale_values)
if not os.path.exists(file_path):
add_line_to_history(header)
return "Deleted file"

View file

@ -0,0 +1,171 @@
import asyncio
from datetime import datetime
import math
import os
from statistics import median
from typing import Optional
import requests as r
from ..hue import hue
import logging
# CSV log location and its header line (written once when the file is created).
file_path: str = "bettwaage.csv"
header: str = "timestamp;tl;tr;bl;br;total;\n"

# Network endpoints: the bed-scale microcontroller and the LED matrix clock API.
bett_ip: str = "http://192.168.178.110:80"
matrix_clock_api: str = "http://192.168.178.84:8000"

# Weight of the empty bed; derived lazily from clustered history in
# check_for_change(), so it stays None until enough samples exist.
empty_weight: Optional[float] = None

# Rolling in-memory history of samples (newest last); one sample per second,
# so the cap corresponds to roughly 24 hours of data.
local_history: list[dict] = []
history_max_length: int = 24 * 60 * 60 # 24 hours

# Smallest total-weight jump treated as someone entering/leaving the bed.
min_noticable_difference: float = 25 # In kg

initial_scale_coutndown: int = (
    0 # Number of updates for the scale, until return to clock, should be a multiple of 3
)
current_scale_countdown: int = 0

# Used to estimate how many people are in bed from the excess weight.
average_person_weight: float = 75

sexy_mode_detection: bool = False # Turn lights "sexy" if two people are in bed

# Currently signalled per-leg limit; -1 initially, 0 when no warning is active.
is_warning_active: int = -1

# Per-leg load limits with the matrix-clock pattern shown when exceeded.
# All entries are currently disabled (commented out).
leg_capacity_limit_patterns = [
    # {"limit": 80, "pattern": 110, "duration": 250},
    # {"limit": 90, "pattern": 110, "duration": 100},
    # {"limit": 100, "pattern": 10, "duration": 50},
]
def get_clusters(data: list[float], min_delta: float) -> dict:
    """Group values into clusters keyed by the first value seen per cluster.

    A value joins an existing cluster when it lies within ``min_delta`` of
    that cluster's key; otherwise it starts a new cluster of its own.

    :param data: values to cluster, in order.
    :param min_delta: maximum distance from a cluster key to join it.
    :return: mapping of cluster key -> list of member values.
    """
    clusters: dict[float, list[float]] = {}
    for point in data:
        for known in clusters:
            if math.fabs(point - known) < min_delta:
                clusters[known].append(point)
                # BUGFIX: was `continue`, which only advanced the inner loop
                # and then fell through to create a duplicate new cluster.
                break
        else:
            # No existing cluster matched: start a new one keyed by this point.
            clusters[point] = [point]
    return clusters
def show_time():
    """Switch the matrix clock back to its normal time display."""
    endpoint = f"{matrix_clock_api}/time"
    r.post(endpoint)
def show_scale(weight: float):
    """Display *weight* (one decimal, with a kg suffix) on the matrix clock."""
    payload = {"message": f"{weight:3.1f}kg"}
    r.post(f"{matrix_clock_api}/message", json=payload)
def is_capacity_reached() -> bool:
    """Check the latest sample's per-leg loads against the configured limits.

    Shows a warning pattern on the matrix clock when the highest exceeded
    limit changes, and restores the time display when no limit is exceeded
    anymore. Returns True while any limit is exceeded.
    """
    latest = local_history[-1]

    # Find the highest configured limit exceeded by any single leg.
    highest_limit = None
    for value in [latest["tl"], latest["tr"], latest["br"], latest["bl"]]:
        for limit in leg_capacity_limit_patterns:
            if value >= limit["limit"] and (
                highest_limit is None or limit["limit"] > highest_limit["limit"]
            ):
                highest_limit = limit

    global is_warning_active
    if highest_limit is None:
        # NOTE(review): is_warning_active starts at -1 (truthy), so this
        # branch also runs once on startup and calls show_time — confirm
        # that is intended.
        if is_warning_active:
            is_warning_active = 0
            show_time()
        return False

    # Only (re-)send the warning pattern when the active limit changed.
    if is_warning_active != highest_limit["limit"]:
        is_warning_active = highest_limit["limit"]
        r.post(
            f"{matrix_clock_api}/pattern?pattern={highest_limit['pattern']}&step_ms={highest_limit['duration']}"
        )
    return True
def check_for_change():
    """React to the newest weight sample: warn on per-leg capacity limits,
    show the person's weight on the clock, and toggle the room lighting
    based on the estimated number of people in bed.
    """
    # Check for capacity limits first; while a warning is shown, do nothing else.
    if is_capacity_reached():
        logging.info(f"Capacity reached")
        return

    # Need at least two samples to detect a change.
    if len(local_history) < 2:
        return

    global current_scale_countdown
    global empty_weight
    latest = local_history[-1]

    # While the scale display is active, refresh it every third update
    # (initial_scale_coutndown is documented to be a multiple of 3).
    if current_scale_countdown > 0:
        if current_scale_countdown % 3 == 0:
            show_scale(latest["total"] - empty_weight)
        current_scale_countdown -= 1

    # Is triggered? Only react to jumps larger than the noise threshold.
    delta = latest["total"] - local_history[-2]["total"]
    if math.fabs(delta) < min_noticable_difference:
        return

    # Changed weight up or down?
    weight_increased = delta > 0

    # Make sure there is a bed_weight: estimate the empty-bed weight as the
    # lightest cluster median over the recorded totals.
    if empty_weight is None:
        clusters = get_clusters(
            [d["total"] for d in local_history], min_noticable_difference
        )
        empty_weight = min([median(cluster) for cluster in clusters.values()])
        logging.info(f"Empty weight: {empty_weight}")

    # Determine number of people from the weight above the empty bed.
    number_of_people = round((latest["total"] - empty_weight) / average_person_weight)
    logging.info(f"Number of people: {number_of_people}")

    # Show scale? Start the countdown when exactly one person just got in.
    if number_of_people == 1 and weight_increased and current_scale_countdown == 0:
        current_scale_countdown = initial_scale_coutndown
    else:
        current_scale_countdown = 0

    # Make room sexy when a second person joins; back to daylight when one leaves.
    if sexy_mode_detection:
        if number_of_people >= 2 and weight_increased:
            hue.in_room_activate_scene("Max Zimmer", "Sexy")
        elif number_of_people == 1 and not weight_increased:
            hue.in_room_activate_scene("Max Zimmer", "Tageslicht")
def add_line_to_bed_history(line: str) -> None:
    """Append one CSV line to the on-disk history, emitting the header
    first when the file does not exist yet."""
    needs_header = not os.path.exists(file_path)
    with open(file_path, "a") as fp:
        if needs_header:
            fp.write(header)
        fp.write(line + "\n")
def add_weights_to_log(tl: float, tr: float, bl: float, br: float):
    """Record one four-corner sample in the in-memory history and the CSV log."""
    global local_history
    combined = tl + tr + bl + br
    now = datetime.now()
    local_history.append({"tl": tl, "tr": tr, "bl": bl, "br": br, "total": combined})
    # Cap the in-memory history at history_max_length entries (newest kept).
    if len(local_history) > history_max_length:
        local_history = local_history[-history_max_length:]
    add_line_to_bed_history(f"{str(now)};{tl};{tr};{bl};{br};{combined};")
async def log_bed_weights():
    """Poll all four load cells once per second forever, recording each
    plausible sample and reacting to weight changes."""
    while True:
        try:
            readings = {
                corner: r.get(f"{bett_ip}/sensor/{corner}/").json()["value"]
                for corner in ("tl", "tr", "bl", "br")
            }
            # Sanity check: drop samples containing any non-positive reading.
            if min(readings.values()) <= 0:
                continue
            add_weights_to_log(
                readings["tl"], readings["tr"], readings["bl"], readings["br"]
            )
            check_for_change()
        except Exception as ex:
            logging.exception(ex)
        finally:
            await asyncio.sleep(1)

View file

@ -0,0 +1,78 @@
import asyncio
import logging
from fritzconnection import FritzConnection
from datetime import datetime
from ..hue import hue
refresh_every_seconds: int = 60 # Every x seconds devices are polled again
trigger_away_after_seconds: int = (
    3 * 60
) # After all away-devices are gone for x seconds

# Whether the away action has already fired for the current absence.
away_triggered = False

# NOTE(review): these two lists hold the same single MAC; away_devices drives
# the away-mode check while macaddresses_to_track drives polling — confirm
# they are meant to stay in sync.
away_devices = ["B2:06:77:EE:A9:0F"] # Max' iPhone
macaddresses_to_track = ["B2:06:77:EE:A9:0F"] # Max' iPhone

fritz_api = FritzConnection(address="192.168.178.1")
# Referenced documentation: https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf

# Last time each tracked MAC was seen online (seeded in track_network_devices).
devices_last_online = {}
def get_all_devices() -> list:
    """Return one host-entry dict per device known to the FRITZ!Box."""
    device_count = fritz_api.call_action("Hosts", "GetHostNumberOfEntries")[
        "NewHostNumberOfEntries"
    ]
    return [
        fritz_api.call_action("Hosts", "GetGenericHostEntry", NewIndex=index)
        for index in range(device_count)
    ]
def get_specific_device(mac_address: str) -> dict:
    """Look up a single FRITZ!Box host entry by its MAC address."""
    query = {"NewMACAddress": mac_address}
    return fritz_api.call_action("Hosts", "GetSpecificHostEntry", **query)
def check_for_change():
    """Switch off the lights in Max' room once every away-device has been
    offline for at least trigger_away_after_seconds."""
    global away_triggered

    everyone_gone = all(
        (datetime.now() - devices_last_online[mac]).total_seconds()
        >= trigger_away_after_seconds
        for mac in away_devices
    )

    if not everyone_gone:
        away_triggered = False
        return

    # Fire the away action only once per absence.
    if not away_triggered:
        away_triggered = True
        hue.in_room_deactivate_lights("Max Zimmer")
async def track_network_devices():
    """Poll the FRITZ!Box for the tracked devices forever, remembering the
    last moment each one was seen online."""
    global devices_last_online

    # Seed with the epoch so every device starts out "long gone" rather than None.
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    for mac in macaddresses_to_track:
        devices_last_online[mac] = epoch

    while True:
        try:
            for mac in macaddresses_to_track:
                if get_specific_device(mac)["NewActive"]:
                    devices_last_online[mac] = datetime.now()
            check_for_change()
        except Exception as ex:
            logging.exception(ex)
        finally:
            await asyncio.sleep(refresh_every_seconds)

View file

@ -1,13 +1,14 @@
from core.mash import MaSH
from hue.hue_feature import HueModule
from matrix_clock.matrix_clock_feature import MatrixClockModule
import asyncio
from fastapi import FastAPI
from endpoints.hue import router as hue_router
from endpoints.bettwaage import router as bettwaage_router
from endpoints.handlers.fritz import track_network_devices
mash: MaSH = MaSH("config.yaml")
mash.add_module(HueModule())
mash.add_module(MatrixClockModule())
app = FastAPI()
asyncio.create_task(track_network_devices())
app.include_router(hue_router, prefix="/hue", tags=["hue"])
app.include_router(bettwaage_router, prefix="/bettwaage", tags=["bett"])
if __name__ == "__main__":
mash.run()
app.run()

5
start.sh Normal file
View file

@ -0,0 +1,5 @@
# Launch the MaSH FastAPI server inside its virtualenv.
# Abort if the project directory is missing (ShellCheck SC2164) instead of
# activating the wrong env and serving from the wrong working directory.
cd /home/pi/mash-server/src || exit 1
source ../.env/bin/activate
python3 -m uvicorn main:app --reload --host 0.0.0.0 --port 9587