Compare commits

...

39 commits

Author SHA1 Message Date
dac9d81777 Merge pull request 'bettwaage-sidequest' (#2) from bettwaage-sidequest into master
Reviewed-on: #2
2024-06-07 23:38:55 +02:00
72f206c8cd Merge branch 'master' into bettwaage-sidequest 2024-06-07 23:38:30 +02:00
679a6c2e25 Implemented sexy mode flag 2024-06-07 23:32:56 +02:00
Max 41073fce50 Some more bed data science 2024-05-17 09:48:14 +02:00
Max ddf32f02b2 Removed some verbosity from the bed 2024-05-16 17:26:12 +02:00
be56ad1956 Relaxed time intervals for device tracking 2024-05-09 03:08:22 +02:00
dbf11551c5 Fixed typo, Improved away mode trigger mechanism 2024-05-09 02:43:17 +02:00
ab249a679d Maybe fixing background task for fritz 2024-05-09 02:33:53 +02:00
a32ad26d0b Maybe some improvements 2024-05-09 02:29:44 +02:00
bfa6c10aa0 Explicified start.sh 2024-05-09 02:18:26 +02:00
3c0c85ecaa Added missing requirements 2024-05-09 02:18:07 +02:00
3eb99735ee Added start.sh 2024-05-09 02:15:19 +02:00
c7932b2a71 Implemented network api for devices away mode 2024-05-08 22:10:52 +02:00
69956d66ea Fixed typo 2024-05-08 20:57:37 +02:00
Max a6bbf7ef4d Improved in-bed sequence filtering 2024-05-07 16:55:12 +02:00
33617ec7a3 Improved handling of show_time 2024-05-07 14:41:00 +02:00
35443b7cc8 Added sanity check to clean up data 2024-05-07 00:04:31 +02:00
4fb7c8b461 Fixed exception on start up 2024-05-07 00:03:31 +02:00
cf4894e0ba Adjusted plot script for new circumstances 2024-05-07 00:02:40 +02:00
2eaa5fd14e Fixed scale 2024-05-06 23:22:13 +02:00
139f2f6427 Improved warning patterns 2024-05-06 23:13:53 +02:00
0280825b9d Fixed scale countdown 2024-05-06 23:13:06 +02:00
49c641cb46 Fixed global empty_weight 2024-05-06 23:06:51 +02:00
a723ccf2f8 Fixed scale 2024-05-06 23:06:04 +02:00
abd9ec221c Fixed cluster call 2024-05-06 23:04:49 +02:00
dc22afdfc8 Fixed math.abs not found 2024-05-06 23:02:37 +02:00
42b593f7f3 Added exception logging 2024-05-06 23:00:02 +02:00
1779517c0f Added more logging 2024-05-06 22:58:43 +02:00
1b5c73127e Added logging 2024-05-06 22:56:01 +02:00
7736ff0397 BL available 2024-05-06 22:48:30 +02:00
fa4acda10d Fixed local history being deleted 2024-05-06 22:28:35 +02:00
c4e302aef2 Fixed warning active flag 2024-05-06 22:23:32 +02:00
9a9d40db82 Fixed file create logic 2024-05-06 22:22:13 +02:00
1819c9db9e Fixed contrast reference 2024-05-06 22:14:34 +02:00
78a5b3d8ca Fixed some type problems with local history 2024-05-06 22:13:57 +02:00
07c6ff492e Adjusted bed measurement frequency to 1 hz 2024-05-06 22:11:24 +02:00
ab5a192524 Fixed format string quotes 2024-05-06 22:07:30 +02:00
3ced1992a5 Fixed relative import 2024-05-06 22:05:21 +02:00
f6b6e99457 Integrated bed scale 2024-05-06 22:00:54 +02:00
10 changed files with 460 additions and 148 deletions

View file

@@ -1,27 +0,0 @@
import requests as r
from time import sleep

bett_ip = "http://192.168.178.110:80"
mash_ip = "http://192.168.178.84:9587"

while True:
    try:
        tl = r.get(f"{bett_ip}/sensor/tl/").json()["value"]
        tr = r.get(f"{bett_ip}/sensor/tr/").json()["value"]
        br = r.get(f"{bett_ip}/sensor/br/").json()["value"]
        print(f"tl = {tl}")
        print(f"tr = {tr}")
        # print(f"tl = {tl}")
        print(f"br = {br}")
        print("==========")
        print(f"total = {tl + tr + br * 2}")
        print("==========")
        s = r.post(f"{mash_ip}/bettwaage/add?tl={int(tl * 1000)}&tr={int(tr * 1000)}&bl={int(br * 1000)}&br={int(br * 1000)}")
        sleep(1)
    except KeyboardInterrupt:
        exit()
    except:
        pass

File diff suppressed because one or more lines are too long

View file

@@ -2,72 +2,78 @@ import requests
 import matplotlib.pyplot as plt
 from datetime import datetime
 import json
+import os

+latest_history_path = "latest_history.json"
-file_path = "history.json"
+file_path = latest_history_path
 history_url = "http://192.168.178.84:9587/bettwaage/history"
+focus_on_latest_bed_data = False
 convert_time_to_seconds = True

-# Script
+# Get data
 data = None
-if file_path is None:
+if file_path is None or not os.path.exists(file_path):
+    print("Fetching data ...")
     data = requests.get(history_url)
     data = data.json()
+    print("Saving latest data ...")
+    with open(latest_history_path, "w", encoding="UTF-8") as fp:
+        json.dump(data, fp)
 else:
+    print("Reading data ...")
     with open(file_path, "r") as fp:
         data = json.load(fp)

-# Experiment: Solving for missing foot with known total weight
-bed_weight = 78290
-person_weight = 63000
-known_total_weight = bed_weight + person_weight
+print("Processing data ...")
+
+# Get rough value for empty bed weight per leg
+rough_bed_weight = 80
 bed_only_weight = {}
 for d in data:
-    if d["total"] == bed_weight:
+    if d["total"] < rough_bed_weight:
         bed_only_weight = {
             "tl": d["tl"],
             "tr": d["tr"],
-            "bl": bed_weight - (d["tl"] + d["tr"] + d["br"]),
+            "bl": d["bl"],
             "br": d["br"],
         }
+        total_bed_only_weight = sum(bed_only_weight.values())
         break

-in_bed_data = None
-threshhold = 100000
-min_length = 100
-for d in data:
-    t = d["total"]
-    if t >= threshhold:
-        if in_bed_data is None:
-            in_bed_data = []
-        in_bed_data.append(d)
-    elif in_bed_data is not None:
-        if len(in_bed_data) < min_length:
-            in_bed_data = []
-        else:
-            break
+if focus_on_latest_bed_data:
+    # Collect all coherent sequences of someone being in bed
+    in_bed_datas: list[list[dict]] = []
+    is_in_bed_sequence = False
+    threshhold = 100.0
+    for d in data:
+        t = d["total"]
+        if t >= threshhold:
+            if not is_in_bed_sequence:
+                in_bed_datas.append([])
+                is_in_bed_sequence = True
+            in_bed_datas[-1].append(d)
+        elif is_in_bed_sequence:
+            is_in_bed_sequence = False
+
+    # Pick latest with minimum length/duration
+    min_length = 100
+    for sequence in in_bed_datas:
+        if len(sequence) >= min_length:
+            data = sequence

-# Calculate bottom left
-for d in data:
-    d["bl"] = known_total_weight - (d["br"] + d["tr"] + d["tl"])
-    # Set known total weight
-    d["total"] = known_total_weight
-data = in_bed_data
-
-# Array data
+# Prepare data for plotting
 x = [d["timestamp"] for d in data]
-x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]
-if convert_time_to_seconds:
-    max_time = max(x)
-    x = [(d - max_time).total_seconds() for d in x]
+# x = [datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f") for d in x]
+# if convert_time_to_seconds:
+#     max_time = max(x)
+#     x = [(d - max_time).total_seconds() for d in x]

 total = [d["total"] for d in data]
 tl = [d["tl"] for d in data]
@@ -80,20 +86,35 @@ left = [t + b for t, b in zip(tl, bl)]
 right = [t + b for t, b in zip(tr, br)]

+fig, ax = plt.subplots()
+person_weight = [t - total_bed_only_weight for t in total]
+ax.set_xlabel("Time (s)")
+ax.set_ylabel("Weight (kg)")
+ax.plot(x, person_weight, color="tab:blue")
+plt.show()
+exit()
+
 # Experiment: Calculate position over time
 bed_size = (140, 200)
 left_bed_only = bed_only_weight["tl"] + bed_only_weight["bl"]
 top_bed_only = bed_only_weight["tr"] + bed_only_weight["tl"]
+right_bed_only = bed_only_weight["tr"] + bed_only_weight["br"]
+bottom_bed_only = bed_only_weight["br"] + bed_only_weight["bl"]
 position_over_time = []
-for t, l in zip(top, left):
+for t, b, l, r in zip(top, bottom, left, right):
+    horizontal_weight = l - left_bed_only + r - right_bed_only
+    vertical_weight = t - top_bed_only + b - bottom_bed_only
     position_over_time.append(
         (
-            bed_size[0] * (l - left_bed_only) / person_weight,
-            bed_size[1] * (t - top_bed_only) / person_weight,
+            bed_size[0] * (l - left_bed_only) / horizontal_weight,
+            bed_size[1] * (t - top_bed_only) / vertical_weight,
         )
     )

 # Plot data
 fig, (ax0, ax1) = plt.subplots(nrows=2)
@@ -112,7 +133,6 @@ ax0.legend(
     ["Total", "Top Left", "Top Right", "Bottom Left", "Bottom Right", "Top", "Bottom"]
 )

 # Experiment: Plot position
 import math
 import colorsys
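To make the updated position formula above easier to follow, here is a quick numerical walk-through. All numbers are invented for illustration (20 kg per empty leg, a 50 kg person lying left of center); only the arithmetic of the new loop is reproduced:

# Invented values, purely illustrative
bed_only = {"tl": 20, "tr": 20, "bl": 20, "br": 20}   # empty bed: 80 kg
reading = {"tl": 35, "tr": 25, "bl": 40, "br": 30}    # with person: 130 kg

left, right = reading["tl"] + reading["bl"], reading["tr"] + reading["br"]   # 75, 55
top, bottom = reading["tl"] + reading["tr"], reading["bl"] + reading["br"]   # 60, 70
left_bed_only, right_bed_only = 40, 40
top_bed_only, bottom_bed_only = 40, 40

horizontal_weight = left - left_bed_only + right - right_bed_only   # 35 + 15 = 50 (the person)
vertical_weight = top - top_bed_only + bottom - bottom_bed_only     # 20 + 30 = 50

x = 140 * (left - left_bed_only) / horizontal_weight   # 140 * 35 / 50 = 98
y = 200 * (top - top_bed_only) / vertical_weight        # 200 * 20 / 50 = 80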

View file

@@ -1,3 +1,4 @@
 matplotlib
 requests
 numpy
+PyQt5

View file

@@ -1,7 +1,11 @@
-# For Philips Hue Counter
+# For Philips Hue
 phue

+# For Fritz.Box API
+fritzconnection
+
 # API
+requests
 fastapi
 uvicorn[standard]

View file

@@ -1,28 +1,14 @@
+import asyncio
+
 from fastapi.responses import HTMLResponse, JSONResponse
 from fastapi import APIRouter
-from datetime import datetime
 import os
 import csv

+from .handlers.bett import file_path, local_history, log_bed_weights
+
 router = APIRouter()

-file_path = "bettwaage.csv"
-header = "timestamp;tl;tr;bl;br;total;"
-
-latest_values = []
-zero_values = [0, 0, 0, 0]
-scale_values = [1, 1, 1, 1]
-
-
-def add_line_to_history(line: str) -> None:
-    with open(file_path, "a") as fp:
-        fp.write(line + "\n")
-
-
-def convert_to_weight(value: int, zero_value: int, scale: float) -> float:
-    return (value - zero_value) * scale
+asyncio.create_task(log_bed_weights())

 @router.get("/file", tags=["file"])
@@ -32,7 +18,8 @@ async def get_file():

 @router.get("/history")
-async def get_history(count: int = None) -> []:
+async def get_history(count: int = None) -> list[dict]:
     points = []
     with open(file_path, "r", encoding="UTF-8") as fp:
         reader = csv.DictReader(fp, delimiter=";")
@@ -57,61 +44,15 @@ async def get_history(count: int = None) -> []:
     return points

-@router.post("/add")
-async def add_weight(tl: int, tr: int, bl: int, br: int):
-    global latest_values
-    latest_values = [tl, tr, bl, br]
-    tl = convert_to_weight(tl, zero_values[0], scale_values[0])
-    tr = convert_to_weight(tr, zero_values[1], scale_values[1])
-    bl = convert_to_weight(bl, zero_values[2], scale_values[2])
-    br = convert_to_weight(br, zero_values[3], scale_values[3])
-    sum = tl + tr + bl + br
-    add_line_to_history(f"{str(datetime.now())};{tl};{tr};{bl};{br};{sum};")
-    return "Added data"
-
 @router.get("/latest")
 async def get_latest():
-    if not latest_values:
+    if len(local_history) == 0:
         return HTMLResponse(status_code=200, content="No data given yet")
-    total = sum(latest_values)
-    return JSONResponse(
-        {
-            "tl": latest_values[0],
-            "tr": latest_values[1],
-            "bl": latest_values[2],
-            "br": latest_values[3],
-            "total": total,
-        }
-    )
+    return JSONResponse(local_history[-1])

 @router.delete("/delete", tags=["file"])
 async def delete_file():
     os.remove(file_path)
-    add_line_to_history(header)
-    return "Deleted file and created new file with headers"
-
-
-@router.post("/zero", tags=["calibration"])
-async def set_zero():
-    if not latest_values:
-        return HTMLResponse(
-            status_code=400, content="Requiring data before setting zeros."
-        )
-    global zero_values
-    zero_values = latest_values
-    return "Set zeroes to: " + " | ".join(str(v) for v in zero_values)
-
-
-@router.post("/scales", tags=["calibration"])
-async def set_scales(tl: float, tr: float, bl: float, br: float):
-    global scale_values
-    scale_values = [tl, tr, bl, br]
-    return "Set scales to: " + " | ".join(str(v) for v in scale_values)
-
-
-if not os.path.exists(file_path):
-    add_line_to_history(header)
+    return "Deleted file"
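For orientation, a minimal sketch of how a client talks to the slimmed-down router after this change. The base URL combines the host/port from start.sh with the /bettwaage prefix from main.py, and count is the query parameter visible in get_history; when nothing has been logged yet, /latest returns a plain-text message instead of JSON.

import requests

base = "http://192.168.178.84:9587/bettwaage"

latest = requests.get(f"{base}/latest").json()                            # last sample kept in local_history
history = requests.get(f"{base}/history", params={"count": 100}).json()  # last 100 CSV rows

print(latest["total"], latest["tl"], latest["tr"], latest["bl"], latest["br"])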

View file

@@ -0,0 +1,171 @@
import asyncio
from datetime import datetime
import math
import os
from statistics import median
from typing import Optional

import requests as r

from ..hue import hue
import logging

file_path: str = "bettwaage.csv"
header: str = "timestamp;tl;tr;bl;br;total;\n"

bett_ip: str = "http://192.168.178.110:80"
matrix_clock_api: str = "http://192.168.178.84:8000"

empty_weight: Optional[float] = None
local_history = []
history_max_length: int = 24 * 60 * 60  # 24 hours

min_noticable_difference: float = 25  # In kg
initial_scale_coutndown: int = (
    0  # Number of updates for the scale, until return to clock, should be a multiple of 3
)
current_scale_countdown: int = 0
average_person_weight: float = 75

sexy_mode_detection: bool = False  # Turn lights "sexy" if two people are in bed

is_warning_active: int = -1
leg_capacity_limit_patterns = [
    # {"limit": 80, "pattern": 110, "duration": 250},
    # {"limit": 90, "pattern": 110, "duration": 100},
    # {"limit": 100, "pattern": 10, "duration": 50},
]
def get_clusters(data: list[float], min_delta: float) -> dict:
    clusters = {}
    for point in data:
        for known in clusters.keys():
            if math.fabs(point - known) < min_delta:
                clusters[known].append(point)
                continue
        clusters[point] = [point]
    return clusters


def show_time():
    r.post(f"{matrix_clock_api}/time")


def show_scale(weight: float):
    r.post(f"{matrix_clock_api}/message", json={"message": f"{weight:3.1f}kg"})
def is_capacity_reached() -> bool:
    latest = local_history[-1]
    highest_limit = None
    for value in [latest["tl"], latest["tr"], latest["br"], latest["bl"]]:
        for limit in leg_capacity_limit_patterns:
            if value >= limit["limit"] and (
                highest_limit is None or limit["limit"] > highest_limit["limit"]
            ):
                highest_limit = limit

    global is_warning_active
    if highest_limit is None:
        if is_warning_active:
            is_warning_active = 0
            show_time()
        return False

    if is_warning_active != highest_limit["limit"]:
        is_warning_active = highest_limit["limit"]
        r.post(
            f"{matrix_clock_api}/pattern?pattern={highest_limit['pattern']}&step_ms={highest_limit['duration']}"
        )
    return True
def check_for_change():
    # Check for capacity limits
    if is_capacity_reached():
        logging.info(f"Capacity reached")
        return

    if len(local_history) < 2:
        return

    global current_scale_countdown
    global empty_weight

    latest = local_history[-1]

    if current_scale_countdown > 0:
        if current_scale_countdown % 3 == 0:
            show_scale(latest["total"] - empty_weight)
        current_scale_countdown -= 1

    # Is triggered?
    delta = latest["total"] - local_history[-2]["total"]
    if math.fabs(delta) < min_noticable_difference:
        return

    # Changed weight up or down?
    weight_increased = delta > 0

    # Make sure there is a bed_weight
    if empty_weight is None:
        clusters = get_clusters(
            [d["total"] for d in local_history], min_noticable_difference
        )
        empty_weight = min([median(cluster) for cluster in clusters.values()])
        logging.info(f"Empty weight: {empty_weight}")

    # Determine number of people
    number_of_people = round((latest["total"] - empty_weight) / average_person_weight)
    logging.info(f"Number of people: {number_of_people}")

    # Show scale?
    if number_of_people == 1 and weight_increased and current_scale_countdown == 0:
        current_scale_countdown = initial_scale_coutndown
    else:
        current_scale_countdown = 0

    # Make room sexy
    if sexy_mode_detection:
        if number_of_people >= 2 and weight_increased:
            hue.in_room_activate_scene("Max Zimmer", "Sexy")
        elif number_of_people == 1 and not weight_increased:
            hue.in_room_activate_scene("Max Zimmer", "Tageslicht")
def add_line_to_bed_history(line: str) -> None:
    exists = os.path.exists(file_path)
    with open(file_path, "a") as fp:
        if not exists:
            fp.write(header)
        fp.write(line + "\n")


def add_weights_to_log(tl: float, tr: float, bl: float, br: float):
    total = tl + tr + bl + br
    timestamp = datetime.now()

    global local_history
    local_history.append({"tl": tl, "tr": tr, "bl": bl, "br": br, "total": total})
    if len(local_history) > history_max_length:
        local_history = local_history[len(local_history) - history_max_length :]

    add_line_to_bed_history(f"{str(timestamp)};{tl};{tr};{bl};{br};{total};")


async def log_bed_weights():
    while True:
        try:
            tl = r.get(f"{bett_ip}/sensor/tl/").json()["value"]
            tr = r.get(f"{bett_ip}/sensor/tr/").json()["value"]
            bl = r.get(f"{bett_ip}/sensor/bl/").json()["value"]
            br = r.get(f"{bett_ip}/sensor/br/").json()["value"]

            # Sanity check
            if min([tl, tr, bl, br]) <= 0:
                continue

            add_weights_to_log(tl, tr, bl, br)
            check_for_change()
        except Exception as ex:
            logging.exception(ex)
        finally:
            await asyncio.sleep(1)
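To illustrate how the empty-weight detection above is meant to behave, a small sketch: get_clusters groups the logged totals and the smallest cluster median is taken as the empty bed. The import path is inferred from the router's imports (it is not shown in this view), and the totals are invented.

from statistics import median
from endpoints.handlers.bett import get_clusters  # import path inferred, not shown in this diff

# Invented totals (kg): empty bed around 80, one person in bed around 155
totals = [80.1, 79.8, 80.3, 155.2, 154.8, 155.0, 80.0]

clusters = get_clusters(totals, min_delta=25)
empty_weight = min(median(cluster) for cluster in clusters.values())
print(empty_weight)  # ~80; a person's weight is then latest_total - empty_weight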

View file

@@ -0,0 +1,78 @@
import asyncio
import logging

from fritzconnection import FritzConnection
from datetime import datetime

from ..hue import hue

refresh_every_seconds: int = 60  # Every x seconds devices are polled again
trigger_away_after_seconds: int = (
    3 * 60
)  # After all away-devices are gone for x seconds
away_triggered = False

away_devices = ["B2:06:77:EE:A9:0F"]  # Max' iPhone
macaddresses_to_track = ["B2:06:77:EE:A9:0F"]  # Max' iPhone

fritz_api = FritzConnection(address="192.168.178.1")
# Referenced documentation: https://avm.de/fileadmin/user_upload/Global/Service/Schnittstellen/hostsSCPD.pdf

devices_last_online = {}


def get_all_devices() -> list:
    numberOfDevices = fritz_api.call_action("Hosts", "GetHostNumberOfEntries")[
        "NewHostNumberOfEntries"
    ]
    devices = []
    for i in range(numberOfDevices):
        devices.append(
            fritz_api.call_action("Hosts", "GetGenericHostEntry", NewIndex=i)
        )
    return devices


def get_specific_device(mac_address: str) -> dict:
    return fritz_api.call_action(
        "Hosts", "GetSpecificHostEntry", NewMACAddress=mac_address
    )
def check_for_change():
    # Check if devices are away for away-mode
    all_away = True
    for device in away_devices:
        last_online = devices_last_online[device]
        if (datetime.now() - last_online).total_seconds() < trigger_away_after_seconds:
            all_away = False
            break

    # Execute away mode
    global away_triggered
    if all_away:
        if not away_triggered:
            away_triggered = True
            hue.in_room_deactivate_lights("Max Zimmer")
    else:
        away_triggered = False


async def track_network_devices():
    global devices_last_online

    # Initial values to avoid None
    for macaddress in macaddresses_to_track:
        devices_last_online[macaddress] = datetime(1970, 1, 1, 0, 0, 0)

    while True:
        try:
            for macaddress in macaddresses_to_track:
                is_online = get_specific_device(macaddress)["NewActive"]
                if is_online:
                    devices_last_online[macaddress] = datetime.now()

            check_for_change()
        except Exception as ex:
            logging.exception(ex)
        finally:
            await asyncio.sleep(refresh_every_seconds)
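As a standalone sanity check of the presence polling above, roughly the following works against the same Fritz!Box TR-064 call. The router address and MAC are the constants hard-coded in this file; depending on the box's settings, FritzConnection may also need user/password arguments.

from fritzconnection import FritzConnection

fc = FritzConnection(address="192.168.178.1")  # add user=/password= if the box requires login
entry = fc.call_action("Hosts", "GetSpecificHostEntry", NewMACAddress="B2:06:77:EE:A9:0F")
print(entry["NewActive"])  # the flag the away-mode logic keys on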

View file

@@ -1,13 +1,14 @@
-from core.mash import MaSH
-from hue.hue_feature import HueModule
-from matrix_clock.matrix_clock_feature import MatrixClockModule
+import asyncio
+from fastapi import FastAPI
+from endpoints.hue import router as hue_router
+from endpoints.bettwaage import router as bettwaage_router
+from endpoints.handlers.fritz import track_network_devices

-mash: MaSH = MaSH("config.yaml")
+app = FastAPI()
+asyncio.create_task(track_network_devices())

-mash.add_module(HueModule())
-mash.add_module(MatrixClockModule())
+app.include_router(hue_router, prefix="/hue", tags=["hue"])
 app.include_router(bettwaage_router, prefix="/bettwaage", tags=["bett"])

 if __name__ == "__main__":
-    mash.run()
+    app.run()
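Worth noting when reading this hunk: asyncio.create_task() requires an already-running event loop, FastAPI has no app.run(), and under the uvicorn command in start.sh the __main__ block never executes anyway. Purely as a sketch of the more conventional wiring (not what this commit does; newer FastAPI versions would use a lifespan handler instead of on_event):

import asyncio

import uvicorn
from fastapi import FastAPI

from endpoints.hue import router as hue_router
from endpoints.bettwaage import router as bettwaage_router
from endpoints.handlers.bett import log_bed_weights
from endpoints.handlers.fritz import track_network_devices

app = FastAPI()


@app.on_event("startup")
async def start_background_tasks():
    # Start the pollers once the event loop is running, not at import time
    asyncio.create_task(log_bed_weights())
    asyncio.create_task(track_network_devices())


app.include_router(hue_router, prefix="/hue", tags=["hue"])
app.include_router(bettwaage_router, prefix="/bettwaage", tags=["bett"])

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=9587)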

start.sh Normal file
View file

@@ -0,0 +1,5 @@
cd /home/pi/mash-server/src
source ../.env/bin/activate
python3 -m uvicorn main:app --reload --host 0.0.0.0 --port 9587