Integrated bed scale

This commit is contained in:
Maximilian Giller 2024-05-06 22:00:54 +02:00
parent c823f2a40a
commit f6b6e99457
9 changed files with 207 additions and 144 deletions

View file

@@ -1,27 +0,0 @@
"""Poll the bed-scale sensors once per second and forward readings to mash.

The bottom-left (bl) sensor is not polled; its value is mirrored from the
bottom-right (br) reading, both in the printed total and in the POST.
"""
import requests as r
from time import sleep

bett_ip = "http://192.168.178.110:80"
mash_ip = "http://192.168.178.84:9587"

while True:
    try:
        # Raw sensor values from the bed-scale ESP endpoints.
        tl = r.get(f"{bett_ip}/sensor/tl/", timeout=10).json()["value"]
        tr = r.get(f"{bett_ip}/sensor/tr/", timeout=10).json()["value"]
        br = r.get(f"{bett_ip}/sensor/br/", timeout=10).json()["value"]
        print(f"tl = {tl}")
        print(f"tr = {tr}")
        print(f"br = {br}")
        print("==========")
        # br is counted twice because it substitutes the missing bl sensor.
        print(f"total = {tl + tr + br * 2}")
        print("==========")
        # Forward as integer milli-units; bl mirrors br.
        s = r.post(f"{mash_ip}/bettwaage/add?tl={int(tl * 1000)}&tr={int(tr * 1000)}&bl={int(br * 1000)}&br={int(br * 1000)}")
        sleep(1)
    except KeyboardInterrupt:
        exit()
    except Exception:
        # Best-effort forwarder: skip this sample and try again.
        pass

View file

@@ -1,117 +0,0 @@
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi import APIRouter
from datetime import datetime
import os
import csv
router = APIRouter()
# CSV file that stores every converted reading, one row per sample.
file_path = "bettwaage.csv"
# Column header line written as the first row of the CSV file.
header = "timestamp;tl;tr;bl;br;total;"
# Most recent raw sensor reading [tl, tr, bl, br]; empty until first /add.
latest_values = []
# Tare offsets per sensor corner, set via /zero.
zero_values = [0, 0, 0, 0]
# Multiplicative calibration factors per sensor corner, set via /scales.
scale_values = [1, 1, 1, 1]
def add_line_to_history(line: str) -> None:
    """Append a single row to the CSV history file."""
    with open(file_path, "a") as fp:
        fp.write(f"{line}\n")
def convert_to_weight(value: int, zero_value: int, scale: float) -> float:
    """Convert a raw sensor value to a weight using tare offset and scale."""
    offset = value - zero_value
    return offset * scale
@router.get("/file", tags=["file"])
async def get_file():
    """Serve the raw CSV history file."""
    with open(file_path, "r", encoding="UTF-8") as fp:
        content = fp.readlines()
    return HTMLResponse("\n".join(content))
@router.get("/history")
async def get_history(count: int = None) -> list[dict]:
    """Return parsed history rows; optionally only the last `count` rows.

    Each row is a dict with timestamp plus float corner and total weights.
    """
    points = []
    with open(file_path, "r", encoding="UTF-8") as fp:
        reader = csv.DictReader(fp, delimiter=";")
        for row in reader:
            if not row:
                continue
            points.append(
                {
                    "timestamp": row["timestamp"],
                    "total": float(row["total"]),
                    "tl": float(row["tl"]),
                    "tr": float(row["tr"]),
                    "bl": float(row["bl"]),
                    "br": float(row["br"]),
                }
            )
    if count:
        # Bug fix: points[-count] returned a single row; slice the tail.
        return points[-count:]
    else:
        return points
@router.post("/add")
async def add_weight(tl: int, tr: int, bl: int, br: int):
    """Store a raw reading, convert it via calibration, and log it to CSV."""
    global latest_values
    latest_values = [tl, tr, bl, br]
    tl = convert_to_weight(tl, zero_values[0], scale_values[0])
    tr = convert_to_weight(tr, zero_values[1], scale_values[1])
    bl = convert_to_weight(bl, zero_values[2], scale_values[2])
    br = convert_to_weight(br, zero_values[3], scale_values[3])
    # Renamed from `sum`, which shadowed the builtin.
    total = tl + tr + bl + br
    add_line_to_history(f"{str(datetime.now())};{tl};{tr};{bl};{br};{total};")
    return "Added data"
@router.get("/latest")
async def get_latest():
    """Return the most recent raw reading as JSON, or a notice when empty."""
    if not latest_values:
        return HTMLResponse(status_code=200, content="No data given yet")
    tl, tr, bl, br = latest_values
    payload = {
        "tl": tl,
        "tr": tr,
        "bl": bl,
        "br": br,
        "total": tl + tr + bl + br,
    }
    return JSONResponse(payload)
@router.delete("/delete", tags=["file"])
async def delete_file():
    """Delete the history file and recreate it with only the header row."""
    # Guard: os.remove raises FileNotFoundError (=> HTTP 500) when the
    # file is already gone, e.g. on a repeated delete call.
    if os.path.exists(file_path):
        os.remove(file_path)
    add_line_to_history(header)
    return "Deleted file and created new file with headers"
@router.post("/zero", tags=["calibration"])
async def set_zero():
    """Use the most recent raw reading as the zero (tare) calibration."""
    if not latest_values:
        return HTMLResponse(
            status_code=400, content="Requiring data before setting zeros."
        )
    global zero_values
    # Copy so the calibration cannot alias the live latest_values list.
    zero_values = list(latest_values)
    return "Set zeroes to: " + " | ".join(str(v) for v in zero_values)
@router.post("/scales", tags=["calibration"])
async def set_scales(tl: float, tr: float, bl: float, br: float):
    """Set the per-corner multiplicative calibration factors."""
    global scale_values
    scale_values = [tl, tr, bl, br]
    joined = " | ".join(str(v) for v in scale_values)
    return "Set scales to: " + joined
# Ensure the CSV file exists with a header row before serving requests.
if not os.path.exists(file_path):
    add_line_to_history(header)

View file

@@ -0,0 +1,56 @@
import asyncio
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi import APIRouter
import os
import csv
from handlers.bett import file_path, local_history, log_bed_weights
router = APIRouter()

# asyncio.create_task() requires an already-running event loop; calling it
# at import time raises RuntimeError. Schedule the background logger from a
# startup hook instead, once the application's loop is running.
@router.on_event("startup")
async def _start_bed_logging() -> None:
    asyncio.create_task(log_bed_weights())
@router.get("/file", tags=["file"])
async def get_file():
    """Serve the raw CSV history file."""
    with open(file_path, "r", encoding="UTF-8") as fp:
        content = fp.readlines()
    return HTMLResponse("\n".join(content))
@router.get("/history")
async def get_history(count: int = None) -> list[dict]:
    """Return parsed history rows; optionally only the last `count` rows.

    Each row is a dict with timestamp plus float corner and total weights.
    """
    points = []
    with open(file_path, "r", encoding="UTF-8") as fp:
        reader = csv.DictReader(fp, delimiter=";")
        for row in reader:
            if not row:
                continue
            points.append(
                {
                    "timestamp": row["timestamp"],
                    "total": float(row["total"]),
                    "tl": float(row["tl"]),
                    "tr": float(row["tr"]),
                    "bl": float(row["bl"]),
                    "br": float(row["br"]),
                }
            )
    if count:
        # Bug fix: points[-count] returned a single row; slice the tail.
        return points[-count:]
    else:
        return points
@router.get("/latest")
async def get_latest():
    """Return the most recent reading, or a notice when none exists yet."""
    if len(local_history) == 0:
        return HTMLResponse(status_code=200, content="No data given yet")
    # local_history stores "timestamp" as a datetime object, which
    # JSONResponse cannot serialize; stringify it on a copy.
    latest = dict(local_history[-1])
    latest["timestamp"] = str(latest["timestamp"])
    return JSONResponse(latest)
@router.delete("/delete", tags=["file"])
async def delete_file():
    """Delete the CSV history file (the logger recreates it on next write)."""
    # Guard: os.remove raises FileNotFoundError (=> HTTP 500) when the
    # file is already gone, e.g. on a repeated delete call.
    if os.path.exists(file_path):
        os.remove(file_path)
    return "Deleted file"

View file

@@ -0,0 +1,149 @@
import asyncio
from datetime import datetime
import math
import os
from statistics import median
from typing import Optional
import requests as r
from ..hue import hue
# CSV log of all readings and its column header.
file_path: str = "bettwaage.csv"
header: str = "timestamp;tl;tr;bl;br;total;"
# Bed-scale ESP endpoint and the matrix clock display API.
bett_ip: str = "http://192.168.178.110:80"
matrix_clock_api: str = "http://192.168.178.84:8000"
# Estimated weight of the empty bed; derived lazily from history clusters.
empty_weight: Optional[float] = None
# In-memory ring buffer of recent readings (dicts with tl/tr/bl/br/total/timestamp).
local_history = []
history_max_length: int = 24 * 60 * 60 # 24 hours
min_noticable_difference: float= 25 # In kg
show_scale_countdown: int = 0 # Number of updates for the scale, until return to clock
# Rough per-person weight used to estimate how many people are in bed.
average_person_weight: float = 75
# Whether an overload warning pattern is currently shown on the display.
is_warning_active: bool = False
# Per-leg capacity thresholds (kg) mapped to display patterns; higher
# limits take precedence when several are exceeded.
leg_capacity_limit_patterns = [
{"limit": 80, "pattern": 110, "duration": 1000},
{"limit": 90, "pattern": 110, "duration": 250},
{"limit": 100, "pattern": 10, "duration": 50},
]
def get_clusters(data: list[float], min_delta: float) -> dict:
    """Greedily group values into clusters keyed by their first-seen member.

    A value joins the first existing cluster whose key is within
    `min_delta`; otherwise it starts a new cluster of its own.
    """
    clusters: dict = {}
    for point in data:
        for known in clusters:
            # math.abs does not exist; the builtin abs is intended here.
            if abs(point - known) < min_delta:
                clusters[known].append(point)
                # Stop after the first match instead of falling through and
                # unconditionally creating a new single-point cluster.
                break
        else:
            clusters[point] = [point]
    return clusters
def show_time() -> None:
    """Switch the matrix display back to showing the clock."""
    endpoint = f"{matrix_clock_api}/time"
    r.post(endpoint)
def show_scale(weight: float) -> None:
    """Show the given total weight (kg) on the matrix display."""
    payload = {"message": f"{weight:3.1f}kg"}
    r.post(f"{matrix_clock_api}/message", json=payload)
def is_capacity_reached() -> bool:
    """Check the latest reading against per-leg limits and drive the display.

    Shows the warning pattern for the highest exceeded limit, or restores
    the clock when a previously active warning clears.

    Returns True while any capacity limit is exceeded, else False.
    (The original returned None on every path despite the bool annotation.)
    """
    latest = local_history[-1]
    highest_limit = None
    for value in (latest["tl"], latest["tr"], latest["br"], latest["bl"]):
        for limit in leg_capacity_limit_patterns:
            if value >= limit["limit"] and (
                highest_limit is None or limit["limit"] > highest_limit["limit"]
            ):
                highest_limit = limit
    global is_warning_active
    if highest_limit is None:
        if is_warning_active:
            is_warning_active = False
            show_time()
        return False
    is_warning_active = True
    # Single-quoted keys: nested double quotes inside an f-string are a
    # syntax error before Python 3.12.
    r.post(
        f"{matrix_clock_api}/pattern"
        f"?pattern={highest_limit['pattern']}"
        f"&step_ms={highest_limit['duration']}"
        f"&contrast=255"
    )
    return True
def check_for_change() -> None:
    """React to the newest reading: warn on overload, display weight changes,
    and switch Hue scenes based on the estimated number of people in bed."""
    # Capacity warnings take priority over everything else.
    if is_capacity_reached():
        return
    global show_scale_countdown
    latest = local_history[-1]
    if show_scale_countdown > 0:
        # Refresh the displayed weight only on every third update.
        if show_scale_countdown % 3 == 0:
            show_scale(latest["total"])
        show_scale_countdown -= 1
    # Need at least two samples to detect a change.
    if len(local_history) < 2:
        return
    # Is triggered?
    delta = latest["total"] - local_history[-2]["total"]
    # math.abs does not exist; the builtin abs is intended here.
    if abs(delta) < min_noticable_difference:
        return
    # Changed weight up or down?
    weight_increased = delta > 0
    # Make sure there is a bed_weight
    global empty_weight
    if empty_weight is None:
        # Cluster the scalar totals (get_clusters takes floats and a
        # min_delta; the original passed the raw dict history and no delta,
        # which would crash in median()).
        totals = [point["total"] for point in local_history]
        clusters = get_clusters(totals, min_noticable_difference)
        empty_weight = min(median(cluster) for cluster in clusters.values())
    # Determine number of people
    number_of_people = round((latest["total"] - empty_weight) / average_person_weight)
    if number_of_people == 1 and weight_increased:
        show_scale_countdown = 60  # Should be a multiple of 3
    elif number_of_people >= 2 and weight_increased:
        show_scale_countdown = 0
        show_time()
        hue.in_room_activate_scene("Max Zimmer", "Sexy")
    elif number_of_people == 1 and not weight_increased:
        hue.in_room_activate_scene("Max Zimmer", "Tageslicht")
    else:
        show_scale_countdown = 0
        show_time()
def add_line_to_bed_history(line: str) -> None:
    """Append a row to the CSV log, writing the header first when the file
    does not exist yet."""
    missing = not os.path.exists(file_path)
    with open(file_path, "a") as fp:
        if missing:
            fp.write(f"{header}\n")
        fp.write(f"{line}\n")
def add_weights_to_log(tl: float, tr: float, bl: float, br: float) -> None:
    """Record one reading in the in-memory history and the CSV log, then run
    the change-detection logic."""
    total = tl + tr + bl + br
    timestamp = datetime.now()
    global local_history
    local_history.append(
        {"tl": tl, "tr": tr, "bl": bl, "br": br, "total": total, "timestamp": timestamp}
    )
    # Only trim once the buffer actually exceeds the cap; the original
    # re-sliced on every append (relying on negative-index clamping).
    if len(local_history) > history_max_length:
        local_history = local_history[-history_max_length:]
    add_line_to_bed_history(f"{str(timestamp)};{tl};{tr};{bl};{br};{total};")
    check_for_change()
async def log_bed_weights():
    """Poll the bed scale once a minute forever, logging each reading.

    The bottom-left sensor is unavailable; its value mirrors bottom-right.
    """
    while True:
        try:
            # Timeouts keep a dead sensor endpoint from hanging the loop.
            tl = r.get(f"{bett_ip}/sensor/tl/", timeout=10).json()["value"]
            tr = r.get(f"{bett_ip}/sensor/tr/", timeout=10).json()["value"]
            # bl = r.get(f"{bett_ip}/sensor/bl/").json()["value"]
            br = r.get(f"{bett_ip}/sensor/br/", timeout=10).json()["value"]
            # Remove later
            bl = br
            add_weights_to_log(tl, tr, bl, br)
        except Exception:
            # Best-effort logger: narrow from a bare except so cancellation
            # and interpreter shutdown are not swallowed; skip this sample.
            pass
        await asyncio.sleep(60)

View file

@@ -1,6 +1,8 @@
import asyncio
from fastapi import FastAPI
from endpoints.hue import router as hue_router
from endpoints.bettwaage import router as bettwaage_router
from endpoints.handlers.bett import log_bed_weights
app = FastAPI()