#!/usr/bin/env python3

import os
import sys
import ast
from ast import literal_eval
import falcon
from falcon import HTTP_200, HTTP_400, HTTP_401, HTTP_500
import json
import logging
from dotenv import load_dotenv
import calendar
import io
import datetime
from datetime import timedelta, timezone
import jwt
import psycopg2
import html
import re
import fnmatch
import traceback
import time
import pytz
from PIL import Image, ImageDraw, ImageFont
import paho.mqtt.client as mqtt
import ssl
import hashlib
import itertools
from collections import defaultdict, deque
import warnings
from io import BytesIO
import zipfile
from minio import Minio
from minio.error import S3Error
import numpy as np
import cv2
from sklearn.mixture import GaussianMixture
import openai
from openai import OpenAI
from typing import List, Tuple

st = 0

if True:
    #from scipy import interpolate
    from scipy.optimize import curve_fit
    from scipy import stats
    import pandas as pd
    #from scipy.signal import savgol_filter

EnablePlot = False  #True

if EnablePlot:
    import matplotlib
    matplotlib.use('Agg')  # Set the backend before importing pyplot
    import matplotlib.pyplot as plt
    from matplotlib.colors import LinearSegmentedColormap
    import matplotlib.dates as mdates

# Configure logging
logging.basicConfig(
    level=logging.DEBUG,  # .ERROR,
    format='%(asctime)s [%(levelname)s] %(message)s'
)
logger = logging.getLogger(__name__)

location_names = {-1: "All", 0: "?", 5: "Office", 6: "Hallway", 7: "Garage", 8: "Outside", 9: "Conference Room", 10: "Room", 34: "Kitchen",
                  56: "Bedroom", 78: "Living Room", 102: "Bathroom", 103: "Dining Room", 104: "Bathroom Main", 105: "Bathroom Guest",
                  106: "Bedroom Master", 107: "Bedroom Guest", 108: "Conference Room", 109: "Basement", 110: "Attic", 200: "Other"}

Loc2Color = {"?": (0, 0, 0), "Office": (255, 255, 0), "Hallway": (128, 128, 128), "Garage": (128, 0, 0), "Outside": (0, 0, 0), "Conference Room": (0, 0, 128),
             "Room": (64, 64, 64), "Kitchen": (255, 0, 0), "Bedroom": (16, 255, 16), "Living Room": (160, 32, 240), "Bathroom": (0, 0, 255),
             "Dining Room": (255, 128, 0), "Bathroom Main": (16, 16, 255), "Bedroom Master": (0, 255, 0), "Bathroom Guest": (32, 32, 255),
             "Bedroom Guest": (32, 255, 32), "Basement": (64, 64, 64), "Attic": (255, 165, 0), "Other": (192, 192, 192)}

# Overrides the mapping above: each location now maps to ((R, G, B), order)
Loc2Color = {"Bedroom": ((16, 255, 16), 0), "Bedroom Master": ((0, 255, 0), 0), "Bedroom Guest": ((32, 255, 32), 0), "Bathroom": ((0, 0, 255), 1),
             "Bathroom Main": ((16, 16, 255), 1), "Bathroom Guest": ((32, 32, 255), 1), "Kitchen": ((255, 0, 0), 2), "Dining Room": ((255, 128, 0), 3),
             "Office": ((255, 255, 0), 4), "Conference Room": ((0, 0, 128), 5), "Room": ((64, 64, 64), 6), "Living Room": ((160, 32, 240), 7), "Hallway": ((128, 128, 128), 8),
             "Garage": ((128, 0, 0), 9), "Basement": ((64, 64, 64), 10), "Attic": ((255, 165, 0), 11), "Other": ((192, 192, 192), 12), "?": ((0, 0, 0), 13), "Outside": ((0, 0, 0), 14)}

s_table = ["temperature", "humidity", "pressure", "light", "radar", "voc0", "voc1", "voc2", "voc3", "voc4", "voc5", "voc6", "voc7", "voc8", "voc9"]  # derived
smells_table = ["s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9"]  # derived

Consolidataed_locations = {"?": "Room", "Office": "Office", "Hallway": "Hallway", "Garage": "Garage", "Outside": "Outside", "Conference Room": "Office",
                           "Room": "Room", "Kitchen": "Kitchen", "Bedroom": "Bedroom", "Living Room": "Living Room", "Bathroom Guest": "Bathroom",
                           "Dining Room": "Dining Room", "Bathroom": "Bathroom", "Bathroom Main": "Bathroom", "Bedroom Master": "Bedroom",
                           "Bedroom Guest": "Bedroom", "Basement": "Basement", "Attic": "Attic", "Other": "Room"}

AveragePercentPerLocation = {"Bedroom": [29, 37.5], "Bathroom": [2, 4], "Office": [10, 40], "Hallway": [0.1, 0.2], "Garage": [2, 3], "Outside": [5, 10],
                             "Room": [5, 10], "Kitchen": [5, 12.5], "Living Room": [5, 10],
                             "Dining Room": [5, 10], "Basement": [0, 0.2], "Attic": [0, 0.2]}

location_indexes = {}
for i in location_names:
    location_indexes[location_names[i]] = i

# HTTP Status codes
HTTP_200 = falcon.HTTP_200
HTTP_201 = falcon.HTTP_201
HTTP_400 = falcon.HTTP_400
HTTP_401 = falcon.HTTP_401
HTTP_404 = falcon.HTTP_404
HTTP_500 = falcon.HTTP_500

load_dotenv()

DB_NAME = os.getenv('DB_NAME')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')
DB_PORT = os.getenv('DB_PORT')
MINIO_ACCESS_KEY = os.getenv('MINIO_ACCESS_KEY')
MINIO_SECRET_KEY = os.getenv('MINIO_SECRET_KEY')
MINIO_HOST = os.getenv('MINIO_HOST')
MINIO_PORT = os.getenv('MINIO_PORT')
DAILY_MAPS_BUCKET_NAME = os.getenv('DAILY_MAPS_BUCKET_NAME')
JWT_SECRET = os.getenv('JWT_SECRET')
MASTER_ADMIN = os.getenv('MASTER_ADMIN')
MASTER_PS = os.getenv('MASTER_PS')

OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
model_engine = os.getenv('OPENAI_API_MODEL_ENGINE')

use_pdb = True
debug = False
debug_string = ""

logger.debug(f"Environment variables: {os.environ}")

filesDir = "/home/app/well_web_storage"  #os.path.dirname(os.path.realpath(__file__))
min_io_address = MINIO_HOST + ":" + MINIO_PORT

miniIO_blob_client = Minio(min_io_address, access_key=MINIO_ACCESS_KEY, secret_key=MINIO_SECRET_KEY, secure=False)

user_id_2_user = {}

smell_min = 1
no_smell = 102400000
smell_max = no_smell - 1

sensor_legal_values = {"radar": (0, 1000, 1), "co2": (smell_min, smell_max, 31), "humidity": (1, 99, 31), "light": (0, 4095, 1),
                       "pressure": (0, 10000, 5), "temperature": (1, 60, 31), "voc": (smell_min, smell_max, 31), "voc0": (smell_min, smell_max, 31),
                       "voc1": (smell_min, smell_max, 31), "voc2": (smell_min, smell_max, 31), "voc3": (smell_min, smell_max, 31), "voc4": (smell_min, smell_max, 31),
                       "voc5": (smell_min, smell_max, 31), "voc6": (smell_min, smell_max, 31), "voc7": (smell_min, smell_max, 31), "voc8": (smell_min, smell_max, 31), "voc9": (smell_min, smell_max, 31),
                       "s0": (smell_min, smell_max, 31), "s1": (smell_min, smell_max, 31), "s2": (smell_min, smell_max, 31), "s3": (smell_min, smell_max, 31), "s4": (smell_min, smell_max, 31),
                       "s5": (smell_min, smell_max, 31), "s6": (smell_min, smell_max, 31), "s7": (smell_min, smell_max, 31), "s8": (smell_min, smell_max, 31), "s9": (smell_min, smell_max, 31)}

smell_legal_values = {"s0": (smell_min, smell_max, 31), "s1": (smell_min, smell_max, 31), "s2": (smell_min, smell_max, 31), "s3": (smell_min, smell_max, 31), "s4": (smell_min, smell_max, 31),
                      "s5": (smell_min, smell_max, 31), "s6": (smell_min, smell_max, 31), "s7": (smell_min, smell_max, 31), "s8": (smell_min, smell_max, 31), "s9": (smell_min, smell_max, 31)}

def read_file(file_name, source="LOCAL", type_="TEXT", bucket_name="daily-maps"):

    blob_data = ""
    if source == "MINIO":
        blob_data = ReadObjectMinIO(bucket_name, file_name)
    elif source == "LOCAL":
        login_file = os.path.join(filesDir, file_name)
        login_file = login_file.replace("\\", "/")
        logger.debug(f"Full file path: {login_file}")
        logger.debug(f"File exists: {os.path.exists(login_file)}")
        #print(login_file)
        if type_ == "TEXT":
            with open(login_file, encoding="utf8") as f:
                blob_data = f.read()
        else:
            with open(login_file, 'rb') as f:
                blob_data = f.read()

    elif source == "AZURE":
        try:
            blob_data = ""  #container_client.download_blob(file_name).readall()
        except Exception as err:
            logger.error("Not reading Azure blob " + str(err))
            blob_data = ""
            return blob_data
    else:
        pass
    return blob_data

def match_with_wildcard(string, pattern):
    return fnmatch.fnmatchcase(string, pattern)


def extract_differing_part(string, pattern):
    regex_pattern = re.escape(pattern).replace(r'\*', r'(.+)')
    match = re.match(regex_pattern, string)
    if match:
        return match.group(1)
    else:
        return None


def get_db_connection():
    return psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=DB_PORT)

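
# Illustrative sketch (not in the original module): how the two wildcard helpers
# are expected to work together. The pattern and MAC value below are made up.
def _example_wildcard_helpers():
    pattern = "64B70889*"
    mac = "64B708896BD8"
    if match_with_wildcard(mac, pattern):
        # extract_differing_part returns whatever the '*' matched, here "6BD8"
        return extract_differing_part(mac, pattern)
    return None
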
def generate_token(username):
    expiration = datetime.datetime.now(timezone.utc) + timedelta(hours=24)
    token = jwt.encode({"username": username, "exp": expiration}, JWT_SECRET, algorithm="HS256")
    return token


def verify_token(token):
    try:
        payload = jwt.decode(token, JWT_SECRET, algorithms=["HS256"])
        return payload
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError:
        return None

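
# Illustrative sketch (not in the original module): generate_token()/verify_token()
# are meant to round-trip, and verify_token() returns None for a bad or expired
# token. The username "alice" is a placeholder.
def _example_token_roundtrip():
    token = generate_token("alice")
    payload = verify_token(token)           # {"username": "alice", "exp": ...} while valid
    bad = verify_token(token + "tampered")  # None for an invalid signature
    return payload, bad
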
def SaveObjectInBlob(file_name, obj):
    """
    Saves a Python object to MinIO blob storage using JSON serialization

    Args:
        file_name (str): Name of the file to save in blob storage
        obj: Python object to serialize and save
    """
    try:
        # Convert object to JSON string
        json_str = json.dumps(obj)
        # Convert string to bytes
        json_bytes = json_str.encode('utf-8')

        # Save to MinIO
        miniIO_blob_client.put_object(
            DAILY_MAPS_BUCKET_NAME,
            file_name,
            io.BytesIO(json_bytes),
            len(json_bytes)
        )
        return True
    except Exception as e:
        logger.error(f"Error saving object to blob: {traceback.format_exc()}")
        return False


def ReadObjectMinIO(bucket_name, file_name):
    try:
        # Retrieve the object data
        response = miniIO_blob_client.get_object(bucket_name, file_name)

        # Read the data from response
        data_bytes = response.read()

        # Convert bytes to string and then load into a dictionary
        data_string = data_bytes.decode('utf-8')

        # Don't forget to close the response
        response.close()
        response.release_conn()

        return data_string

    except S3Error as e:
        logger.error(f"An error occurred while reading {file_name}: {e}")
        return None
    except:
        logger.error(f"An error occurred while decoding {file_name}")
        return None

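
# Illustrative sketch (not in the original module): SaveObjectInBlob() writes JSON
# into DAILY_MAPS_BUCKET_NAME and ReadObjectMinIO() returns the raw JSON string,
# so a round trip needs json.loads(). The object name used here is a placeholder.
def _example_blob_roundtrip():
    obj = {"device_id": 1, "locations": [[1, 0, 5]]}
    if SaveObjectInBlob("examples/demo.json", obj):
        raw = ReadObjectMinIO(DAILY_MAPS_BUCKET_NAME, "examples/demo.json")
        if raw is not None:
            return json.loads(raw)
    return None
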
def package_response_C(payload, status_code=HTTP_200):
    """Package response in a standard format"""
    if status_code == HTTP_200:
        return {"status": "success", "data": payload}
    else:
        return {"status": "error", "message": payload, "code": status_code}


def package_response(content, status=falcon.HTTP_200):
    """
    Format the HTTP response.

    :param content: The content to be returned in the response.
    :param status: HTTP status code (default is 200 OK).
    :return: A dictionary containing the formatted response.
    """
    if isinstance(content, str):
        # If content is a string, try to parse it as JSON
        try:
            response = json.loads(content)
        except json.JSONDecodeError:
            # If it's not valid JSON, use it as message
            response = {"message": content}
    elif isinstance(content, dict):
        # If content is a dictionary, serialize it with datetime handling
        try:
            # First serialize to JSON string with datetime handling
            json_str = json.dumps(content, default=datetime_handler)
            # Then parse back to dict
            response = json.loads(json_str)
        except TypeError as e:
            response = {"message": f"Serialization error: {str(e)}"}
    else:
        # For any other type, convert to string and use as message
        response = {"message": str(content)}

    # Add status code to the response
    response["status"] = status

    # Handle specific status codes
    if status == falcon.HTTP_400:
        response["error"] = "Bad Request"
    elif status == falcon.HTTP_401:
        response["error"] = "Unauthorized"
    elif status == falcon.HTTP_500:
        response["error"] = "Internal Server Error"

    return response

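
# Illustrative sketch (not in the original module): typical shapes produced by
# package_response(). The payloads are made up; the dict case relies on the
# module's datetime_handler being defined elsewhere in this file.
def _example_package_response():
    ok = package_response({"devices": 3})                     # {'devices': 3, 'status': '200 OK'}
    err = package_response("token missing", falcon.HTTP_401)  # {'message': 'token missing', 'status': '401 Unauthorized', 'error': 'Unauthorized'}
    return ok, err
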
def GetPriviledges(conn, user_name, password):
    sql = "SELECT key, access_to_deployments, user_id FROM public.person_details WHERE user_name = '" + user_name + "'"

    with conn.cursor() as cur:
        cur.execute(sql)
        result = cur.fetchall()  #cur.fetchone()
        if result:
            if result[0][0] == password:
                return result[0][1], result[0][2]
            else:
                return "0", "0"
        else:
            return "0", "0"


def GetPriviledgesOnly(user):
    with get_db_connection() as conn:
        if isinstance(user, int) or user.isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = " + str(user)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = '" + user + "'"

        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()  #cur.fetchone()
            if result:
                return result[0][0]
            else:
                return "0"

def ListDeployments(priviledges, user_id):

    global user_id_2_user

    conn = get_db_connection()

    if priviledges == "-1":
        sql = "SELECT * FROM public.deployments ORDER BY deployment_id ASC;"
    else:
        sql = f"SELECT * FROM public.deployments WHERE deployment_id IN ({priviledges}) OR user_edit = {user_id} ORDER BY deployment_id ASC;"

    complete_result = []
    try:

        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()  #cur.fetchone()
            if not result:
                complete_result = []
            else:
                deployment_ids = []
                deployment_records_dict = {}
                for record in result:
                    deployment_id = record[0]
                    deployment_ids.append(deployment_id)
                    deployment_records_dict[deployment_id] = record

                sql = f"SELECT * FROM public.deployment_details WHERE deployment_id IN ({','.join(map(str, deployment_ids))}) ORDER BY deployment_id ASC;"
                cur.execute(sql)
                details_result = cur.fetchall()

                beneficiary_ids = []
                for record_details in details_result:
                    if record_details[1] != None and record_details[1] not in beneficiary_ids:
                        beneficiary_ids.append(record_details[1])

                user_id_2_user = {}
                if beneficiary_ids:
                    sql = f"SELECT * FROM public.person_details WHERE user_id IN ({','.join(map(str, beneficiary_ids))});"
                    cur.execute(sql)
                    users = cur.fetchall()  #cur.fetchone()
                    for usr_record in users:
                        user_id_2_user[usr_record[0]] = usr_record

                complete_result = []
                if details_result != None:
                    for record_details in details_result:
                        deployment_record = deployment_records_dict[record_details[0]]
                        complete_record = {'deployment_id': record_details[0], 'beneficiary_id': record_details[1], 'caretaker_id': record_details[2],
                                           'owner_id': record_details[3], 'installer_id': record_details[4],
                                           'address_street': record_details[6], 'address_city': record_details[7], 'address_zip': record_details[8],
                                           'address_state': record_details[9], 'address_country': record_details[10],
                                           'devices': record_details[5], 'wifis': record_details[11], 'persons': deployment_record[4], 'gender': deployment_record[5],
                                           'race': deployment_record[6], 'born': deployment_record[7], 'pets': deployment_record[8], 'time_zone': deployment_record[3]
                                           }
                        complete_result.append(complete_record)
    except:
        logger.debug(f"Error: {traceback.format_exc()}")
    return complete_result

def ListCaretakers():

    conn = get_db_connection()
    sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%2%' ORDER BY last_name;"  #2 is caretaker

    with conn.cursor() as cur:
        cur.execute(sql)
        result = cur.fetchall()  #cur.fetchone()
        if result == None:
            result = []

    return result

def ListBeneficiaries(privilidges, user_info):

    conn = get_db_connection()
    with conn.cursor() as cur:

        if (privilidges == "-1"):
            sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%1%' ORDER BY last_name;"  #1 is beneficiary
        else:
            #we need to find beneficiaries from list of deployments
            sql = f"SELECT beneficiary_id FROM public.deployment_details WHERE deployment_id IN ({privilidges}) ORDER BY deployment_id ASC;"
            cur.execute(sql)
            result1 = cur.fetchall()  #cur.fetchone()
            if not result1:
                result = []
                return result
            beneficiaries = ",".join(str(x[0]) for x in result1)
            sql = f"SELECT * FROM public.person_details WHERE user_id IN ({beneficiaries}) OR user_edit = {user_info} AND role_ids LIKE '%1%' ORDER BY last_name;"  #1 is beneficiary
            logger.debug(f"sql= {sql}")

        cur.execute(sql)
        result = cur.fetchall()  #cur.fetchone()
        if result == None:
            result = []

    return result

def UserDetails(user_id):

    conn = get_db_connection()

    sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'person_details';"
    with conn.cursor() as cur:
        cur.execute(sql)
        columns_names = cur.fetchall()

    sql = "SELECT * FROM public.person_details WHERE user_id = " + user_id

    caretaker_record = {}
    with conn.cursor() as cur:
        cur.execute(sql)
        result = cur.fetchone()  #cur.fetchall()
        if result != None:
            cnt = 0
            for field in columns_names:
                caretaker_record[field[0]] = result[cnt]
                cnt += 1

    return caretaker_record

def DeviceDetails(mac):

    conn = get_db_connection()

    sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
    with conn.cursor() as cur:
        cur.execute(sql)
        columns_names = cur.fetchall()

    sql = "SELECT * FROM public.devices WHERE device_mac = '" + mac + "'"

    device_record = {}
    with conn.cursor() as cur:
        cur.execute(sql)
        result = cur.fetchone()  #cur.fetchall()
        if result != None:
            cnt = 0
            for field in columns_names:
                device_record[field[0]] = result[cnt]
                cnt += 1

    return device_record

def DeploymentDetails(deployment_id):

    deployment_record = {}

    conn = get_db_connection()

    with conn.cursor() as cur:

        sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployments';"
        cur.execute(sql)
        columns_names = cur.fetchall()

        sql = "SELECT * FROM public.deployments WHERE deployment_id = '" + deployment_id + "'"

        cur.execute(sql)
        result = cur.fetchone()  #cur.fetchall()
        if result != None:
            cnt = 0
            for field in columns_names:
                deployment_record[field[3]] = result[cnt]
                cnt += 1

        sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployment_details';"
        cur.execute(sql)
        columns_names = cur.fetchall()

        sql = "SELECT * FROM public.deployment_details WHERE deployment_id = '" + deployment_id + "'"

        cur.execute(sql)
        result = cur.fetchone()  #cur.fetchall()
        if result != None:
            cnt = 0
            for field in columns_names:
                deployment_record[field[3]] = result[cnt]
                cnt += 1

    return deployment_record

def ValidUser(user_name, password):

    if use_pdb:
        with get_db_connection() as db_conn:
            priviledges, user_id = GetPriviledges(db_conn, user_name, password)
            return priviledges, user_id

    else:
        pass
        #container = GetReference("/MAC")
        #try:
            ## We can do an efficient point read lookup on partition key and id
            ##response = container.read_item(item="64B708896BD8_temperature_2024-01-01_00", partition_key="64B708896BD8") #OK
            ##items = query_items(container, '64B708896BD8') #Too slow
            ##AddToLog("1!")
            #privileges = GetCaretakers(container, email, password)
            #return privileges

        #except Exception as err:
            #AddToLog("Error !1 "+str(err))

def SelectOption(html_code, select_id, selected_item):
    """
    Modifies HTML code to set the selected attribute for a specific option in a select element.

    Args:
        html_code (str): Original HTML code
        select_id (str): ID of the select element to modify
        selected_item (str or int): Value of the option to be selected

    Returns:
        str: Modified HTML code with the selected attribute added
    """
    # Convert selected_item to string for comparison
    selected_item = str(selected_item)

    # Find the select element with the given ID
    select_pattern = rf'<select[^>]*id=[\'"]?{select_id}[\'"]?[^>]*>(.*?)</select>'
    select_match = re.search(select_pattern, html_code, re.IGNORECASE | re.DOTALL)

    if not select_match:
        return html_code  # Return unchanged if select element not found

    select_content = select_match.group(0)
    select_content_orig = select_content
    # Remove any existing selected attributes
    select_content = re.sub(r'\s+selected(?=[>\s])', '', select_content, flags=re.IGNORECASE)

    # Add selected attribute to the matching option
    def replace_option(match):
        value = re.search(r'value=[\'"]?([^\'">\s]+)', match.group(0))
        if value and value.group(1) == selected_item:
            # Add selected attribute before the closing >
            return match.group(0).rstrip('>') + ' selected>'
        return match.group(0)

    modified_select = re.sub(
        r'<option[^>]*>',
        replace_option,
        select_content
    )

    # Replace the original select element with the modified one
    return html_code.replace(select_content_orig, modified_select)

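
# Illustrative sketch (not in the original module): SelectOption() marks one
# <option> as selected inside the <select> with the given id. The HTML below is
# a made-up minimal snippet.
def _example_select_option():
    snippet = '<select id="gender"><option value="1">Male</option><option value="2">Female</option></select>'
    # Returns the snippet with 'selected' added to the value="2" option.
    return SelectOption(snippet, "gender", 2)
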
def FillFields(blob_data, record, form_type):
    """
    Fill in the input fields in the HTML blob_data with values from the record dictionary.

    :param blob_data: str - The initial HTML string containing empty or placeholder input fields.
    :param record: dict - The dictionary containing values to populate the fields.
    :return: str - The HTML string with the input fields filled with the appropriate values.
    """
    # Ensure blob_data is a string
    #blob_data = str(blob_data)

    # Populate the fields
    for field in record:
        logger.debug(f"field= {field}")
        if field == "user_id":
            if record[field] is not None:
                escaped_string = html.escape(str(record[field]))
                # Create a regex pattern to match the span with specific id
                pattern = rf'(<span[^>]+id="editing_user_id"[^>]*>)([^<]*)(</span>)'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
        elif field == "deployment_id":
            if record[field] is not None:
                escaped_string = html.escape(str(record[field]))
                # Create a regex pattern to match the span with specific id
                pattern = rf'(<span[^>]+id="editing_deployment_id"[^>]*>)([^<]*)(</span>)'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
        elif field == "device_id":
            if record[field] is not None:
                escaped_string = html.escape(str(record[field]))
                # Create a regex pattern to match the span with specific id
                pattern = rf'(<span[^>]+id="editing_device_id"[^>]*>)([^<]*)(</span>)'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
        elif field == "user_name":
            if record[field] != None:
                escaped_string = html.escape(record[field])
                pattern = rf'(<input[^>]+id="new_user_name"[^>]+value=")[^"]*(")'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)

                # Add value attribute if it does not exist
                pattern = rf'(<input[^>]+id="new_user_name"[^>]*)(>)'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)

        elif field == "location":
            if record[field] != None:
                blob_data = SelectOption(blob_data, 'location', record[field])

        elif field == "gender":
            if record[field] != None:
                blob_data = SelectOption(blob_data, 'gender', record[field])

        elif field == "race":
            if record[field] != None:
                blob_data = SelectOption(blob_data, 'race', record[field])

        elif field == "time_zone_s":
            if record[field] != None:
                blob_data = SelectOption(blob_data, 'time_zone_s', record[field])

        elif field == "time_edit" or field == "user_edit":
            pass
        else:
            if record[field] != None:
                escaped_string = html.escape(str(record[field]))
                pattern = rf'(<input[^>]+id="{field}"[^>]+value=")[^"]*(")'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)

                # Add value attribute if it does not exist
                pattern = rf'(<input[^>]+id="{field}"[^>]*)(>)'
                blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)

    return blob_data

def StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE):

    conn = get_db_connection()
    cur = conn.cursor()

    if device_id == None or device_id == 0:
        return 1

    try:

        sql = f"""
        UPDATE public.devices
        SET
            radar_threshold = '[{TR},{BR},{TLIFE},{BLIFE}]'
        WHERE device_id = {device_id};
        """

        logger.debug(f"sql= {sql}")
        # Execute update query
        print(sql)
        cur.execute(sql)

        # Commit the changes to the database
        conn.commit()

        # Close the cursor and connection
        cur.close()
        conn.close()
        logger.debug("Written/updated!")
        return 1
    except Exception as err:
        AddToLog(traceback.format_exc())
        return 0

def StoreBeneficiary2DB(parameters, editing_user_id):

    conn = get_db_connection()
    cur = conn.cursor()

    if editing_user_id == None or editing_user_id == "":
        editing_user_id = "0"

    try:

        current_utc_time = datetime.datetime.now(timezone.utc)

        # Convert to epoch time
        current_epoch_time = current_utc_time.timestamp()

        if editing_user_id != "0":
            sql = f"""
            UPDATE public.person_details
            SET
                email = '{CleanObject(parameters.get('email'))}',
                user_name = '{CleanObject(parameters.get('new_user_name'))}',
                first_name = '{CleanObject(parameters.get('first_name'))}',
                last_name = '{CleanObject(parameters.get('last_name'))}',
                address_street = '{CleanObject(parameters.get('address_street'))}',
                address_city = '{CleanObject(parameters.get('address_city'))}',
                address_zip = '{CleanObject(parameters.get('address_zip'))}',
                address_state = '{CleanObject(parameters.get('address_state'))}',
                address_country = '{CleanObject(parameters.get('address_country'))}',
                time_edit = {current_epoch_time},
                user_edit = {CleanObject(parameters.get('user_id'))},
                role_ids = '{CleanObject(parameters.get('role_ids'))}',
                phone_number = '{CleanObject(parameters.get('phone_number'))}',
                picture = '{CleanObject(parameters.get('picture'))}',
                key = '{CleanObject(parameters.get('key'))}'
            WHERE user_id = {editing_user_id};
            """

        else:
            sql = f"""
            INSERT INTO public.person_details
            (role_ids, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
            VALUES
            ('{CleanObject(parameters.get('role_ids'))}', '{CleanObject(parameters.get('email'))}', '{CleanObject(parameters.get('new_user_name'))}',
             '{CleanObject(parameters.get('first_name'))}', '{CleanObject(parameters.get('last_name'))}', '{CleanObject(parameters.get('address_street'))}',
             '{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
             '{CleanObject(parameters.get('address_country'))}', {current_epoch_time}, {CleanObject(parameters.get('user_id'))}, '{CleanObject(parameters.get('phone_number'))}',
             '{CleanObject(parameters.get('picture'))}', '{CleanObject(parameters.get('key'))}');
            """
        logger.debug(f"sql= {sql}")
        # Execute update query
        cur.execute(sql)

        # Commit the changes to the database
        conn.commit()

        # Close the cursor and connection
        cur.close()
        conn.close()

        AddToLog("Written/updated!")
        return 1
    except Exception as err:
        AddToLog(traceback.format_exc())
        return 0

def DeleteRecordFromDB(form_data):

    caretaker = form_data['user_name']
    privileges = GetPriviledgesOnly(caretaker)

    if privileges != "-1":
        AddToLog("Forbidden!")
        return 0

    conn = get_db_connection()
    cur = conn.cursor()
    function = form_data.get('function')
    if function == "deployment_delete":
        user_id = form_data['user_id']
        editing_deployment_id = form_data['editing_deployment_id']
        priviledges = form_data['priviledges']
        if editing_deployment_id == None or editing_deployment_id == "" or editing_deployment_id == "0":
            AddToLog("deployment_id is not defined")
            cur.close()
            conn.close()
            return 0
        try:
            if user_id == "-1":
                sql = f"""
                DELETE FROM public.deployments WHERE deployment_id = {editing_deployment_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                sql = f"""
                DELETE FROM public.deployment_details WHERE deployment_id = {editing_deployment_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                # Commit the changes to the database
                conn.commit()

                # Close the cursor and connection
                cur.close()
                conn.close()

                AddToLog("Deleted!")
                return 1
            else:
                #lets check if user_edit = user_id
                sql = f"SELECT user_edit FROM public.deployments WHERE deployment_id = '{editing_deployment_id}'"
                cur.execute(sql)
                result = cur.fetchone()
                if priviledges != "-1":
                    if result[0] != int(user_id):
                        cur.close()
                        conn.close()
                        return 0

                sql = f"""
                DELETE FROM public.deployments WHERE deployment_id = {editing_deployment_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                sql = f"""
                DELETE FROM public.deployment_details WHERE deployment_id = {editing_deployment_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                # Commit the changes to the database
                conn.commit()

                # Close the cursor and connection
                cur.close()
                conn.close()

                AddToLog("Deleted!")
                return 1

        except Exception as err:
            AddToLog(traceback.format_exc())
            cur.close()
            conn.close()
            return 0

    elif function == "device_delete":
        user_id = form_data['user_id']
        editing_device_id = form_data['editing_device_id']
        priviledges = form_data['priviledges']
        if editing_device_id == None or editing_device_id == "" or editing_device_id == "0":
            AddToLog("editing_device_id is not defined")
            cur.close()
            conn.close()
            return 0
        try:
            if user_id == "-1":
                sql = f"""
                DELETE FROM public.deployments WHERE device_id = {editing_device_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                conn.commit()

                # Close the cursor and connection
                cur.close()
                conn.close()

                AddToLog("Deleted!")
                return 1
            #else:
                ##lets check if user_edit = user_id
                #sql = f"SELECT user_edit FROM public.deployments WHERE deployment_id = '{editing_deployment_id}'"
                #cur.execute(sql)
                #result = cur.fetchone()
                #if priviledges != "-1":
                    #if result[0] != int(user_id):
                        #cur.close()
                        #conn.close()
                        #return 0

                #sql = f"""
                #DELETE FROM public.deployments WHERE device_id = {editing_device_id}
                #"""
                #logger.debug(f"sql= {sql}")
                ## Execute update query
                #cur.execute(sql)

                #conn.commit()

                ## Close the cursor and connection
                #cur.close()
                #conn.close()

                #AddToLog("Deleted!")
                #return 1

        except Exception as err:
            AddToLog(traceback.format_exc())
            cur.close()
            conn.close()
            return 0
    else:

        user_id = form_data['user_id']
        editing_user_id = form_data['delete_user_id']
        if editing_user_id == None or editing_user_id == "" or editing_user_id == "0":
            AddToLog("user_id is not defined")
            cur.close()
            conn.close()
            return 0
        try:
            if privileges == "-1":  #user_id == "-1":
                sql = f"""
                DELETE FROM public.person_details WHERE user_id = {editing_user_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                # Commit the changes to the database
                conn.commit()

                # Close the cursor and connection
                cur.close()
                conn.close()

                AddToLog("Deleted!")
                return 1
            else:
                #lets check if user_edit = user_id
                sql = f"SELECT user_edit FROM public.person_details WHERE user_id = '{editing_user_id}'"
                cur.execute(sql)
                result = cur.fetchone()
                if result[0] != int(user_id):
                    cur.close()
                    conn.close()
                    return 0

                sql = f"""
                DELETE FROM public.person_details WHERE user_id = {editing_user_id}
                """
                logger.debug(f"sql= {sql}")
                # Execute update query
                cur.execute(sql)

                # Commit the changes to the database
                conn.commit()

                # Close the cursor and connection
                cur.close()
                conn.close()

                AddToLog("Deleted!")
                return 1

        except Exception as err:
            AddToLog(traceback.format_exc())
            cur.close()
            conn.close()
            return 0

def StoreCaretaker2DB(parameters, editing_user_id):

    conn = get_db_connection()
    cur = conn.cursor()

    if editing_user_id == None or editing_user_id == "":
        editing_user_id = "0"

    try:

        current_utc_time = datetime.datetime.now(timezone.utc)

        # Convert to epoch time
        current_epoch_time = current_utc_time.timestamp()

        if editing_user_id != "0":
            sql = f"""
            UPDATE public.person_details
            SET
                role_ids = '{parameters.get('role_ids')}',
                access_to_deployments = '{parameters.get('access_to_deployments')}',
                email = '{parameters.get('email')}',
                user_name = '{parameters.get('new_user_name')}',
                first_name = '{parameters.get('first_name')}',
                last_name = '{parameters.get('last_name')}',
                address_street = '{parameters.get('address_street')}',
                address_city = '{parameters.get('address_city')}',
                address_zip = '{parameters.get('address_zip')}',
                address_state = '{parameters.get('address_state')}',
                address_country = '{parameters.get('address_country')}',
                time_edit = {current_epoch_time},
                user_edit = {parameters.get('user_id')},
                phone_number = '{parameters.get('phone_number')}',
                picture = '{parameters.get('picture')}',
                key = '{parameters.get('key')}'
            WHERE user_id = {editing_user_id};
            """

        else:
            sql = f"""
            INSERT INTO public.person_details
            (role_ids, access_to_deployments, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
            VALUES
            ('{parameters.get('role_ids')}', '{parameters.get('access_to_deployments')}', '{parameters.get('email')}', '{parameters.get('new_user_name')}', '{parameters.get('first_name')}', '{parameters.get('last_name')}', '{parameters.get('address_street')}', '{parameters.get('address_city')}', '{parameters.get('address_zip')}', '{parameters.get('address_state')}', '{parameters.get('address_country')}', {current_epoch_time}, {parameters.get('user_id')}, '{parameters.get('phone_number')}', '{parameters.get('picture')}', '{parameters.get('key')}');
            """
        logger.debug(f"sql= {sql}")
        # Execute update query
        cur.execute(sql)

        # Commit the changes to the database
        conn.commit()

        # Close the cursor and connection
        cur.close()
        conn.close()

        AddToLog("Written/updated!")
        return 1
    except Exception as err:
        AddToLog(traceback.format_exc())
        return 0

def CleanObject(object_in, typee="s"):

    if typee == "n":
        res = 0
        if object_in == None or object_in == "":
            return 0
        if isinstance(object_in, str):
            try:
                res = object_in.replace("'", '"')
            except:
                pass
            return res
    else:
        res = ""
        if object_in == None:
            return ""
        if isinstance(object_in, str):
            try:
                res = object_in.replace("'", '"')
            except:
                pass
            return res
    return object_in

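
# Illustrative sketch (not in the original module): CleanObject() swaps single
# quotes for double quotes before values are interpolated into SQL strings, and
# maps None/"" to "" (string mode) or 0 (numeric mode). The inputs are made up.
def _example_clean_object():
    a = CleanObject("O'Brien")   # 'O"Brien'
    b = CleanObject(None)        # ''
    c = CleanObject("", "n")     # 0
    d = CleanObject(42, "n")     # 42 (non-strings pass through unchanged)
    return a, b, c, d
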
def StoreDeployment2DB(parameters, editing_deployment_id):

    conn = get_db_connection()
    cur = conn.cursor()

    if editing_deployment_id == None or editing_deployment_id == "":
        editing_deployment_id = "0"

    try:

        current_utc_time = datetime.datetime.now(timezone.utc)

        # Convert to epoch time
        current_epoch_time = current_utc_time.timestamp()

        if editing_deployment_id != "0":
            sql = f"""
            UPDATE public.deployments
            SET
                persons = {CleanObject(parameters.get('persons'), "n")},
                gender = {CleanObject(parameters.get('gender'), "n")},
                race = {CleanObject(parameters.get('race'), "n")},
                born = {CleanObject(parameters.get('born'), "n")},
                pets = {CleanObject(parameters.get('pets'), "n")},
                time_zone_s = '{CleanObject(parameters.get('time_zone_s'))}',
                user_edit = {CleanObject(parameters.get('user_id'), "n")},
                time_edit = {current_epoch_time}
            WHERE deployment_id = {CleanObject(editing_deployment_id, "n")};
            """

        else:
            sql = f"""
            INSERT INTO public.deployments
            (persons, gender, race, born, pets, time_zone_s, user_edit, time_edit)
            VALUES
            ({CleanObject(parameters.get('persons'), "n")}, {CleanObject(parameters.get('gender'), "n")}, {CleanObject(parameters.get('race'), "n")},
             {CleanObject(parameters.get('born'), "n")}, {CleanObject(parameters.get('pets'), "n")}, '{CleanObject(parameters.get('time_zone_s'))}',
             {CleanObject(parameters.get('user_id'), "n")}, {current_epoch_time})
            RETURNING deployment_id;
            """
        logger.debug(f"sql= {sql}")
        # Execute update query
        cur.execute(sql)
        if editing_deployment_id == "0":
            new_deployment_id = cur.fetchone()[0]
        # Commit the changes to the database
        conn.commit()

        if editing_deployment_id != "0":
            sql = f"""
            UPDATE public.deployment_details
            SET
                beneficiary_id = {CleanObject(parameters.get('beneficiary_id'), "n")},
                caretaker_id = {CleanObject(parameters.get('caretaker_id'), "n")},
                owner_id = {CleanObject(parameters.get('owner_id'), "n")},
                installer_id = {CleanObject(parameters.get('installer_id'), "n")},
                address_street = '{CleanObject(parameters.get('address_street'))}',
                address_city = '{CleanObject(parameters.get('address_city'))}',
                address_zip = '{CleanObject(parameters.get('address_zip'))}',
                address_state = '{CleanObject(parameters.get('address_state'))}',
                address_country = '{CleanObject(parameters.get('address_country'))}',

                wifis = '{CleanObject(parameters.get('wifis'))}',
                devices = '{CleanObject(parameters.get('devices'))}',
                lat = {CleanObject(parameters.get('lat'), "n")},
                lng = {CleanObject(parameters.get('lng'), "n")},
                gps_age = {CleanObject(parameters.get('gps_age'), "n")}

            WHERE deployment_id = {editing_deployment_id};
            """

        else:
            sql = f"""
            INSERT INTO public.deployment_details
            (deployment_id, beneficiary_id, caretaker_id, owner_id, installer_id, address_street, address_city, address_zip, address_state, address_country)
            VALUES
            ({new_deployment_id}, {CleanObject(parameters.get('beneficiary_id'), "n")}, {CleanObject(parameters.get('caretaker_id'), "n")}, {CleanObject(parameters.get('owner_id'), "n")}, {CleanObject(parameters.get('installer_id'), "n")},
             '{CleanObject(parameters.get('address_street'))}', '{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
             '{CleanObject(parameters.get('address_country'))}');
            """
        logger.debug(f"sql= {sql}")
        # Execute update query
        cur.execute(sql)

        # Commit the changes to the database
        conn.commit()

        # Close the cursor and connection
        cur.close()
        conn.close()

        AddToLog("Written/updated!")
        return 1
    except Exception as err:
        AddToLog(traceback.format_exc())
        return 0

def StoreDevice2DB(parameters, editing_device_id):

    conn = get_db_connection()
    cur = conn.cursor()

    if editing_device_id == None or editing_device_id == "":
        editing_device_id = "0"

    try:

        current_utc_time = datetime.datetime.now(timezone.utc)

        # Convert to epoch time
        current_epoch_time = current_utc_time.timestamp()

        if editing_device_id != "0":
            sql = f"""
            UPDATE public.devices
            SET
                device_mac = '{CleanObject(parameters.get('device_mac'))}',
                well_id = '{CleanObject(parameters.get('well_id'))}',
                description = '{CleanObject(parameters.get('description'))}',
                location = '{CleanObject(parameters.get('location'))}',
                close_to = '{CleanObject(parameters.get('close_to'))}',
                radar_threshold = '{CleanObject(parameters.get('radar_threshold'))}',
                temperature_calib = '{CleanObject(parameters.get('temperature_calib'))}',
                humidity_calib = '{CleanObject(parameters.get('humidity_calib'))}'
            WHERE device_id = {editing_device_id};
            """

        else:
            sql = f"""
            INSERT INTO public.devices
            (device_mac, well_id, description, location, close_to, radar_threshold, temperature_calib, humidity_calib)
            VALUES
            ('{CleanObject(parameters.get('device_mac'))}', '{CleanObject(parameters.get('well_id'))}', '{CleanObject(parameters.get('description'))}',
             '{CleanObject(parameters.get('location'))}', '{CleanObject(parameters.get('close_to'))}', '{CleanObject(parameters.get('radar_threshold'))}',
             '{CleanObject(parameters.get('temperature_calib'))}', '{CleanObject(parameters.get('humidity_calib'))}');
            """
        logger.debug(f"sql= {sql}")
        # Execute update query
        cur.execute(sql)

        # Commit the changes to the database
        conn.commit()

        # Close the cursor and connection
        cur.close()
        conn.close()

        AddToLog("Written/updated!")
        return 1
    except Exception as err:
        AddToLog(traceback.format_exc())
        return 0

def ShowAge(seconds):
    minutes = int(seconds // 60)
    hours = int(minutes // 60)
    days = int(hours // 24)

    if seconds >= 0:
        hours = int(hours - (days * 24))
        minutes = int(minutes - (days * 24 * 60) - (hours * 60))
        seconds = int(seconds - (days * 24 * 60 * 60) - (hours * 60 * 60) - (minutes * 60))

        if days > 0:
            report = f"{int(days)} d {int(hours)} h {int(minutes)} m {int(seconds)} s"
        elif hours > 0:
            report = f"{int(hours)} h {int(minutes)} m {int(seconds)} s"
        elif minutes > 0:
            report = f"{int(minutes)} m {int(seconds)} s"
        else:
            report = f"{int(seconds)} s"

        return report
    else:
        return "0 s"

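
# Illustrative sketch (not in the original module): ShowAge() renders a duration
# in seconds as "d/h/m/s", dropping leading zero units.
def _example_show_age():
    a = ShowAge(90061)  # "1 d 1 h 1 m 1 s"  (90061 = 86400 + 3600 + 60 + 1)
    b = ShowAge(125)    # "2 m 5 s"
    c = ShowAge(-3)     # "0 s"
    return a, b, c
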
def UpdateDevicesTable(html_string, devices, users):
    #ID, Well id, MAC, Last_Message, Location, Description, Deployment
    table_rows_string = ""
    for device in devices:
        result = next(item for item in users if item[0] == device[6])
        deployment_name = result[1]
        if result[2] != None:
            deployment_name = deployment_name + " " + result[2]
        mac = device[2]
        mac_row_string = f'      <td style="text-align:center"><a href="#" onclick="OpenDevice(\'{mac}\'); return false;">{mac}</a></td>\n'
        age = time.time() - device[3]

        if (age < 300):
            row_sting = f'    <tr style="background-color: #90FFD7;">\n      <th scope="row" style="text-align:center">{device[0]}</th>\n'
        else:
            row_sting = f'    <tr>\n      <th scope="row" style="text-align:center">{device[0]}</th>\n'

        row_ending = f'      <td style="text-align:center"><input type="checkbox" onchange="IsItSingle();"></td>\n    </tr>\n'
        for col_cnt in range(1, len(device)):
            column_value = device[col_cnt]
            #print(column_value)
            if col_cnt == 2:
                col_string_template = mac_row_string
            elif col_cnt == 3:
                if column_value > 0:
                    col_string_template = f'      <td style="text-align:center">{ShowAge(age)}</td>\n'
                else:
                    col_string_template = f'      <td style="text-align:center">No data</td>\n'
            elif col_cnt == 4:
                col_string_template = f'      <td style="text-align:center">{column_value}</td>\n'
            elif col_cnt == 5:
                if column_value == None:
                    col_string_template = f'      <td style="text-align:center"></td>\n'
                else:
                    col_string_template = f'      <td style="text-align:center">{column_value}</td>\n'
            elif col_cnt == 6:
                col_string_template = f'      <td style="text-align:center"><a href="#" onclick="OpenDeployment(\'{column_value}\')" title="{deployment_name}">{column_value}</a></td>\n'
            else:
                if column_value == None:
                    column_value = ""
                col_string_template = f'      <td style="text-align:center">{column_value}</td>\n'
            row_sting = row_sting + col_string_template
        row_sting = row_sting + row_ending
        table_rows_string = table_rows_string + row_sting
    #print(table_rows_string)

    html_string = html_string.replace("###ROWS###", table_rows_string)
    return html_string

def UpdateDeploymentsSelector(html_string, deployments, include_all=True, selected="1"):
    # <option value="All" selected>All</option>
    if include_all:
        selector_string = f'    <option value="0">All</option>\n'
    else:
        selector_string = ''

    for deployment in deployments:
        first_name = ""
        last_name = ""
        if deployment[1] != None:
            first_name = deployment[1]
        if deployment[2] != None:
            last_name = deployment[2]
        if deployment[0] == int(selected):
            choice_string = f'    <option value="{deployment[0]}" selected>{deployment[0]} {first_name} {last_name}</option>\n'
        else:
            choice_string = f'    <option value="{deployment[0]}">{deployment[0]} {first_name} {last_name}</option>\n'
        selector_string = selector_string + choice_string
    #print(selector_string)

    html_string = html_string.replace("###INSTALLS###", selector_string)
    return html_string

def GetDeviceDetails(cur, deployment_ids, location_id):

    #ID, Well id, MAC, Last_Message, Location, Description, Deployment
    macs = [mac for _, mac in deployment_ids]
    #macs = list(deployment_ids.keys())
    macs_string_nq = ",".join(macs)
    macs_string = "'" + "','".join(macs) + "'"

    if location_id == -1:
        sql = f"""
        WITH ordered_macs AS (
            SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
                   generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
        )
        SELECT d.*
        FROM public.devices d
        JOIN ordered_macs om ON d.device_mac = om.mac::text
        WHERE device_mac IN ({macs_string})
        ORDER BY om.position;
        """
    else:
        sql = f"""
        WITH ordered_macs AS (
            SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
                   generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
        )
        SELECT d.*
        FROM public.devices d
        JOIN ordered_macs om ON d.device_mac = om.mac::text
        WHERE device_mac IN ({macs_string}) AND location = {location_id}
        ORDER BY om.position;
        """

    cur.execute(sql)
    print(sql)
    devices_ids_records = cur.fetchall()
    all_details = []

    devices_ids_list = [x[0] for x in devices_ids_records]
    device_ids_string = ",".join(map(str, devices_ids_list))
    #sql = f"SELECT device_id, MAX(time) as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) GROUP BY device_id"  #too slow
    sql = f"SELECT DISTINCT ON (device_id) device_id, time as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) AND time > now() - INTERVAL '1 day' ORDER BY device_id, time DESC"
    cur.execute(sql)
    print(sql)
    devices_times = cur.fetchall()  #cur.fetchone()
    found_device_details = {}
    for device_record in devices_times:
        device_id, last_message_time = device_record
        found_device_details[device_id] = last_message_time
    cnt = 0
    for device_table_record in devices_ids_records:
        # Look up the last-message time for the current device (not a stale device_id
        # left over from the previous loop).
        device_id = device_table_record[0]
        if len(devices_times) > 0:
            if device_id in found_device_details:
                last_message_time = found_device_details[device_id]
                last_message_epoch = int(last_message_time.timestamp())
            else:
                last_message_time = int(device_table_record[14])
                last_message_epoch = last_message_time
        else:
            last_message_time = 0
            last_message_epoch = 0

        #print(last_message_epoch)
        #print(type(last_message_epoch))
        mac = device_table_record[1]
        well_id = device_table_record[2]
        description = device_table_record[3]
        if description == None:
            description = ""
        if device_table_record[5] != None:
            if device_table_record[5] != "":
                description = description + " Close to " + device_table_record[5]
        location_id = device_table_record[4]
        row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
        cnt += 1
        all_details.append(row_data)

    return all_details

def GetVisibleDevices(deployments):

    devices_details = []
    stt = time.time()
    with get_db_connection() as conn:
        #list all devices that user has access to
        if deployments == "-1":
            sql = "SELECT deployment_id, devices FROM public.deployment_details"
        else:
            sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"

        with conn.cursor() as cur:
            print(sql)
            cur.execute(sql)
            devices_groups = cur.fetchall()  #cur.fetchone()
            deployment_ids = []
            for deployment_id, dev_group in devices_groups:
                if dev_group != None:
                    if len(dev_group) > 10:
                        if "[" not in dev_group:
                            if "," not in dev_group:
                                dev_group = '["' + dev_group + '"]'
                            else:
                                dev_group = dev_group.replace(" ", "")
                                dev_group = dev_group.replace(",", '","')
                                dev_group = '["' + dev_group + '"]'

                        macs_group = literal_eval(dev_group)

                        for mac in macs_group:
                            deployment_ids.append((deployment_id, mac))
                    else:
                        print(f"Deployment {deployment_id} has dev_group empty")
            devices_details = GetDeviceDetails(cur, deployment_ids, -1)
            #devices_details.append(devices_detail)

    return devices_details

def GetVisibleDevicesPerLocation(deployments, location):

    devices_details = []

    with get_db_connection() as conn:
        #list all devices that user has access to
        if deployments == "-1" or deployments == "0":
            sql = "SELECT deployment_id, devices FROM public.deployment_details"
        else:
            sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"

        with conn.cursor() as cur:
            cur.execute(sql)
            devices_groups = cur.fetchall()  #cur.fetchone()
            deployment_ids = []
            for deployment_id, dev_group in devices_groups:
                if dev_group != None:
                    if len(dev_group) > 10:
                        if dev_group[0] == "[":
                            macs_group = literal_eval(dev_group)
                        else:
                            macs_group = dev_group.split(',')
                        for mac in macs_group:
                            deployment_ids.append((deployment_id, mac))

            devices_details = GetDeviceDetails(cur, deployment_ids, location_indexes[location])
            #devices_details.append(devices_detail)

    return devices_details

def GetUsersFromDeployments(deployments):
    #list all deployments (with beneficiary names) that user has access to
    deployments_dets = []
    with get_db_connection() as conn:
        try:
            if deployments == "-1":
                sql = f"""
                SELECT dd.deployment_id, pd.first_name, pd.last_name
                FROM deployment_details dd
                JOIN person_details pd ON dd.beneficiary_id = pd.user_id
                ORDER BY dd.deployment_id;
                """
            else:
                sql = f"""
                SELECT dd.deployment_id, pd.first_name, pd.last_name
                FROM deployment_details dd
                JOIN person_details pd ON dd.beneficiary_id = pd.user_id
                WHERE dd.deployment_id IN ({deployments})
                ORDER BY dd.deployment_id;
                """
            with conn.cursor() as cur:
                cur.execute(sql)
                deployments_dets = cur.fetchall()  #cur.fetchone()
        except Exception as err:
            logger.error("GetUsersFromDeployments " + str(err) + " " + sql)

    return deployments_dets

def GetPreviousDate(current_date):
    date_obj = datetime.datetime.strptime(current_date, "%Y-%m-%d")
    # Subtract one day
    previous_date = date_obj - timedelta(days=1)
    # Convert back to string format
    previous_date_str = previous_date.strftime("%Y-%m-%d")
    return previous_date_str


def CovertToIsoTime(date_s, n_minute):

    hours = n_minute // 60  # Integer division
    minutes = n_minute % 60

    base_date = datetime.datetime.strptime(date_s, "%Y-%m-%d")
    final_datetime = base_date + timedelta(hours=hours, minutes=minutes)
    iso_timestamp = final_datetime.isoformat()
    return iso_timestamp

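
# Illustrative sketch (not in the original module): the two date helpers work on
# "YYYY-MM-DD" strings and minute-of-day offsets. The inputs are made up.
def _example_date_helpers():
    prev = GetPreviousDate("2024-03-01")     # "2024-02-29" (2024 is a leap year)
    iso = CovertToIsoTime("2024-03-01", 75)  # "2024-03-01T01:15:00"
    return prev, iso
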
def GetSensorsDetailsFromDeployment(deployment_id, ddate, filter_minutes):
|
|
#list all devices that user has access to
|
|
deployments_dets = []
|
|
with get_db_connection() as conn:
|
|
try:
|
|
sql = f"""
|
|
SELECT pd.user_id, pd.first_name, pd.last_name, pd.address_street, pd.picture
|
|
FROM deployment_details dd
|
|
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
|
|
WHERE dd.deployment_id ={deployment_id};
|
|
"""
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
deployments_dets = cur.fetchone()
|
|
except Exception as err:
|
|
logger.error("GetSensorsDetailsFromDeployment "+str(err) +" "+sql)
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
#Which sensor is in: Bathroom, Kitchen, Bedroom ?
|
|
bathrooms = []
|
|
kitchens = []
|
|
bedrooms = []
|
|
dev_id_to_location = {0: "Outside/?"}
|
|
for device in devices_list:
|
|
dev_id_to_location[device[1]] = device[2]
|
|
if Consolidataed_locations[device[2]] == "Bathroom":
|
|
bathrooms.append(device[1])
|
|
elif Consolidataed_locations[device[2]] == "Kitchen":
|
|
kitchens.append(device[1])
|
|
elif Consolidataed_locations[device[2]] == "Bedroom":
|
|
bedrooms.append(device[1])
|
|
|
|
#we need to determine where user is seen last, and user sensor data from there...
|
|
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
logger.debug(f"locations_file1 ={locations_file}")
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
force_recreate = False
|
|
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
|
|
if file_exists1:
|
|
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date_local = file_modified_local.date() #local date
|
|
file_modified_date_utc = file_modified_utc1.date()
|
|
file_date_utc = MapFileToDate(locations_file) #locations_file is UTC
|
|
#if file_modified_date_local < file_date_utc:
|
|
if file_modified_utc1.date() < file_date_utc:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - file_modified_local
|
|
if time_passed.seconds > 30: #recreate if older than 5 minutes
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
logger.debug(f"force_recreate={str(force_recreate)}")
|
|
|
|
if force_recreate:
|
|
CreateLocationsStripe(locations_file, time_zone_s)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
|
|
last_present_device = 0
|
|
last_present = 0
|
|
last_bathroom = 0
|
|
last_kitchen = 0
|
|
last_bedroom = 0
|
|
last_bathroom_date = ddate
|
|
last_kitchen_date = ddate
|
|
last_bedroom_date = ddate
|
|
|
|
if locations_list_s is not None:
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
if len(locations_list) > 1:
|
|
if locations_list[-1][0] > 0:
|
|
last_present_device = locations_list[-1][0]
|
|
last_present = locations_list[-1][1] + locations_list[-1][2]
|
|
else:
|
|
last_present_device = locations_list[-2][0]
|
|
last_present = locations_list[-2][1] + locations_list[-2][2]
|
|
elif len(locations_list) == 1:
|
|
last_present_device = locations_list[0][0]
|
|
|
|
|
|
#Lets find last bathroom presence time
|
|
|
|
if len(locations_list) > 0 and len(bathrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bathrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bathroom:
|
|
last_bathroom = loc_time[1] + loc_time[2]
|
|
last_bathroom_date = ddate
|
|
|
|
|
|
#Lets find last kitchen presence time
|
|
|
|
if len(locations_list) > 0 and len(kitchens) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in kitchens:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_kitchen:
|
|
last_kitchen = loc_time[1] + loc_time[2]
|
|
last_kitchen_date = ddate
|
|
|
|
|
|
#Lets find last bedroom presence time
|
|
|
|
if len(locations_list) > 0 and len(bedrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bedrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bedroom:
|
|
last_bedroom = loc_time[1] + loc_time[2]
|
|
last_bedroom_date = ddate
|
|
|
|
|
|
if last_bathroom == 0 or last_kitchen == 0 or last_bedroom == 0:
|
|
ddate = GetPreviousDate(ddate)
|
|
|
|
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
logger.debug(f"locations_file2 ={locations_file}")
|
|
|
|
force_recreate = False
|
|
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
|
|
logger.debug(f"file_exists1={str(file_exists1)}")
|
|
logger.debug(f"file_modified_utc1={str(file_modified_utc1)}")
|
|
#file_exists1, file_modified_utc1
|
|
if file_exists1:
|
|
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date_local = file_modified_local.date()
|
|
file_date_utc = MapFileToDate(locations_file)
|
|
if file_modified_utc1.date() < file_date_utc:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - file_modified_local
|
|
                if time_passed.total_seconds() > 30:  # recreate if the cached file is older than 30 seconds
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
CreateLocationsStripe(locations_file, time_zone_s)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
|
|
|
|
|
|
if (locations_list_s is not None):
|
|
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
if last_present_device == 0:
|
|
if len(locations_list) > 1:
|
|
if locations_list[-1][0] > 0:
|
|
last_present_device = locations_list[-1][0]
|
|
else:
|
|
last_present_device = locations_list[-2][0]
|
|
elif len(locations_list) == 1:
|
|
last_present_device = locations_list[0][0]
|
|
|
|
if last_bathroom == 0:
|
|
if len(locations_list) > 0 and len(bathrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bathrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bathroom:
|
|
last_bathroom = loc_time[1] + loc_time[2]
|
|
last_bathroom_date = ddate
|
|
|
|
if last_kitchen == 0:
|
|
if len(locations_list) > 0 and len(kitchens) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in kitchens:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_kitchen:
|
|
last_kitchen = loc_time[1] + loc_time[2]
|
|
last_kitchen_date = ddate
|
|
|
|
if last_bedroom == 0:
|
|
if len(locations_list) > 0 and len(bedrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bedrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bedroom:
|
|
last_bedroom = loc_time[1] + loc_time[2]
|
|
last_bedroom_date = ddate
|
|
|
|
|
|
|
|
last_bathroom_time = "2023-01-01T00:00:00"
|
|
if last_bathroom > 0:
|
|
last_bathroom_time = CovertToIsoTime(last_bathroom_date, last_bathroom)
|
|
|
|
last_kitchen_time = "2023-01-01T00:00:00"
|
|
if last_kitchen > 0:
|
|
last_kitchen_time = CovertToIsoTime(last_kitchen_date, last_kitchen)
|
|
|
|
last_bedroom_time = "2023-01-01T00:00:00"
|
|
if last_bedroom > 0:
|
|
last_bedroom_time = CovertToIsoTime(last_bedroom_date, last_bedroom)
|
|
|
|
last_present_time = "2023-01-01T00:00:00"
|
|
if last_present > 0:
|
|
last_present_time = CovertToIsoTime(ddate, last_present)
|
|
|
|
# debug for 48h bug
|
|
if last_bathroom_time == "2023-01-01T00:00:00" or last_kitchen_time == "2023-01-01T00:00:00" or last_bedroom_time == "2023-01-01T00:00:00":
|
|
#last_bathroom_time = "48h" if last_bathroom_time == "2023-01-01T00:00:00" else f"{last_bathroom-last_bathroom_time}"
|
|
#last_kitchen_time = "48h" if last_kitchen_time == "2023-01-01T00:00:00" else f"{last_kitchen-last_kitchen_time}"
|
|
#last_bedroom_time = "48h" if last_bedroom_time == "2023-01-01T00:00:00" else f"{last_bedroom-last_bedroom_time}"
|
|
|
|
logger.debug(f"48h-> deployment_id={str(deployment_id)}, ddate={str(ddate)}")
|
|
logger.debug(f"48h-> force_recreate={force_recreate}")
|
|
logger.debug(f"48h-> last_bathroom_time={last_bathroom_time}|last_kitchen_time={last_kitchen_time}|last_bedroom_time={last_bedroom_time}")
|
|
logger.debug(f"48h-> devices_list={str(devices_list)}")
|
|
logger.debug(f"48h-> bathrooms={str(bathrooms)}")
|
|
logger.debug(f"48h-> kitchens={str(kitchens)}")
|
|
logger.debug(f"48h-> bedrooms={str(bedrooms)}")
|
|
logger.debug(f"48h-> locations_list_s={str(locations_list_s)}")
|
|
|
|
try:
|
|
sql = f"""
|
|
SELECT * FROM sensor_readings
|
|
WHERE device_id = {last_present_device}
|
|
ORDER BY time DESC
|
|
LIMIT 1;
|
|
"""
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
sensor_dets = cur.fetchone()
|
|
except Exception as err:
|
|
logger.error("GetSensorsDetailsFromDeployment1 "+str(err) +" "+sql)
|
|
|
|
# wellness_score_percent
|
|
wellness_score_percent = 90
|
|
|
|
# bedroom_temperature
|
|
bedroom_temperature = 0
|
|
|
|
# sleep_bathroom_visit_count
|
|
sleep_bathroom_visit_count = 0
|
|
|
|
# bedroom_co2
|
|
bedroom_co2 = 400
|
|
|
|
# shower_detected_time
|
|
shower_detected_time = last_bathroom_time
|
|
|
|
# breakfast_detected_time
|
|
breakfast_detected_time = 0
|
|
|
|
# living_room_detected_time
|
|
living_room_time_spent = 0
|
|
|
|
# outside_hours
|
|
outside_hours = 0
|
|
|
|
    # assemble the report; deployments_dets holds (user_id, first_name, last_name, address_street, picture)
|
|
|
|
picture_url = deployments_dets[4]
|
|
report = {"user_id":deployments_dets[0],
|
|
"name":deployments_dets[1] + " " + deployments_dets[2],
|
|
"address":deployments_dets[3],
|
|
"time_zone":time_zone_s,
|
|
"picture":picture_url,
|
|
"bathroom_at": last_bathroom_time,
|
|
"kitchen_at": last_kitchen_time,
|
|
"bedroom_at": last_bedroom_time,
|
|
"temperature": (sensor_dets[2] - 16) if sensor_dets != None else 0,
|
|
"smell": "clean",
|
|
"bathroom_delayed": [6, 12],
|
|
"kitchen_delayed": [6, 12],
|
|
"bedroom_delayed": [13, 16],
|
|
"last_location": dev_id_to_location[last_present_device],
|
|
"last_detected_time": last_present_time,
|
|
"wellness_score_percent": wellness_score_percent,
|
|
"wellness_descriptor_color": "bg-green-100 text-green-700",
|
|
"bedroom_temperature": bedroom_temperature,
|
|
"sleep_bathroom_visit_count": sleep_bathroom_visit_count,
|
|
"bedroom_co2": bedroom_co2,
|
|
"shower_detected_time": shower_detected_time,
|
|
"breakfast_detected_time": breakfast_detected_time,
|
|
"living_room_time_spent": living_room_time_spent,
|
|
"outside_hours": outside_hours,
|
|
"wellness_descriptor": "Great!",
|
|
"last_seen_alert": "Alert = None",
|
|
"last_seen_alert_colors": "bg-green-100 text-green-700", #https://tailwindcss.com/docs/colors
|
|
"most_time_spent_in": "Bedroom",
|
|
"sleep_hours": "7.9"
|
|
}
|
|
|
|
return report
|
|
|
|
def ToList(input_data):
|
|
# If input is already a list
|
|
if isinstance(input_data, list):
|
|
return [str(x).strip() for x in input_data]
|
|
|
|
# If input is string
|
|
if isinstance(input_data, str):
|
|
# Remove outer brackets if present
|
|
cleaned = input_data.strip('()')
|
|
cleaned = cleaned.strip('[]')
|
|
# Remove extra quotes
|
|
cleaned = cleaned.replace('"', '').replace("'", '')
|
|
# Split by comma and clean each element
|
|
return [x.strip() for x in cleaned.split(',')]
|
|
|
|
raise ValueError(f"Unsupported input type: {type(input_data)}")
|
|
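# Illustrative sketch (example MACs taken from the comments elsewhere in this module): ToList
# accepts either a ready-made list or a comma-separated string, with or without brackets/quotes.
def _example_to_list():
    a = ToList(['64B70888FA84', '64B70888F6F0'])      # -> ['64B70888FA84', '64B70888F6F0']
    b = ToList("64B70888FA84,64B70888F6F0")           # -> ['64B70888FA84', '64B70888F6F0']
    c = ToList("['64B70888FA84', '64B70888F6F0']")    # -> ['64B70888FA84', '64B70888F6F0']
    return a, b, c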
|
|
def MACsToWellIds(cur, macs_list):
|
|
|
|
device_ids = []
|
|
device_list = []
|
|
|
|
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
|
|
if macs_string != "'None'":
|
|
sqlr = f"SELECT well_id, device_mac, device_id, location, description, radar_threshold, close_to FROM public.devices WHERE device_mac IN ({macs_string})"
|
|
|
|
#print (sqlr)
|
|
macs_map = {}
|
|
cur.execute(sqlr)
|
|
proximitys_list = cur.fetchall()
|
|
for well_id, mac, device_id, location, description, radar_threshold, close_to in proximitys_list:
|
|
macs_map[mac] = (well_id, device_id, location_names[location], description, mac, radar_threshold, close_to)
|
|
|
|
|
|
for mac in macs_list:
|
|
device_ids.append(macs_map[mac][1])
|
|
device_list.append(macs_map[mac])
|
|
|
|
return device_ids, device_list
|
|
|
|
def MACsStrToDevIds(cur, macs):
|
|
|
|
device_ids = []
|
|
    # repackage the string so each MAC is wrapped in single quotes for the SQL IN clause
|
|
macs_list = ToList(macs)
|
|
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
|
|
if macs_string != "'None'":
|
|
|
|
sqlr = f"SELECT device_mac, device_id FROM public.devices WHERE device_mac IN ({macs_string})"
|
|
print (sqlr)
|
|
macs_map = {}
|
|
cur.execute(sqlr)
|
|
proximitys_list = cur.fetchall()
|
|
for mac, device_id in proximitys_list:
|
|
device_ids.append((mac, device_id))
|
|
|
|
return device_ids
|
|
|
|
|
|
|
|
def ReadCleanStringDB(cur, sql):
|
|
cur.execute(sql)
|
|
temp_string = cur.fetchone()
|
|
if temp_string == None:
|
|
return ""
|
|
else:
|
|
return str(temp_string[0]).strip()
|
|
|
|
# obtain device_list, device_ids for deployment_id on time as epoch_from_file_s (usually today)
|
|
# it tries first overridden/newly-installed (FROM public.deployment_history)
|
|
# then if none found there searches FROM public.deployment_details
|
|
def GetProximityList(deployment_id, epoch_from_file_s):
|
|
|
|
#both are valid:
|
|
#64B70888FA84,64B70888F6F0,64B70888F860,64B70889062C,64B70888FAB0,64B708896BDC,64B708897428
|
|
#['64B70888FA84', '64B70888F6F0', '64B70888F860', '64B70889062C', '64B70888FAB0', '64B708896BDC', '64B708897428']
|
|
|
|
#result_list = []
|
|
#well_ids = []
|
|
with get_db_connection() as conn:
|
|
|
|
sqlr = f"""
|
|
SELECT * FROM (
|
|
SELECT proximity
|
|
FROM public.deployment_history
|
|
WHERE deployment_id = {deployment_id}
|
|
AND time <= {epoch_from_file_s}
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) AS latest_deployment
|
|
"""
|
|
#print (sqlr)
|
|
with conn.cursor() as cur:
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
|
|
if devices_string == "":
|
|
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
|
|
#print (sqlr)
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
|
|
|
|
if devices_string == "":
|
|
                return [], []  # keep the (device_list, device_ids) shape expected by callers
|
|
|
|
macs_list = ToList(devices_string)
|
|
device_ids, device_list = MACsToWellIds(cur, macs_list)
|
|
return device_list, device_ids
|
|
|
|
def FilterList(to_filter: str, allowed: str) -> str:
|
|
# Convert comma-separated strings to sets
|
|
filter_set = set(to_filter.split(','))
|
|
allowed_set = set(allowed.split(','))
|
|
|
|
# Find intersection and sort the result
|
|
filtered = sorted(filter_set.intersection(allowed_set), key=int)
|
|
|
|
# Join back to comma-separated string
|
|
return ','.join(filtered)
|
|
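# Illustrative sketch (assumed example ids): FilterList keeps only the deployment ids that are
# also present in the allowed list and returns them sorted numerically.
def _example_filter_list():
    return FilterList("12,7,3", "3,12,99")   # -> "3,12"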
|
|
def GetMatchingDevices(privileges, group, deployment, location):
|
|
|
|
global LocationsMap
|
|
|
|
results=[]
|
|
if privileges != "-1":
|
|
if deployment == "" or deployment == "0":
|
|
deployment = privileges
|
|
|
|
privileges_list = privileges.split(',')
|
|
if deployment != "0":
|
|
if "," in deployment:
|
|
deployment = FilterList(deployment, privileges)
|
|
else:
|
|
if deployment not in privileges_list:
|
|
return results
|
|
else:
|
|
if deployment == "0":
|
|
deployment = "-1"
|
|
|
|
devices = GetVisibleDevicesPerLocation(deployment, location)
|
|
return devices
|
|
|
|
def getOldestDeploymentHistoryFromBeneficiary(deployment_id):
|
|
#this will return oldest entry as well as last proximity (devices)
|
|
st = time.time()
|
|
print(f"*0 ----{time.time() - st}")
|
|
results=[]
|
|
well_ids_last = [] #this needs to be list of tuples (well_id, Location_st, Description)
|
|
    oldest_time = None
    device_alls_last = []  # make sure this is defined even if the lookups below raise
|
|
try:
|
|
print(f"*0a ----{time.time() - st}")
|
|
with get_db_connection() as conn:
|
|
sqlr = f"""
|
|
SELECT * FROM (
|
|
SELECT time, proximity
|
|
FROM public.deployment_history
|
|
WHERE deployment_id = {deployment_id}
|
|
ORDER BY time ASC
|
|
) AS latest_deployment
|
|
"""
|
|
print (sqlr)
|
|
print(f"*1 ----{time.time() - st}")
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
print(f"*2 ----{time.time() - st}")
|
|
results = cur.fetchall()
|
|
print(f"*3 ----{time.time() - st}")
|
|
#lets find which of historical sets has data in DB
|
|
if results == None or results == []: #look in deployment_details
|
|
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
|
|
#print (sqlr)
|
|
print(f"*4 ----{time.time() - st}")
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
print(f"*5 ----{time.time() - st}")
|
|
macs_list = ToList(devices_string)
|
|
print(f"*6 ----{time.time() - st}")
|
|
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
|
|
sql_query = """
|
|
SELECT device_id, first_seen_at
|
|
FROM device_first_seen
|
|
WHERE device_id = ANY(%s)
|
|
GROUP BY device_id;
|
|
"""
|
|
print(f"*7 ----{time.time() - st}")
|
|
try:
|
|
cur.execute(sql_query, (device_ids_last,))
|
|
results1 = cur.fetchall()
|
|
if results1 == []:
|
|
pass
|
|
else:
|
|
oldest_time = results1[0][1]
|
|
except Exception as e:
|
|
AddToLog(traceback.format_exc())
|
|
AddToLog(str(e))
|
|
print(f"*8 ----{time.time() - st}")
|
|
else:
|
|
history_entry = results[-1]
|
|
macs_list = ToList(history_entry[1])
|
|
print(f"*9 ----{time.time() - st}")
|
|
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
|
|
|
|
|
|
for history_entry in results:
|
|
macs_list = ToList(history_entry[1])
|
|
print(f"*10 ----{time.time() - st}")
|
|
device_ids, device_alls = MACsToWellIds(cur, macs_list)
|
|
print(f"*11 ----{time.time() - st}")
|
|
sql_query = """
|
|
SELECT time as oldest_record_time
|
|
FROM sensor_readings
|
|
WHERE device_id = ANY(%s)
|
|
ORDER BY time ASC
|
|
LIMIT 1;
|
|
"""
|
|
print(f"*12 ----{time.time() - st}")
|
|
try:
|
|
cur.execute(sql_query, (device_ids_last,))
|
|
results1 = cur.fetchall()
|
|
oldest_time = results1[0][0]
|
|
if oldest_time != None:
|
|
break
|
|
|
|
|
|
except Exception as e:
|
|
print(str(e))
|
|
print(f"*13 ----{time.time() - st}")
|
|
|
|
except Exception as e:
|
|
print(f"*0b ----{time.time() - st}")
|
|
AddToLog(traceback.format_exc())
|
|
|
|
print(f"*14 ----{time.time() - st}")
|
|
return oldest_time, device_alls_last
|
|
|
|
|
|
def getLastEditedBeneficiary(beneficiary):
|
|
|
|
    # generate the token here to eliminate issues with an outdated token
|
|
token = generate_token(beneficiary)
|
|
url = 'https://well-api.azurewebsites.net/api/well_api'
|
|
params = {
|
|
"name": "beneficiary_detail",
|
|
"beneficiary": beneficiary,
|
|
"token": token
|
|
}
|
|
#{"id": "user_beneficiary_bernhard@wellnuo.com", "MAC": "BENEFICIARY", "email": "bernhard@wellnuo.com", "edit_date": "Fri Aug 16 06:45:01 2024", "c_password": "bern1", "first_name": "Bernhard", "last_name": "Knigge", "address": "776 Dubanski Dr.", "address_city": "San Jose", "address_state": "CA", "address_zip": "95123", "address_country": "United States", "phone_number": "4087055709", "persons": "2", "gender": "M", "race": "W", "born": "1972", "pets": "1", "creds": "", "devs": "[[203, 'Living Room', '', '64B708890B14'], [251, 'Bathroom', '', '64B7088909E8'], [252, 'Bedroom', '', '64B708890734'], [204, 'Bathroom', 'Guest', '64B708890288'], [201, 'Kitchen', 'toaster', '64B708890584'], [202, 'Kitchen', 'stove', '64B7088906D8'], [205, 'Office', '', '64B708897018']]", "tzone": "America/Los_Angeles", "ttl": -1, "_rid": "R60hANIG-K+qTQIAAAAAAg==", "_self": "dbs/R60hAA==/colls/R60hANIG-K8=/docs/R60hANIG-K+qTQIAAAAAAg==/", "_etag": "\"3500a0ae-0000-0800-0000-66bef56d0000\"", "_attachments": "attachments/", "_ts": 1723790701}
|
|
response = requests.get(url, params=params)
|
|
if response.status_code == 200:
|
|
|
|
text = response.text
|
|
#print(text)
|
|
if text == "Log-Out":
|
|
return text
|
|
if text[0] == "{":
|
|
data = json.loads(response.text)
|
|
date_string = data["edit_date"]
|
|
parsed_date = datetime.datetime.strptime(date_string, '%c')
|
|
# Convert the datetime object to a timestamp (epoch time)
|
|
epoch_str = str(time.mktime(parsed_date.timetuple()))
|
|
devices = data["devs"]
|
|
return(epoch_str, devices)
|
|
else:
|
|
return text,""
|
|
else:
|
|
logger.debug((f"Failed to retrieve the data, status code: {response.status_code}"))
|
|
|
|
return "",""
|
|
|
|
def GetDeploymentNameFromId(Id):
|
|
|
|
con = sqlite3.connect(main_db)
|
|
con.text_factory = str
|
|
cur = con.cursor()
|
|
results=[]
|
|
SQL = "SELECT name FROM deployments WHERE id =" + Id
|
|
df = cur.execute(SQL)
|
|
results = cur.fetchall()
|
|
if len(results) > 0:
|
|
return results[0][0]
|
|
else:
|
|
return ""
|
|
|
|
def GetTimeZoneOfDeployment(deployment_id):
|
|
time_zone_st = 'America/Los_Angeles'
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
sqlr = f"SELECT time_zone_s from public.deployments WHERE deployment_id ={deployment_id}"
|
|
time_zone_st = ReadCleanStringDB(cur, sqlr)
|
|
return time_zone_st
|
|
|
|
def StringToEpoch(date_string, time_zone_s):
|
|
"""
|
|
Convert a date string to epoch timestamp for start of day (midnight) in specified timezone
|
|
|
|
Args:
|
|
date_string (str): Date in 'YYYY-MM-DD' format
|
|
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
|
|
|
|
Returns:
|
|
float: Epoch timestamp in seconds
|
|
"""
|
|
# Parse the date string
|
|
date_format = '%Y-%m-%d'
|
|
naive_date = datetime.datetime.strptime(date_string, date_format)
|
|
|
|
# Get the timezone
|
|
timezone = pytz.timezone(time_zone_s)
|
|
|
|
# Localize the date to midnight in the specified timezone
|
|
local_date = timezone.localize(naive_date)
|
|
|
|
# Convert to epoch timestamp
|
|
epoch_time = local_date.timestamp()
|
|
|
|
return epoch_time
|
|
|
|
def LocalDateToUTCEpoch(local_date_str, time_zone_s):
|
|
"""
|
|
Convert a date string to epoch timestamp for start of day (midnight) in UTC
|
|
|
|
Args:
|
|
local_date_str (str): Date in 'YYYY-MM-DD' format
|
|
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
|
|
|
|
Returns:
|
|
float: Epoch UTC timestamp in seconds
|
|
"""
|
|
timezone = pytz.timezone(time_zone_s)
|
|
# Parse the date string
|
|
date_format = '%Y-%m-%d'
|
|
local_datetime = datetime.datetime.strptime(local_date_str, date_format)
|
|
local_datetime = timezone.localize(local_datetime)
|
|
|
|
utc_datetime = local_datetime.astimezone(pytz.UTC)
|
|
epoch_time = int(utc_datetime.timestamp())
|
|
|
|
return epoch_time
|
|
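# Illustrative sketch (assumed date/timezone): StringToEpoch and LocalDateToUTCEpoch both return
# the epoch of local midnight for the given date; the first returns a float, the second an int.
def _example_local_midnight_epochs():
    d, tz = "2025-01-15", "America/Los_Angeles"
    assert int(StringToEpoch(d, tz)) == LocalDateToUTCEpoch(d, tz)
    return LocalDateToUTCEpoch(d, tz)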
|
|
def GetDeploymentDatesBoth(deployment_in):
|
|
|
|
#when looking at the date, date is defined in TZ where device is!
|
|
#Lets take oldest data from first member of deployment
|
|
st = time.time()
|
|
date_list = []
|
|
print(f"&0 ----{time.time() - st}")
|
|
time_zone_st = GetTimeZoneOfDeployment(deployment_in)
|
|
print(f"&1 ----{time.time() - st}")
|
|
oldest_date_dt_utc, devices_all = getOldestDeploymentHistoryFromBeneficiary(deployment_in)
|
|
print(f"&2 ----{time.time() - st}")
|
|
if oldest_date_dt_utc != None:
|
|
#get date in local time zone from UTC datetime
|
|
|
|
#oldest_date_dt
|
|
# Get today's date
|
|
local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
|
|
oldest_date_dt_local = oldest_date_dt_utc.astimezone(local_timezone)
|
|
today_date = datetime.datetime.now(local_timezone)
|
|
|
|
# Generate a list of date strings from oldest_date to today in inverted order
|
|
date_list = [(today_date - timedelta(days=x)).strftime('%Y-%m-%d') for x in range((today_date - oldest_date_dt_local).days + 1)]
|
|
print(f"&3 ----{time.time() - st}")
|
|
return date_list, devices_all
|
|
|
|
def check_file_exists(file_name, bucket_name="daily-maps"):
|
|
try:
|
|
# Try to get the object's stats - this will raise an exception if the object doesn't exist
|
|
stat_result = miniIO_blob_client.stat_object(bucket_name, file_name)
|
|
last_modified_utc = stat_result.last_modified
|
|
return True, last_modified_utc
|
|
except S3Error as e:
|
|
if e.code == 'NoSuchKey':
|
|
return False, 0
|
|
# Re-raise if it's a different error
|
|
raise
|
|
|
|
def get_text_dimensions(text, font, font_scale, thickness):
|
|
(width, height), baseline = cv2.getTextSize(text, font, font_scale, thickness)
|
|
return {
|
|
'width': width,
|
|
'height': height,
|
|
'baseline': baseline,
|
|
'total_height': height + baseline
|
|
}
|
|
|
|
def save_to_minio(image, filename, bucket_name="daily-maps", content_type="image/png"):
|
|
"""
|
|
Save a PIL Image directly to MinIO
|
|
|
|
Args:
|
|
image (PIL.Image): Image to save
|
|
filename (str): Filename to use in MinIO
|
|
bucket_name (str): MinIO bucket name
|
|
content_type (str): Content type of the file
|
|
|
|
Returns:
|
|
bool: True if successful, False otherwise
|
|
"""
|
|
logger = logging.getLogger(__name__)
|
|
try:
|
|
# Convert PIL image to bytes
|
|
img_byte_arr = io.BytesIO()
|
|
image.save(img_byte_arr, format='PNG')
|
|
img_byte_arr.seek(0) # Move to start of the BytesIO buffer
|
|
|
|
# Upload to MinIO
|
|
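        # NOTE: this upload always targets DAILY_MAPS_BUCKET_NAME, so the bucket_name argument above is effectively ignored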
miniIO_blob_client.put_object(
|
|
DAILY_MAPS_BUCKET_NAME,
|
|
filename,
|
|
img_byte_arr,
|
|
length=len(img_byte_arr.getvalue()),
|
|
content_type=content_type
|
|
)
|
|
return True
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error saving to MinIO: {traceback.format_exc()}")
|
|
return False
|
|
|
|
|
|
def SaveImageInBlob(file_name, arr_stretched, labels = []):
|
|
|
|
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
|
|
try:
|
|
|
|
image_with_text = arr_stretched.copy()
|
|
|
|
for label in labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
# Encode the image to a memory buffer using imencode
|
|
success, encoded_image = cv2.imencode('.png', image_with_text)
|
|
AddToLog(f"success={success}")
|
|
if not success:
|
|
raise Exception("Could not encode image!")
|
|
|
|
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
|
|
|
|
image_bytes = encoded_image.tobytes()
|
|
AddToLog(f"len(image_bytes)={len(image_bytes)}")
|
|
miniIO_blob_client.put_object(
|
|
DAILY_MAPS_BUCKET_NAME,
|
|
file_name,
|
|
io.BytesIO(image_bytes),
|
|
len(image_bytes))
|
|
return True
|
|
except Exception as e:
|
|
AddToLog(f"{traceback.format_exc()}")
|
|
logger.error(f"{traceback.format_exc()}")
|
|
return False
|
|
|
|
def SaveImageInBlobLabelsOut(file_name, arr_stretched, labels, title_labels):
|
|
|
|
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
|
|
try:
|
|
|
|
image_with_text = arr_stretched.copy()
|
|
|
|
for label in labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
for label in title_labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
|
|
# Encode the image to a memory buffer using imencode
|
|
success, encoded_image = cv2.imencode('.png', image_with_text)
|
|
AddToLog(f"success={success}")
|
|
if not success:
|
|
raise Exception("Could not encode image!")
|
|
|
|
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
|
|
|
|
image_bytes = encoded_image.tobytes()
|
|
AddToLog(f"len(image_bytes)={len(image_bytes)}")
|
|
miniIO_blob_client.put_object(
|
|
DAILY_MAPS_BUCKET_NAME,
|
|
file_name,
|
|
io.BytesIO(image_bytes),
|
|
len(image_bytes))
|
|
return True
|
|
except Exception as e:
|
|
AddToLog(f"{traceback.format_exc()}")
|
|
logger.error(f"{traceback.format_exc()}")
|
|
return False
|
|
|
|
def GetLocalTimeForDate(selected_date, time_zone_s, minutes_padding = 0):
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
if minutes_padding > 0:
|
|
local_start = local_start - timedelta(minutes=minutes_padding)
|
|
local_next = local_next + timedelta(minutes=minutes_padding)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
# Format as strings
|
|
time_from_str = utc_start.strftime("%Y-%m-%d %H:%M:%S")
|
|
time_to_str = utc_next.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
return time_from_str + "+0000", time_to_str + "+0000"
|
|
|
|
def GetLocalTimeForDateSimple(selected_date, time_zone_s, minutes_padding = 0):
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
if minutes_padding > 0:
|
|
local_start = local_start - timedelta(minutes=minutes_padding)
|
|
local_next = local_next + timedelta(minutes=minutes_padding)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
return utc_start, utc_next
|
|
|
|
def GetLocalTimeEpochsForDate(selected_date, time_zone_s):
|
|
"""
|
|
Get start and end of day epochs for a given date in a specific timezone.
|
|
|
|
Args:
|
|
selected_date (str): Date in "YYYY-MM-DD" format
|
|
time_zone_s (str): Timezone string (e.g., "America/New_York")
|
|
|
|
Returns:
|
|
tuple: (start_epoch, end_epoch) - Unix timestamps for start and end of day
|
|
"""
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
# Convert to epochs (Unix timestamps)
|
|
start_epoch = int(utc_start.timestamp())
|
|
end_epoch = int(utc_next.timestamp())
|
|
|
|
return start_epoch, end_epoch
|
|
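# Illustrative sketch (assumed date/timezone): the two epochs span exactly one local day, so their
# difference is 86400 seconds except on DST-transition days.
def _example_day_epochs():
    start_epoch, end_epoch = GetLocalTimeEpochsForDate("2025-01-15", "America/New_York")
    return end_epoch - start_epoch   # -> 86400 on this date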
|
|
def UTC2Local(utc_time, time_zone_s):
    """
    Convert a UTC datetime to the given local timezone.

    Args:
        utc_time (datetime): UTC datetime; a naive value is assumed to be UTC
        time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')

    Returns:
        datetime: timezone-aware datetime expressed in the local timezone
    """
    local_tz = pytz.timezone(time_zone_s)

    if utc_time.tzinfo is None:
        utc_time = pytz.UTC.localize(utc_time)

    return utc_time.astimezone(local_tz)
|
|
|
|
def get_timezone_aware_datetime(time_str, timezone_str="America/Los_Angeles"):
|
|
"""
|
|
Convert a naive datetime string to a timezone-aware datetime object.
|
|
|
|
Parameters:
|
|
time_str: String in format 'YYYY-MM-DD HH:MM:SS'
|
|
timezone_str: String representing the timezone (default: "America/Los_Angeles")
|
|
|
|
Returns:
|
|
datetime: A timezone-aware datetime object
|
|
"""
|
|
# Parse the naive datetime
|
|
naive_dt = datetime.datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
|
|
|
|
# Get the timezone
|
|
tz = pytz.timezone(timezone_str)
|
|
|
|
# Localize the datetime (make it timezone-aware)
|
|
# localize() is the correct way to do this, as it handles DST transitions properly
|
|
aware_dt = tz.localize(naive_dt)
|
|
return aware_dt
|
|
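# Illustrative sketch (assumed timestamp): localize() attaches the timezone, including the
# correct DST offset for that date.
def _example_aware_datetime():
    dt = get_timezone_aware_datetime("2024-07-01 12:00:00", "America/Los_Angeles")
    return dt.utcoffset()   # -> timedelta(hours=-7) while PDT is in effect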
|
|
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6
|
|
}
|
|
# Add sensor columns dynamically
|
|
cols_len = len(columns)
|
|
for i in range(10):
|
|
columns[f'sensor_min_s{i}'] = i + cols_len #smell * 10 + 5
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_smell_array_from_timescale(day_data, time_from_str, device_to_index, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
's0': 2,
|
|
's1': 3,
|
|
's2': 4,
|
|
's3': 5,
|
|
's4': 6,
|
|
's5': 7,
|
|
's6': 8,
|
|
's7': 9,
|
|
's8': 10,
|
|
's9': 11
|
|
}
|
|
## Add sensor columns dynamically
|
|
#cols_len = len(columns)
|
|
#for i in range(10):
|
|
#columns[f'sensor_min_s{i}'] = i + cols_len #smell * 10 + 5
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_radar_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
's2': 2,
|
|
's3': 3,
|
|
's4': 4,
|
|
's5': 5,
|
|
's6': 6,
|
|
's7': 7,
|
|
's8': 8
|
|
}
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def ScaleTemperature(temperature_in_f):
|
|
# Define our key temperature points and their corresponding color values
|
|
temp_points = [30, 50, 70, 90, 110]
|
|
color_values = [768, 640, 384, 128, 0] # Color values in the rainbow scale
|
|
|
|
# Clamp the temperature to our range
|
|
clamped_temp = max(min(temperature_in_f, 110), 30)
|
|
|
|
# Find which segment the temperature falls into
|
|
for i in range(len(temp_points) - 1):
|
|
if temp_points[i] <= clamped_temp <= temp_points[i + 1]:
|
|
# Linear interpolation between the two nearest points
|
|
t = (clamped_temp - temp_points[i]) / (temp_points[i + 1] - temp_points[i])
|
|
color_value = int(color_values[i] + t * (color_values[i + 1] - color_values[i]))
|
|
return color_value
|
|
|
|
# Fallback (should never reach here due to clamping)
|
|
return 0
|
|
|
|
def GetTemperatureColor(temperature_in_f):
|
|
color_value = ScaleTemperature(temperature_in_f)
|
|
return BestColor(color_value)
|
|
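# Illustrative sketch: temperatures are interpolated between the anchor points in
# ScaleTemperature, so 70 F maps to colour value 384 and anything at or below 30 F clamps to 768.
def _example_temperature_scale():
    return ScaleTemperature(70), ScaleTemperature(20)   # -> (384, 768)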
|
|
def BestColor(in_val):
|
|
#this function uses numbers from 0 to 1279 to convert to rainbow from Blue to Red(1024) to Violet 1279
|
|
r,g,b=0,0,0
|
|
in_val = int(in_val)
|
|
if(in_val > 1279):
|
|
in_val = 1279
|
|
if (in_val < 256):
|
|
r = 255
|
|
g = in_val
|
|
elif (in_val < 512):
|
|
r = 511 - in_val
|
|
g = 255
|
|
elif (in_val < 768):
|
|
g = 255
|
|
b = in_val-512
|
|
elif (in_val < 1024):
|
|
g = 1023 - in_val
|
|
b = 255
|
|
else:
|
|
r = in_val - 1024
|
|
b = 255
|
|
|
|
#if (r > 255):
|
|
# print(in_val)
|
|
# print(int(r),int(g),int(b))
|
|
return(int(r),int(g),int(b))
|
|
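# Illustrative sketch: a few sample points on the BestColor ramp.
def _example_best_color():
    return BestColor(0), BestColor(640), BestColor(1279)   # -> (255, 0, 0), (0, 255, 128), (255, 0, 255)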
|
|
def GrayColor(in_val):
|
|
    # Map a value in 0..255 onto a gray level (r = g = b = clamped value)
|
|
r,g,b=0,0,0
|
|
in_val = int(in_val)
|
|
if(in_val < 0):
|
|
in_val = 0
|
|
if(in_val > 255):
|
|
in_val = 255
|
|
|
|
r = in_val
|
|
g = r
|
|
b = r
|
|
return(int(r),int(g),int(b))
|
|
|
|
def fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str):
|
|
"""
|
|
Fill numpy array with data from TimeScaleDB query results.
|
|
|
|
Parameters:
|
|
day_data: List of tuples from database query
|
|
time_from_str: Starting datetime string in format 'YYYY-MM-DD HH:MM:SS'
|
|
devices_list: List of device IDs
|
|
arr_source: Pre-initialized numpy array to fill
|
|
|
|
Returns:
|
|
numpy.ndarray: Filled array
|
|
"""
|
|
# Parse the start time
|
|
#start_time = get_timezone_aware_datetime(time_from_str, timezone_str)
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
# Create mapping of device_ids to their index positions
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define columns and their positions in the result tuple
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Process each record
|
|
for record in day_data:
|
|
# Get minute and device_id from record
|
|
record_time = record[0] # minute column
|
|
device_id = record[1] # device_id column
|
|
|
|
if record_time and device_id:
|
|
# Calculate minute delta
|
|
minute_delta = int((record_time - start_time).total_seconds() / 60)
|
|
|
|
if 0 <= minute_delta < arr_source.shape[1]:
|
|
# Calculate base index for this device
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Fill data for each sensor/measurement type
|
|
for col_name, col_offset in columns.items():
|
|
value = record[col_offset]
|
|
if value is not None: # Skip NULL values
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
arr_source[row_idx, minute_delta] = value
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_array_from_timescale_bad(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record) - KEEP EXACT SAME ORDER as original
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Pre-compute column keys list for consistent indexing
|
|
column_keys = list(columns.keys())
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
# Use pre-computed column_keys list for consistent indexing
|
|
row_idx = base_idx + column_keys.index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
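# NOTE: the definition below re-defines fast_fill_array_from_timescale; at import time Python
# keeps only this later version, so the earlier definition above is effectively dead code.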
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record) - KEEP EXACT SAME ORDER as original
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Pre-compute column keys list for consistent indexing
|
|
column_keys = list(columns.keys())
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
# Use pre-computed column_keys list for consistent indexing
|
|
row_idx = base_idx + column_keys.index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def CalcExtremes(arr_source, length, height):
|
|
"""
|
|
Calculate min and max values for each row within legal bounds.
|
|
Optimized version using numpy vectorized operations.
|
|
|
|
Parameters:
|
|
arr_source: numpy array of shape (height, length+4) containing data and bounds
|
|
length: number of data points to process (typically 1440 for minutes in a day)
|
|
height: number of rows in the array
|
|
|
|
Returns:
|
|
numpy array with min/max values stored in columns 1442 and 1443
|
|
"""
|
|
# Extract the data portion and bounds
|
|
data = arr_source[:, :length]
|
|
ignore_below = arr_source[:, 1440:1441] # Keep 2D shape for broadcasting
|
|
ignore_above = arr_source[:, 1441:1442] # Keep 2D shape for broadcasting
|
|
|
|
# Create masks for valid values
|
|
above_min_mask = data >= ignore_below
|
|
below_max_mask = data <= ignore_above
|
|
valid_mask = above_min_mask & below_max_mask
|
|
|
|
# Create a masked array to handle invalid values
|
|
masked_data = np.ma.array(data, mask=~valid_mask)
|
|
|
|
# Calculate min and max values for each row
|
|
row_mins = np.ma.min(masked_data, axis=1).filled(-0.001)
|
|
row_maxs = np.ma.max(masked_data, axis=1).filled(-0.001)
|
|
|
|
# Store results
|
|
arr_source[:, 1442] = row_mins
|
|
arr_source[:, 1443] = row_maxs
|
|
|
|
return arr_source
|
|
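# Illustrative sketch (synthetic data): readings outside the per-row legal bounds in columns
# 1440/1441 are ignored, and the surviving min/max land in columns 1442/1443.
def _example_calc_extremes():
    row = np.full((1, 1444), -0.001)
    row[0, :3] = [50.0, 200.0, 60.0]   # two in-range readings and one out-of-range spike
    row[0, 1440] = 0.0                 # ignore_below
    row[0, 1441] = 100.0               # ignore_above
    out = CalcExtremes(row, 1440, 1)
    return out[0, 1442], out[0, 1443]  # -> (50.0, 60.0)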
|
|
def plot(arr, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
|
|
color='blue', style='histogram', bins=1000):
|
|
"""
|
|
    Plot a 1D numpy array as a line, scatter or histogram plot
|
|
|
|
Parameters:
|
|
arr : 1D numpy array
|
|
title : str, plot title
|
|
figsize : tuple, figure size in inches
|
|
color : str, line/point color
|
|
    style : str, 'line', 'scatter' or 'histogram'
|
|
"""
|
|
title = filename
|
|
plt.figure(figsize=figsize)
|
|
x = np.arange(len(arr))
|
|
|
|
if style == 'line':
|
|
x = np.arange(len(arr))
|
|
plt.plot(x, arr, color=color)
|
|
elif style == 'scatter':
|
|
x = np.arange(len(arr))
|
|
plt.scatter(x, arr, color=color, alpha=0.6)
|
|
elif style == 'histogram':
|
|
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
|
|
plt.yscale('log') # Using log scale for better visualization
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Frequency')
|
|
|
|
plt.title(title)
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
plt.grid(True, alpha=0.3)
|
|
plt.tight_layout()
|
|
plt.savefig(filename)
|
|
plt.close()
|
|
print(f"Plot saved to: {filename}")
|
|
#plt.show()
|
|
|
|
def ShowArray(arr, threshold, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
|
|
color='blue', style='histogram', bins=1000):
|
|
"""
|
|
    Plot a 1D numpy array as a line (with a threshold marker), scatter or histogram plot
|
|
|
|
Parameters:
|
|
arr : 1D numpy array
|
|
title : str, plot title
|
|
figsize : tuple, figure size in inches
|
|
color : str, line/point color
|
|
    style : str, 'line', 'scatter' or 'histogram'
|
|
"""
|
|
title = filename
|
|
plt.figure(figsize=figsize)
|
|
x = np.arange(len(arr))
|
|
|
|
if style == 'line':
|
|
x = np.arange(len(arr))
|
|
plt.plot(x, arr, color=color)
|
|
plt.axhline(y=threshold, color='red', linestyle='--',
|
|
label=f'Threshold: {threshold:.3f}')
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
elif style == 'scatter':
|
|
x = np.arange(len(arr))
|
|
plt.scatter(x, arr, color=color, alpha=0.6)
|
|
elif style == 'histogram':
|
|
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
|
|
plt.yscale('log') # Using log scale for better visualization
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Frequency')
|
|
|
|
plt.title(title)
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
plt.grid(True, alpha=0.3)
|
|
plt.tight_layout()
|
|
plt.savefig(filename)
|
|
plt.close()
|
|
print(f"Plot saved to: {filename}")
|
|
#plt.show()
|
|
|
|
def AddLimits_optimized(arr_source, devices_c, sensors_c, percentile):
|
|
"""
|
|
Vectorized version of AddLimits that processes all sensors at once.
|
|
|
|
Parameters:
|
|
arr_source: array of shape (devices_c * sensors_c, 1444)
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
percentile: parameter for clean_data_vectorized
|
|
"""
|
|
total_sensors = devices_c * sensors_c
|
|
|
|
# Create arrays of sensor indices for all rows
|
|
sensor_indices = np.arange(total_sensors) % sensors_c
|
|
|
|
# Convert sensor_legal_values into arrays for vectorized access
|
|
sensor_types = np.array([s_table[i] for i in range(sensors_c)])
|
|
min_vals = np.array([sensor_legal_values[t][0] for t in sensor_types])
|
|
max_vals = np.array([sensor_legal_values[t][1] for t in sensor_types])
|
|
windows = np.array([sensor_legal_values[t][2] for t in sensor_types])
|
|
|
|
# Get values for each row based on sensor type
|
|
row_windows = windows[sensor_indices]
|
|
row_mins = min_vals[sensor_indices]
|
|
row_maxs = max_vals[sensor_indices]
|
|
|
|
# Process rows that need cleaning (window > 2)
|
|
clean_mask = row_windows > 2
|
|
if np.any(clean_mask):
|
|
# Clean each row with its corresponding window size
|
|
for window in np.unique(row_windows[clean_mask]):
|
|
# Get indices of rows that need this window size
|
|
rows_to_clean = np.where(clean_mask & (row_windows == window))[0]
|
|
|
|
# Clean each row individually (since clean_data_vectorized expects 1D input)
|
|
for row_idx in rows_to_clean:
|
|
arr_source[row_idx, :1440] = clean_data_vectorized(
|
|
arr_source[row_idx, :1440],
|
|
window,
|
|
percentile
|
|
)
|
|
|
|
# Set min/max values for all rows at once
|
|
arr_source[:, 1440] = row_mins
|
|
arr_source[:, 1441] = row_maxs
|
|
|
|
return arr_source
|
|
|
|
def AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile):
|
|
"""
|
|
Vectorized version of AddLimits that processes all sensors at once.
|
|
Parameters:
|
|
arr_source: array of shape (devices_c * sensors_c, 1444)
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
percentile: parameter for clean_data_vectorized
|
|
"""
|
|
total_sensors = devices_c * sensors_c
|
|
# Create arrays of sensor indices for all rows
|
|
sensor_indices = np.arange(total_sensors) % sensors_c
|
|
# Convert sensor_legal_values into arrays for vectorized access
|
|
sensor_types = np.array([smells_table[i] for i in range(sensors_c)])
|
|
min_vals = np.array([smell_legal_values[t][0] for t in sensor_types])
|
|
max_vals = np.array([smell_legal_values[t][1] for t in sensor_types])
|
|
# Get values for each row based on sensor type
|
|
row_mins = min_vals[sensor_indices]
|
|
row_maxs = max_vals[sensor_indices]
|
|
|
|
# Replace values smaller than smell_min and larger than smell_max with no_smell
|
|
# Create a mask for the data points (first 1440 columns)
|
|
data_mask_below = arr_source[:, :1440] < smell_min
|
|
data_mask_above = arr_source[:, :1440] > smell_max
|
|
data_mask_invalid = data_mask_below | data_mask_above
|
|
|
|
# Replace invalid values with no_smell
|
|
arr_source[:, :1440][data_mask_invalid] = no_smell
|
|
|
|
# Set min/max values for all rows at once
|
|
arr_source[:, 1440] = row_mins
|
|
arr_source[:, 1441] = row_maxs
|
|
return arr_source
|
|
|
|
def AddLimits(arr_source, devices_c, sensors_c, percentile):
|
|
for y in range(devices_c*sensors_c):
|
|
sensor_index = y % sensors_c
|
|
min_ok, max_ok, window = sensor_legal_values[s_table[sensor_index]]
|
|
#if EnablePlot:
|
|
#if (y == 33):
|
|
#print("stop")
|
|
#plot(arr_source[y, :1440], "before_clean_sensor.png")
|
|
if window > 2:
|
|
arr_source[y, :1440] = clean_data_vectorized(arr_source[y, :1440], window, percentile)
|
|
|
|
#if EnablePlot:
|
|
#if (y == 33):
|
|
#print("stop")
|
|
#plot(arr_source[y, :1440], "after_clean_sensor.png")
|
|
|
|
arr_source[y][1440] = min_ok
|
|
arr_source[y][1441] = max_ok
|
|
return arr_source
|
|
|
|
def clean_data_vectorized(data, window, percentile):
|
|
"""
|
|
Vectorized version of clean_data function using pure numpy
|
|
First removes zeros, then cleans outliers
|
|
|
|
Parameters:
|
|
data: numpy array of sensor readings
|
|
window: int, size of rolling window
|
|
percentile: float, percentile threshold for deviation filtering
|
|
"""
|
|
# Create a copy to avoid modifying original data
|
|
working_data = data.copy()
|
|
|
|
# Replace zeros with NaN
|
|
zero_mask = working_data == 0
|
|
working_data[zero_mask] = np.nan
|
|
|
|
# Create rolling window view of the data
|
|
def rolling_window(a, window):
|
|
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
|
|
strides = a.strides + (a.strides[-1],)
|
|
return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
|
|
|
|
# Pad array for edge handling
|
|
pad_width = window // 2
|
|
padded = np.pad(working_data, pad_width, mode='edge')
|
|
|
|
# Create rolling windows
|
|
windows = rolling_window(padded, window)
|
|
|
|
# Calculate rolling median (ignoring NaN values)
|
|
medians = np.nanmedian(windows, axis=1)
|
|
|
|
# Forward/backward fill any NaN in medians
|
|
# Forward fill
|
|
mask = np.isnan(medians)
|
|
idx = np.where(~mask, np.arange(mask.shape[0]), 0)
|
|
np.maximum.accumulate(idx, out=idx)
|
|
medians[mask] = medians[idx[mask]]
|
|
|
|
# Backward fill any remaining NaNs
|
|
mask = np.isnan(medians)
|
|
idx = np.where(~mask, np.arange(mask.shape[0]), mask.shape[0] - 1)
|
|
idx = np.minimum.accumulate(idx[::-1])[::-1]
|
|
medians[mask] = medians[idx[mask]]
|
|
|
|
# Calculate deviations (ignoring NaN values)
|
|
deviations = np.abs(working_data - medians)
|
|
|
|
# Calculate threshold (ignoring NaN values)
|
|
threshold = np.nanpercentile(deviations, percentile)
|
|
|
|
# Create mask and replace outliers with median values
|
|
# Points are good if they're not NaN and deviation is within threshold
|
|
good_points = (~np.isnan(working_data)) & (deviations <= threshold)
|
|
|
|
# Replace all bad points (including zeros and outliers) with median values
|
|
result = np.where(good_points, working_data, medians)
|
|
|
|
return result
|
|
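# Illustrative sketch (synthetic signal): dropouts (zeros) and large spikes are replaced by the
# local rolling median; the exact output depends on the window and percentile chosen.
def _example_clean_data():
    noisy = np.array([20.0] * 30)
    noisy[10] = 0.0      # dropout
    noisy[20] = 500.0    # spike
    return clean_data_vectorized(noisy, window=5, percentile=90)   # ~flat series of 20.0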
|
|
|
|
def process_chunk(args):
|
|
"""
|
|
Process a chunk of rows
|
|
"""
|
|
chunk, sensors_c, sensor_legal_values, s_table, window, percentile = args
|
|
result = np.copy(chunk)
|
|
|
|
# Process all time series in the chunk at once
|
|
result[:, :1440] = np.array([
|
|
clean_data_vectorized(row[:1440], window, percentile)
|
|
for row in chunk
|
|
])
|
|
|
|
# Set limits for all rows in chunk using vectorized operations
|
|
sensor_indices = np.arange(len(chunk)) % sensors_c
|
|
min_values = np.array([sensor_legal_values[s_table[i]][0] for i in sensor_indices])
|
|
max_values = np.array([sensor_legal_values[s_table[i]][1] for i in sensor_indices])
|
|
|
|
result[:, 1440] = min_values
|
|
result[:, 1441] = max_values
|
|
|
|
return result
|
|
|
|
|
|
def FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw):
|
|
"""
|
|
Optimized version of FillImage function that fills the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
group_by: grouping strategy ("sensortype" or other)
|
|
bw: boolean flag for black and white output
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
    and the normalized VOC rows (values scaled into the 0-1280 range)
|
|
"""
|
|
stripes = devices_c * sensors_c
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Pre-calculate VOC rows mask
|
|
if group_by != "sensortype":
|
|
voc_rows = np.arange(stripes) >= 5 * devices_c
|
|
else:
|
|
voc_rows = (np.arange(stripes) % sensors_c) >= 5
|
|
|
|
# Pre-calculate destination row mapping for sensortype grouping
|
|
if group_by == "sensortype":
|
|
row_indices = np.arange(stripes)
|
|
sensor_indices = row_indices % sensors_c
|
|
device_indices = row_indices // sensors_c
|
|
dest_rows = sensor_indices * devices_c + device_indices
|
|
dest_rows = dest_rows[:, np.newaxis] * stretch_by + np.arange(stretch_by)
|
|
else:
|
|
row_indices = np.arange(stripes)[:, np.newaxis] * stretch_by + np.arange(stretch_by)
|
|
|
|
# Optimize color calculation functions
|
|
def best_color_vectorized(vals):
|
|
"""Vectorized version of BestColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 1279).astype(np.int32)
|
|
r = np.zeros_like(vals, dtype=np.int32)
|
|
g = np.zeros_like(vals, dtype=np.int32)
|
|
b = np.zeros_like(vals, dtype=np.int32)
|
|
|
|
# Region 0-255
|
|
mask1 = vals < 256
|
|
r[mask1] = 255
|
|
g[mask1] = vals[mask1]
|
|
|
|
# Region 256-511
|
|
mask2 = (vals >= 256) & (vals < 512)
|
|
r[mask2] = 511 - vals[mask2]
|
|
g[mask2] = 255
|
|
|
|
# Region 512-767
|
|
mask3 = (vals >= 512) & (vals < 768)
|
|
g[mask3] = 255
|
|
b[mask3] = vals[mask3] - 512
|
|
|
|
# Region 768-1023
|
|
mask4 = (vals >= 768) & (vals < 1024)
|
|
g[mask4] = 1023 - vals[mask4]
|
|
b[mask4] = 255
|
|
|
|
# Region 1024-1279
|
|
mask5 = vals >= 1024
|
|
r[mask5] = vals[mask5] - 1024
|
|
b[mask5] = 255
|
|
|
|
return r, g, b
|
|
|
|
def gray_color_vectorized(vals):
|
|
"""Vectorized version of GrayColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 255).astype(np.int32)
|
|
return vals, vals, vals
|
|
|
|
color_func = gray_color_vectorized if bw else best_color_vectorized
|
|
|
|
# Process all rows at once
|
|
valid_mask = scaled_day[:, :minutes] != -0.001
|
|
big_min = scaled_day[:, 1442:1443] # Keep 2D shape for broadcasting
|
|
big_max = scaled_day[:, 1443:1444]
|
|
|
|
# Calculate k factors where max > min
|
|
valid_range_mask = big_max > big_min
|
|
k = np.zeros_like(big_min)
|
|
k[valid_range_mask] = (1280 if not bw else 255) / (big_max[valid_range_mask] - big_min[valid_range_mask])
|
|
|
|
# Calculate normalized values for all rows at once
|
|
normalized_vals = np.zeros_like(scaled_day[:, :minutes])
|
|
valid_range_indices = np.where(valid_range_mask)[0]
|
|
|
|
normalized_vals[valid_range_indices] = (
|
|
k[valid_range_indices] *
|
|
(scaled_day[valid_range_indices, :minutes] - big_min[valid_range_indices])
|
|
)
|
|
|
|
# Invert VOC rows
|
|
normalized_vals[voc_rows] = (1279 if not bw else 255) - normalized_vals[voc_rows]
|
|
|
|
# Apply valid mask
|
|
normalized_vals[~valid_mask] = 0
|
|
|
|
# Convert to RGB
|
|
r, g, b = color_func(normalized_vals)
|
|
|
|
# Create RGB array
|
|
rgb_values = np.stack([r, g, b], axis=-1)
|
|
|
|
# Handle special case where max == min
|
|
equal_range_mask = ~valid_range_mask
|
|
if np.any(equal_range_mask):
|
|
rgb_values[equal_range_mask.ravel()] = 128
|
|
|
|
# Fill the stretched array efficiently
|
|
if group_by == "sensortype":
|
|
arr_stretched[dest_rows] = rgb_values[:, None]
|
|
else:
|
|
arr_stretched[row_indices] = rgb_values[:, None]
|
|
|
|
return arr_stretched, normalized_vals[voc_rows]
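
# Illustrative sketch (not called anywhere in this module): minimal inputs for
# FillImage_optimized. One device with 15 sensors and a stretch factor of 3; the
# per-minute values are synthetic, columns 1442/1443 carry the per-row min/max
# used for scaling, and -0.001 marks a missing minute.
def _example_fill_image_optimized():
    devices_c, sensors_c, stretch_by, minutes = 1, 15, 3, 1440
    scaled_day = np.full((devices_c * sensors_c, minutes + 4), -0.001)
    scaled_day[:, :minutes] = np.linspace(0.0, 1.0, minutes)
    scaled_day[:, 1442] = 0.0   # row min
    scaled_day[:, 1443] = 1.0   # row max
    arr = np.zeros((devices_c * sensors_c * stretch_by, minutes, 3), dtype=np.uint8)
    return FillImage_optimized(scaled_day, devices_c, sensors_c, arr, "location", bw=False)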
|
|
|
|
|
|
def FillSmellImage_optimized(scaled_day, arr_stretched, y_offset):
|
|
"""
|
|
Fill the stretched array with colored sensor data from scaled_day.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (70, 1444) containing sensor readings
|
|
arr_stretched: 3D array of shape (2685, 1640, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stretch_by = 20
|
|
x_offset = 200
|
|
|
|
def best_color_vectorizedS(vals):
|
|
"""Vectorized version of BestColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 1279).astype(np.int32)
|
|
r = np.zeros_like(vals, dtype=np.int32)
|
|
g = np.zeros_like(vals, dtype=np.int32)
|
|
b = np.zeros_like(vals, dtype=np.int32)
|
|
|
|
# Region 0-255
|
|
mask1 = vals < 256
|
|
r[mask1] = 255
|
|
g[mask1] = vals[mask1]
|
|
|
|
# Region 256-511
|
|
mask2 = (vals >= 256) & (vals < 512)
|
|
r[mask2] = 511 - vals[mask2]
|
|
g[mask2] = 255
|
|
|
|
# Region 512-767
|
|
mask3 = (vals >= 512) & (vals < 768)
|
|
g[mask3] = 255
|
|
b[mask3] = vals[mask3] - 512
|
|
|
|
# Region 768-1023
|
|
mask4 = (vals >= 768) & (vals < 1024)
|
|
g[mask4] = 1023 - vals[mask4]
|
|
b[mask4] = 255
|
|
|
|
# Region 1024-1279
|
|
mask5 = vals >= 1024
|
|
r[mask5] = vals[mask5] - 1024
|
|
b[mask5] = 255
|
|
|
|
return r, g, b
|
|
|
|
# Process each row in scaled_day
|
|
for row_idx in range(scaled_day.shape[0]):
|
|
# Extract min and max for this row
|
|
row_min = scaled_day[row_idx, 1442]
|
|
row_max = scaled_day[row_idx, 1443]
|
|
|
|
# Get data for this row (first 1440 elements)
|
|
row_data = scaled_day[row_idx, :1440]
|
|
|
|
# Check if min and max are the same
|
|
if row_min == row_max:
|
|
# Create gray stripe
|
|
stripe = np.ones((stretch_by, 1440, 3), dtype=np.int32) * 128
|
|
else:
|
|
# Normalize the data between 0 and 1279
|
|
k = 1280 / (row_max - row_min)
|
|
normalized_vals = k * (row_data - row_min)
|
|
normalized_vals = np.clip(normalized_vals, 0, 1279)
|
|
|
|
# Convert to RGB
|
|
r, g, b = best_color_vectorizedS(normalized_vals)
|
|
|
|
# Create RGB stripe
|
|
stripe = np.zeros((stretch_by, 1440, 3), dtype=np.int32)
|
|
|
|
# Fill stripe with the same color pattern for all stretch_by rows
|
|
for i in range(stretch_by):
|
|
stripe[i, :, 0] = r
|
|
stripe[i, :, 1] = g
|
|
stripe[i, :, 2] = b
|
|
|
|
# Calculate the y position for this stripe
|
|
y_pos = y_offset + row_idx * stretch_by
|
|
|
|
# Place the stripe into arr_stretched
|
|
print(stretch_by, stripe.shape, arr_stretched.shape, y_pos)
|
|
arr_stretched[y_pos:y_pos+stretch_by, x_offset:x_offset+1440, :] = stripe
|
|
|
|
return arr_stretched
|
|
|
|
def FillImage(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw):
|
|
"""
|
|
Fill the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stripes = devices_c * sensors_c
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Create a boolean mask for VOC sensors
|
|
if group_by != "sensortype":
|
|
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
|
|
else:
|
|
voc_rows = np.array([i for i in range(stripes) if int(i % sensors_c) >= 5])
|
|
# Vectorize the BestColor function
|
|
if not bw:
|
|
vectorized_best_color = np.vectorize(BestColor)
|
|
else:
|
|
vectorized_best_color = np.vectorize(GrayColor)
|
|
|
|
# Process each row
|
|
for row in range(stripes):
|
|
|
|
row_data = scaled_day[row, :minutes] # Get minute data
|
|
|
|
#if row == 33:
|
|
# print("stop")
|
|
# plot(row_data, "row_data.png")
|
|
big_min = scaled_day[row, 1442] # min value
|
|
big_max = scaled_day[row, 1443] # max value
|
|
|
|
# Create mask for valid values
|
|
valid_mask = row_data != -0.001
|
|
|
|
# Initialize RGB row with zeros
|
|
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
|
|
|
|
if big_max > big_min:
|
|
# Scale factor
|
|
if not bw:
|
|
k = 1280/(big_max-big_min)
|
|
else:
|
|
k = 255/(big_max-big_min)
|
|
# Calculate normalized values
|
|
normalized_vals = k * (row_data - big_min)
|
|
|
|
# Invert if it's a VOC row
|
|
if row in voc_rows:
|
|
if not bw:
|
|
normalized_vals = 1279 - normalized_vals
|
|
else:
|
|
normalized_vals = 255 - normalized_vals
|
|
|
|
# Apply valid mask
|
|
normalized_vals = np.where(valid_mask, normalized_vals, 0)
|
|
#if row == 33:
|
|
# plot(normalized_vals, "normalized_vals.png")
|
|
|
|
# Convert to RGB colors (vectorized)
|
|
r, g, b = vectorized_best_color(normalized_vals)
|
|
|
|
# Combine into RGB array
|
|
rgb_row[valid_mask] = np.stack([r[valid_mask],
|
|
g[valid_mask],
|
|
b[valid_mask]], axis=1)
|
|
else:
|
|
# Set to gray where valid
|
|
rgb_row[valid_mask] = 128
|
|
|
|
|
|
if group_by == "sensortype":
|
|
|
|
# Fill the stretched rows
|
|
sensor_index = row % sensors_c
|
|
device_index = int(row/sensors_c)
|
|
dest_row = sensor_index * devices_c + device_index #0-0, 1-
|
|
start_idx = dest_row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
else:
|
|
# Fill the stretched rows
|
|
start_idx = row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
|
|
return arr_stretched
|
|
|
|
def FillRadarImage(scaled_day, devices_c, bands, arr_stretched, group_by, map_type):
|
|
"""
|
|
Fill the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
bands: number of bands per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stripes = devices_c * bands
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Create a boolean mask for VOC sensors
|
|
if group_by != "sensortype":
|
|
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
|
|
else:
|
|
voc_rows = np.array([i for i in range(stripes) if int(i % bands) >= 5])
|
|
# Vectorize the BestColor function
|
|
if map_type == 3:
|
|
vectorized_best_color = np.vectorize(BestColor)
|
|
else:
|
|
vectorized_best_color = np.vectorize(GrayColor)
|
|
|
|
# Process each row
|
|
for row in range(stripes):
|
|
|
|
row_data = scaled_day[row, :minutes] # Get minute data
|
|
|
|
#if row == 33:
|
|
# print("stop")
|
|
# plot(row_data, "row_data.png")
|
|
big_min = 0 #scaled_day[row, 1442] # min value
|
|
big_max = 255 #scaled_day[row, 1443] # max value
|
|
|
|
# Create mask for valid values
|
|
valid_mask = row_data != -0.001
|
|
|
|
# Initialize RGB row with zeros
|
|
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
|
|
|
|
if big_max > big_min:
|
|
# Scale factor
|
|
if map_type == 3:
|
|
k = 1280/(big_max-big_min)
|
|
else:
|
|
k = 255/(big_max-big_min)
|
|
# Calculate normalized values
|
|
normalized_vals = k * (row_data - big_min)
|
|
|
|
# Invert if it's a VOC row
|
|
if row in voc_rows:
|
|
if map_type == 3:
|
|
normalized_vals = 1279 - normalized_vals
|
|
else:
|
|
normalized_vals = 255 - normalized_vals
|
|
|
|
# Apply valid mask
|
|
normalized_vals = np.where(valid_mask, normalized_vals, 0)
|
|
#if row == 33:
|
|
# plot(normalized_vals, "normalized_vals.png")
|
|
|
|
# Convert to RGB colors (vectorized)
|
|
r, g, b = vectorized_best_color(normalized_vals)
|
|
|
|
# Combine into RGB array
|
|
rgb_row[valid_mask] = np.stack([r[valid_mask],
|
|
g[valid_mask],
|
|
b[valid_mask]], axis=1)
|
|
else:
|
|
# Set to gray where valid
|
|
rgb_row[valid_mask] = 128
|
|
|
|
|
|
if group_by == "sensortype":
|
|
|
|
# Fill the stretched rows
|
|
band_index = row % bands
|
|
device_index = int(row/bands)
|
|
dest_row = band_index * devices_c + device_index #0-0, 1-
|
|
start_idx = dest_row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
else:
|
|
# Fill the stretched rows
|
|
start_idx = row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
|
|
return arr_stretched
|
|
|
|
def GetFullLocMapDetails(map_file):
|
|
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
|
|
local_timezone = pytz.timezone('America/Los_Angeles') # Replace with your local timezone
|
|
dest_path = os.path.dirname(map_file)
|
|
|
|
parts = map_file.split("/")
|
|
deployment = parts[-2]
|
|
parts1 = parts[-1].split("_")
|
|
date_string = parts1[1]
|
|
|
|
deployments = GetDeploymentB(deployment, -1) #All
|
|
last_locations_file = ""
|
|
last_per_minute_file = ""
|
|
|
|
    today = datetime.datetime.today()  # 'datetime' is the module here (see imports), so use datetime.datetime
|
|
deployment_details = deployments[0]
|
|
deployment_pair = deployment_details[0]
|
|
proximity_lst = deployment_details[1]
|
|
|
|
    date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_object_midnight = local_timezone.localize(date_object.replace(hour=0, minute=0, second=0, microsecond=0))
|
|
selected_epoch = int(date_object_midnight.timestamp())
|
|
|
|
    sel_date = datetime.datetime.fromtimestamp(selected_epoch)
|
|
devices_list_str = GetDevicesList(deployment_details, sel_date)#.split(',')
|
|
devices_list = ast.literal_eval(devices_list_str)
|
|
return devices_list, selected_epoch, dest_path
|
|
|
|
def median_filter(data, window_size):
|
|
filtered_data = []
|
|
print(len(data))
|
|
window = deque(maxlen=window_size)
|
|
last_value = -1
|
|
offset = 0
|
|
added_old = 0
|
|
for value in data:
|
|
if value != '':
|
|
added_old = 0
|
|
last_value = value
|
|
window.append(value)
|
|
if len(window) == window_size:
|
|
# Sort the window and get the median value
|
|
sorted_window = sorted(window)
|
|
median = sorted_window[window_size // 2]
|
|
filtered_data.append(median)
|
|
else:
|
|
if last_value != -1:
|
|
if added_old < window_size:
|
|
added_old = added_old + 1
|
|
window.append(last_value)
|
|
else:
|
|
window.append(-1)
|
|
|
|
if len(window) == window_size:
|
|
# Sort the window and get the median value
|
|
sorted_window = sorted(window)
|
|
median = sorted_window[window_size // 2]
|
|
filtered_data.append(median)
|
|
|
|
else:
|
|
offset +=1
|
|
|
|
if len(filtered_data) > 0:
|
|
offset += (window_size // 2)
|
|
        # if it starts empty, just leave it that way; do not fake values backwards from midnight
|
|
first_val = -1# filtered_data[0]
|
|
last_val = filtered_data[-1]
|
|
front_padding = [first_val] * offset
|
|
remaining = len(data) - len(filtered_data) - len(front_padding)
|
|
back_padding = [last_val] * remaining
|
|
out_data = front_padding + filtered_data + back_padding
|
|
else:
|
|
out_data = data
|
|
#add front and back padding
|
|
|
|
return out_data
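
# Illustrative sketch (not called anywhere in this module): median_filter keeps the
# output the same length as the input, bridging '' gaps with the last seen value for
# up to window_size samples before falling back to -1. Sample values are made up.
def _example_median_filter():
    data = [3, 3, '', '', 9, 3, 3, 3, '', 3]
    return median_filter(data, 3)   # the glitch value 9 is median-filtered away, length stays 10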
|
|
|
|
def FilterGlitches(wave_in, filter_minutes):
|
|
|
|
if(filter_minutes > 0):
|
|
notfiltered_wave = [i[0] for i in wave_in]
|
|
filtered_wave = median_filter(notfiltered_wave, filter_minutes)
|
|
for i, value in enumerate(filtered_wave):
|
|
wave_in[i][0] = value
|
|
|
|
return wave_in
|
|
|
|
def setup_timezone_converter(time_zone_st):
|
|
"""
|
|
Setup timezone converter to be reused
|
|
|
|
Parameters:
|
|
time_zone_st (str): Timezone string (e.g. 'Europe/Berlin')
|
|
|
|
Returns:
|
|
pytz.timezone: Timezone object for conversion
|
|
"""
|
|
return pytz.timezone(time_zone_st)
|
|
|
|
def ReadDailyRadar(MAC, current_date):
|
|
|
|
#This will return all 1 Minute radar data for each gate in the file
|
|
#Will return list (2 items) of lists: Maxes, Mins
|
|
|
|
    # time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)  # disabled: selected_date/time_zone_s are not defined in this scope and the values were never used
|
|
|
|
    start_of_day = ToLocal(calendar.timegm(datetime.datetime(current_date.year, current_date.month, current_date.day, 0, 0).timetuple()))
|
|
end_of_day = start_of_day + 1440 * 60
|
|
file = os.path.join(scriptDir, "DB/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
|
|
file = file.replace("\\","/")
|
|
file1 = os.path.join(scriptDir, "DB/processed_db/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
|
|
file1 = file1.replace("\\","/")
|
|
    if (not os.path.exists(file) and not os.path.exists(file1)):
|
|
print(file + " and " + file1 + " are not found")
|
|
return []
|
|
result = []
|
|
min_OK = "0"
|
|
sqlr = "SELECT * FROM radars WHERE time >= "+str(start_of_day) +" and time < "+str(end_of_day) +" ORDER BY time ASC"
|
|
#sqlr = "SELECT Date, high, low from "+sensor.lower()+"s1Min"+" WHERE low >= "+min_OK+" and Date >= "+str(start_of_day) +" and Date < "+str(end_of_day)
|
|
print(sqlr)
|
|
if os.path.exists(file):
|
|
result = QuerrySql(file, sqlr)
|
|
elif os.path.exists(file1):
|
|
result = QuerrySql(file1, sqlr)
|
|
|
|
# M0 ............M8 S2 ........S8
|
|
#day_minutes_data = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]] * (24 * 60 + 2)
|
|
day_minutes_data = [[0] * 16 for _ in range(24 * 60)]
|
|
#for each gate lets find maximum value per minute
|
|
for mgate in range(9):
|
|
max_per_min = 0
|
|
for minute_data in result:
|
|
seconde = minute_data[0]
|
|
            date_time_minute = datetime.datetime.fromtimestamp(seconde)
|
|
minute_m = 60*date_time_minute.hour+date_time_minute.minute
|
|
if minute_data[mgate + 6] > day_minutes_data[minute_m][mgate]:
|
|
day_minutes_data[minute_m][mgate] = minute_data[mgate + 6]
|
|
|
|
for sgate in range(7):
|
|
for minute_data in result:
|
|
seconde = minute_data[0]
|
|
            date_time_minute = datetime.datetime.fromtimestamp(seconde)
|
|
minute_m = 60*date_time_minute.hour+date_time_minute.minute
|
|
if minute_data[sgate + 17] > day_minutes_data[minute_m][sgate+9]:
|
|
day_minutes_data[minute_m][sgate+9] = minute_data[sgate + 17]
|
|
|
|
return day_minutes_data
|
|
|
|
|
|
def FromLocalMidnight(epoch_time, local_delta):
|
|
|
|
# Convert epoch time to UTC datetime object
|
|
print(type(epoch_time))
|
|
print(epoch_time)
|
|
local_datetime = datetime.datetime.utcfromtimestamp(epoch_time+local_delta).replace(tzinfo=pytz.UTC)
|
|
|
|
# Calculate minute count from midnight
|
|
minutes_from_midnight = (local_datetime - local_datetime.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60
|
|
return minutes_from_midnight
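
# Illustrative sketch (not called anywhere in this module): FromLocalMidnight expects
# local_delta to be the UTC offset of the local zone in seconds, so an epoch that is
# 01:30 local time yields 90.0. The epoch and the -7 h offset below are arbitrary.
def _example_from_local_midnight():
    epoch_utc = 1_700_000_000        # arbitrary UTC epoch second
    assumed_offset = -7 * 3600       # e.g. US Pacific daylight time
    return FromLocalMidnight(epoch_utc, assumed_offset)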
|
|
|
|
def process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest):
|
|
"""
|
|
NumPy-based version of wave processing
|
|
|
|
Parameters:
|
|
my_data: List of tuples containing (time_val, device_id, other radar_fields_of_interest)
|
|
time_zone_s: Target timezone string
|
|
device_id_2_threshold: Dictionary mapping device_ids to their thresholds
|
|
|
|
Returns:
|
|
    1440-element numpy array: for each minute, 0 when no device exceeded its
    threshold, otherwise 1 + the index of the strongest device in
    device_id_2_threshold (insertion order). If my_data is empty, a list of
    ["", -1] placeholders is returned instead.
|
|
"""
|
|
wave_m = None
|
|
tz = pytz.timezone(time_zone_s)
|
|
if not my_data:
|
|
return [["", -1] for _ in range(1440)]
|
|
|
|
vectorized_BestColor = np.vectorize(BestColor)
|
|
stripes = len(device_id_2_threshold)
|
|
stretch_by = 5
|
|
minutes = 1440
|
|
arr_source = np.zeros((int(stripes), minutes), dtype=np.float32)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
index_map = {word: idx for idx, word in enumerate(radar_fields_of_interest)}
|
|
devices_map = {word: idx for idx, word in enumerate(device_id_2_threshold)}
|
|
times = []
|
|
start_time = 0
|
|
for data_set in my_data:
|
|
time_stamp = data_set[0]
|
|
if start_time == 0:
|
|
|
|
# Convert timestamp to a datetime object in UTC
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
local_time = time_stamp.astimezone(local_tz)
|
|
# Set the time to the start of the day in the local time zone
|
|
start_of_day_local = local_time.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
# Convert the start of the day back to UTC
|
|
start_time = start_of_day_local.astimezone(pytz.utc)
|
|
|
|
diff = time_stamp - start_time
|
|
minute = int(diff.total_seconds() / 60)
|
|
device_id = data_set[1]
|
|
field_name = device_id_2_threshold[device_id][0]
|
|
field_index = index_map[field_name]
|
|
threshold = device_id_2_threshold[device_id][1]
|
|
value = data_set[2+field_index]
|
|
if value > threshold:
|
|
arr_source[devices_map[device_id]][minute] = value
|
|
#np.savetxt('output.csv', arr_source, delimiter=',')
|
|
if False:
|
|
for yy in range(stripes):
|
|
rgb_row = vectorized_BestColor(1280*arr_source[yy]/100)
|
|
rgb_reshaped = np.array(rgb_row).reshape(3, minutes).T
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_reshaped
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, [])
|
|
|
|
|
|
max_values = np.max(arr_source, axis=0)
|
|
|
|
# Get indices (0-based)
|
|
wave_m = np.argmax(arr_source, axis=0)
|
|
|
|
# Add 1 to convert to 1-based indexing
|
|
wave_m = wave_m + 1
|
|
|
|
# Set to 0 where the column was all zeros
|
|
wave_m[max_values == 0] = 0
|
|
|
|
return wave_m
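
# Illustrative sketch (not called anywhere in this module): the input layout expected
# by process_wave_data_numpy. Each my_data row is (timestamp, device_id, <field values
# in radar_fields_of_interest order>); device_id_2_threshold maps device_id to
# (field_name, threshold). BestColor is assumed to be defined earlier in this module,
# and the image_file argument is only used by the disabled debug branch.
def _example_process_wave_data_numpy():
    t0 = datetime.datetime(2024, 1, 1, 0, 5, tzinfo=pytz.utc)   # 00:05 UTC
    my_data = [(t0, "dev-A", 42.0)]
    thresholds = {"dev-A": ("s3_max", 12)}
    return process_wave_data_numpy("unused.png", my_data, "UTC", thresholds, ["s3_max"])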
|
|
|
|
|
|
|
|
def ReadDailyCollapsedFastRadar(MAC, time_from_str, time_to_str):
|
|
|
|
#This will return all 1 Minute radar data for each gate in the file
|
|
#Will return list (2 items) of lists: Maxes, Mins based on s28 (stationary[2] to [8])
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
result = []
|
|
min_OK = "0"
|
|
sqlr = "SELECT radar_max FROM devices WHERE MAC = '"+MAC +"'"
|
|
print(sqlr)
|
|
DB_to_be_found_in_full = os.path.join(scriptDir, "main.db")
|
|
DB_to_be_found_in_full = DB_to_be_found_in_full.replace("\\","/")
|
|
result = QuerrySql(DB_to_be_found_in_full, sqlr)
|
|
sqlr = "SELECT date, low FROM radars1Min WHERE date >= "+str(start_of_day) +" and date < "+str(end_of_day) + " ORDER BY date"
|
|
if len(result)>0:
|
|
if result[0][0] == 1:
|
|
sqlr = "SELECT date, high FROM radars1Min WHERE date >= "+str(start_of_day) +" and date < "+str(end_of_day) + " ORDER BY date"
|
|
|
|
print(sqlr)
|
|
if os.path.exists(file):
|
|
result = QuerrySql(file, sqlr)
|
|
elif os.path.exists(file1):
|
|
result = QuerrySql(file1, sqlr)
|
|
|
|
return result
|
|
|
|
def vectorized_best_color_numpy(values):
|
|
"""Vectorized version of BestColor using pure NumPy"""
|
|
# Ensure values are within range
|
|
values = np.clip(values, 0, 1279)
|
|
|
|
# Initialize output arrays
|
|
r = np.zeros_like(values, dtype=np.uint8)
|
|
g = np.zeros_like(values, dtype=np.uint8)
|
|
b = np.zeros_like(values, dtype=np.uint8)
|
|
|
|
# Create masks for each range
|
|
mask_0_255 = values < 256
|
|
mask_256_511 = (values >= 256) & (values < 512)
|
|
mask_512_767 = (values >= 512) & (values < 768)
|
|
mask_768_1023 = (values >= 768) & (values < 1024)
|
|
mask_1024_plus = values >= 1024
|
|
|
|
# Set values for each range using masks
|
|
r[mask_0_255] = 255
|
|
g[mask_0_255] = values[mask_0_255]
|
|
|
|
r[mask_256_511] = 511 - values[mask_256_511]
|
|
g[mask_256_511] = 255
|
|
|
|
g[mask_512_767] = 255
|
|
b[mask_512_767] = values[mask_512_767] - 512
|
|
|
|
g[mask_768_1023] = 1023 - values[mask_768_1023]
|
|
b[mask_768_1023] = 255
|
|
|
|
r[mask_1024_plus] = values[mask_1024_plus] - 1024
|
|
b[mask_1024_plus] = 255
|
|
|
|
return np.stack([r, g, b], axis=-1)
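
# Illustrative sketch (not called anywhere in this module): anchor points of the
# 0-1279 colour ramp implemented above, returned as rows of (R, G, B).
def _example_best_color_ramp():
    samples = np.array([0, 255, 511, 767, 1023, 1279])
    # expected order of hues: red, yellow, green, cyan, blue, violet
    return vectorized_best_color_numpy(samples)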
|
|
|
|
def create_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
            gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0  # parenthesise the subtraction before dividing by the range
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Calculate color values
|
|
color_values = np.zeros_like(values[batch_slice])
|
|
|
|
color_values[:, :] = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 1279.0
|
|
#color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Convert to RGB colors
|
|
for i in range(end_idx - start_idx):
|
|
rgb_values = vectorized_best_color_numpy(color_values[i])
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
|
|
|
|
return wave_m
|
|
|
|
def create_light_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
|
|
"""
|
|
Create an optimized heatmap for light data (range 0-4095)
|
|
|
|
Parameters:
|
|
my_data (list): Data from the database query
|
|
bw (bool): Whether to create a black and white (True) or color (False) heatmap
|
|
fields (list): List of field names
|
|
wave_m (numpy.ndarray): The image array to fill
|
|
device_to_index (dict): Mapping from device_id to index
|
|
base_minute (datetime): The base minute for time calculations
|
|
timezone_st (str): Timezone string
|
|
min_val (float): Minimum value for normalization (default: 0)
|
|
max_val (float): Maximum value for normalization (default: 4095)
|
|
|
|
Returns:
|
|
numpy.ndarray: The filled image array
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
import numpy as np
|
|
import pytz
|
|
|
|
# Get the local timezone
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
# Number of fields (should be 1 for light data)
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float - light data is in column 2
|
|
# Reshape to match expected format (n_samples, n_fields)
|
|
values = data_array[:, 2].astype(np.float32).reshape(-1, 1)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
if bw:
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Normalize light values (0-4095) to grayscale (0-255)
|
|
gray_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 255.0)
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
# Create RGB grayscale (same value for R, G, B)
|
|
gray_rgb = np.full(3, gray_values[i, 0], dtype=np.uint8)
|
|
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = gray_rgb
|
|
else:
|
|
# Color mode
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Normalize light values (0-4095) to color range (0-1279)
|
|
color_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 1279.0)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
|
|
|
|
# For each value, calculate its RGB color and assign to the image
|
|
for i in range(end_idx - start_idx):
|
|
# Convert normalized value to RGB using vectorized_best_color_numpy
|
|
rgb_value = vectorized_best_color_numpy(np.array([color_values[i, 0]]))[0]
|
|
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = rgb_value
|
|
|
|
return wave_m
|
|
|
|
def create_temperature_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
|
|
"""
|
|
Create an optimized heatmap for temperature data with alarm levels
|
|
|
|
Parameters:
|
|
my_data (list): Data from the database query with columns for minute, device_id, temperature_avg, alarm_level
|
|
bw (bool): Whether to create a black and white (True) or color (False) heatmap
|
|
fields (list): List of field names - should be ['temperature', 'temperature_state']
|
|
wave_m (numpy.ndarray): The image array to fill
|
|
device_to_index (dict): Mapping from device_id to index
|
|
base_minute (datetime): The base minute for time calculations
|
|
timezone_st (str): Timezone string
|
|
min_val (float): Minimum value for temperature normalization
|
|
max_val (float): Maximum value for temperature normalization
|
|
|
|
Returns:
|
|
numpy.ndarray: The filled image array
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
import numpy as np
|
|
import pytz
|
|
|
|
# Get the local timezone
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
# Number of fields (should be 2 for temperature data: temperature and alarm state)
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
for i in range(end_idx - start_idx):
|
|
# Get data for this record
|
|
temperature = data_array[batch_slice][i, 2]
|
|
alarm_level = 0
|
|
|
|
# If we have an alarm_level column (index 3), use it
|
|
if data_array.shape[1] > 3:
|
|
alarm_level = data_array[batch_slice][i, 3]
|
|
|
|
# Calculate base y-coordinate for this device
|
|
base_y = device_indices[batch_slice][i] * n_fields
|
|
|
|
# Temperature row (even row - index 0, 2, 4...)
|
|
# Normalize temperature to the color range and create color
|
|
if not bw:
|
|
# For color mode
|
|
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 1279.0, 0, 1279)
|
|
temp_rgb = vectorized_best_color_numpy(np.array([normalized_temp]))[0]
|
|
else:
|
|
# For B&W mode
|
|
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 255.0, 0, 255)
|
|
gray_value = int(normalized_temp)
|
|
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
|
|
|
|
# Set the temperature color in the even row
|
|
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
|
|
|
|
# Alarm level row (odd row - index 1, 3, 5...)
|
|
# Set color based on alarm level (0=green, 1=yellow, 2=red)
|
|
if alarm_level == 0:
|
|
# Green for normal
|
|
alarm_rgb = np.array([0, 255, 0], dtype=np.uint8)
|
|
elif alarm_level == 1:
|
|
# Yellow for warning
|
|
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
|
|
else: # alarm_level == 2
|
|
# Red for critical
|
|
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
|
|
|
|
# Set the alarm color in the odd row
|
|
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
|
|
|
|
return wave_m
|
|
|
|
|
|
def create_humidity_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=100):
|
|
"""
|
|
Create a heatmap with the exact blue-cyan-green-yellow-red-violet spectrum
|
|
    matching Image 2, with green centred at position 32 (see color_stops below)
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
# Number of fields
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
# Define the color mapping function based on the exact spectrum we want
|
|
def get_color(t):
|
|
"""Get RGB color from humidity 0-100"""
|
|
# Define color stops - exact RGB values at each step
|
|
# Format: (position, (r, g, b))
|
|
#color_stops = [
|
|
#(0, (0, 0, 255)), # Blue
|
|
#(20, (0, 255, 255)), # Cyan
|
|
#(40, (0, 255, 0)), # Green (centered at 40)
|
|
#(60, (255, 255, 0)), # Yellow
|
|
#(80, (255, 0, 0)), # Red
|
|
#(100, (255, 0, 255)) # Violet
|
|
#]
|
|
|
|
color_stops = [
|
|
(0, (0, 0, 255)), # Blue
|
|
(16, (0, 255, 255)), # Cyan
|
|
(32, (0, 255, 0)), # Green (now centered at 32)
|
|
(60, (255, 255, 0)), # Yellow
|
|
(80, (255, 0, 0)), # Red
|
|
(100, (255, 0, 255)) # Violet
|
|
]
|
|
|
|
# Ensure t is within range
|
|
t = max(0, min(100, t))
|
|
|
|
# Find the two stops to interpolate between
|
|
for i in range(len(color_stops) - 1):
|
|
pos1, color1 = color_stops[i]
|
|
pos2, color2 = color_stops[i+1]
|
|
|
|
if pos1 <= t <= pos2:
|
|
# Linear interpolation between the two color stops
|
|
ratio = (t - pos1) / (pos2 - pos1)
|
|
r = int(color1[0] + ratio * (color2[0] - color1[0]))
|
|
g = int(color1[1] + ratio * (color2[1] - color1[1]))
|
|
b = int(color1[2] + ratio * (color2[2] - color1[2]))
|
|
return r, g, b
|
|
|
|
# Should never reach here
|
|
return 0, 0, 0
|
|
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
for i in range(end_idx - start_idx):
|
|
# Get data for this record
|
|
humidity = float(data_array[batch_slice][i, 2])
|
|
|
|
# Map humidity from min_val-max_val to 0-100 for our color function
|
|
normalized_temp = 100.0 * (humidity - min_val) / (max_val - min_val) if max_val > min_val else 0
|
|
normalized_temp = max(0, min(100, normalized_temp)) # Clamp to 0-100
|
|
|
|
alarm_level = 0
|
|
# If we have an alarm_level column (index 3), use it
|
|
if data_array.shape[1] > 3:
|
|
alarm_level = data_array[batch_slice][i, 3]
|
|
|
|
# Calculate base y-coordinate for this device
|
|
base_y = device_indices[batch_slice][i] * n_fields
|
|
|
|
            # Humidity row (even row)
|
|
if not bw:
|
|
# Get RGB color from our direct mapping function
|
|
r, g, b = get_color(normalized_temp)
|
|
|
|
# OpenCV uses BGR ordering, not RGB
|
|
temp_rgb = np.array([b, g, r], dtype=np.uint8)
|
|
else:
|
|
# For B&W mode
|
|
gray_value = int(normalized_temp * 2.55) # 0-100 to 0-255
|
|
gray_value = max(0, min(255, gray_value))
|
|
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
|
|
|
|
# Set the humidity color in the even row
|
|
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
|
|
|
|
# Alarm level row (odd row)
|
|
if alarm_level == 0:
|
|
# Green for normal
|
|
                alarm_rgb = np.array([0, 255, 0], dtype=np.uint8)  # note: this is B,G,R (OpenCV ordering)
|
|
elif alarm_level == 1:
|
|
# Yellow for warning
|
|
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
|
|
else: # alarm_level == 2
|
|
# Red for critical
|
|
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
|
|
|
|
# Set the alarm color in the odd row
|
|
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
|
|
|
|
return wave_m
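
# Worked example for the humidity colour stops above (derived from get_color): a
# normalized reading of 46 falls between the (32, green) and (60, yellow) stops, so
# ratio = (46 - 32) / (60 - 32) = 0.5 and the interpolated colour is (127, 255, 0),
# which is then written in B,G,R order for OpenCV.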
|
|
|
|
def create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields, device_to_index, base_minute, timezone_st, smell_component_stretch_by, selected_date, y_offset):
|
|
"""
|
|
Create a heatmap with the exact blue-cyan-green-yellow-red-violet spectrum
|
|
matching Image 2, with green at position 40
|
|
"""
|
|
if len(my_data) < 1:
|
|
return
|
|
|
|
minutes = 1440
|
|
devices_c = len(device_to_index)
|
|
sensors_c = len(fields)
|
|
stripes = devices_c * sensors_c #2 for upper maxes, lower mins
|
|
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
|
|
st = time.time()
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, timezone_st)
|
|
st = time.time()
|
|
arr_source = fast_fill_smell_array_from_timescale(my_data, time_from_str, device_to_index, arr_source_template, timezone_st)
|
|
arr_source = AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile=100)
|
|
scaled_day = CalcExtremes(arr_source, minutes, stripes)
|
|
|
|
arr_stretched = FillSmellImage_optimized(scaled_day, arr_stretched, y_offset)
|
|
|
|
return
|
|
|
|
|
|
def create_optimized_heatmap_simple(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
            gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0  # parenthesise the subtraction before dividing by the range
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
        for i in range(0, len(data_array)):
            # Scale to the 0-1279 colour ramp, convert to RGB, and write the pixel
            # on the rows belonging to this record's device (mirrors
            # create_optimized_heatmap instead of storing raw scaled values at row i)
            color_vals = ((values[i] - min_val) / (max_val - min_val)) * 1279.0
            y_coords = device_indices[i] * n_fields + np.arange(n_fields)
            wave_m[y_coords, x_coords[i]] = vectorized_best_color_numpy(color_vals)
|
|
|
|
return wave_m
|
|
|
|
|
|
def create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
gray_values = (values[batch_slice, :] / 100.0) * 255.0
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Calculate color values
|
|
color_values = np.zeros_like(values[batch_slice])
|
|
color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Convert to RGB colors
|
|
for i in range(end_idx - start_idx):
|
|
rgb_values = vectorized_best_color_numpy(color_values[i])
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
|
|
|
|
return wave_m
|
|
|
|
def visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png'):
|
|
"""
|
|
Visualize how GMM separates the stationary signal into components
|
|
"""
|
|
# Prepare data
|
|
X = stationary_signal.reshape(-1, 1)
|
|
|
|
# Fit GMM
|
|
gmm = GaussianMixture(n_components=2, random_state=42)
|
|
gmm.fit(X)
|
|
|
|
# Get parameters
|
|
means = gmm.means_.flatten()
|
|
stds = np.sqrt(gmm.covariances_.flatten())
|
|
weights = gmm.weights_
|
|
|
|
# Create histogram of actual data
|
|
plt.figure(figsize=(12, 6))
|
|
|
|
# Plot histogram of actual data
|
|
plt.hist(X, bins=50, density=True, alpha=0.6, color='gray',
|
|
label='Actual Signal Distribution')
|
|
|
|
# Generate points for GMM curves
|
|
x = np.linspace(X.min(), X.max(), 200)
|
|
|
|
# Plot individual components
|
|
for i in range(len(means)):
|
|
plt.plot(x, weights[i] * stats.norm.pdf(x, means[i], stds[i]),
|
|
label=f'Component {i+1}: mean={means[i]:.2f}, std={stds[i]:.2f}')
|
|
|
|
# Plot combined GMM
|
|
gmm_curve = np.zeros_like(x)
|
|
for i in range(len(means)):
|
|
gmm_curve += weights[i] * stats.norm.pdf(x, means[i], stds[i])
|
|
plt.plot(x, gmm_curve, 'r--', linewidth=2, label='Combined GMM')
|
|
|
|
# Add vertical lines for threshold
|
|
baseline = min(means)
|
|
threshold = baseline + 3 * np.sqrt(gmm.covariances_.flatten()[np.argmin(means)])
|
|
plt.axvline(x=baseline, color='g', linestyle='--', label='Baseline')
|
|
plt.axvline(x=threshold, color='r', linestyle='--', label='Threshold')
|
|
|
|
plt.title('Gaussian Mixture Model Components of Stationary Signal')
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Density')
|
|
plt.legend()
|
|
plt.grid(True)
|
|
|
|
# Save and close
|
|
plt.savefig(output_file, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
def process_location_data(location_data):
|
|
"""
|
|
Convert raw location data into aligned time series.
|
|
"""
|
|
timestamps = np.array([t[0] for t in location_data])
|
|
stationary = np.array([t[1] for t in location_data])
|
|
motion = np.array([t[2] for t in location_data])
|
|
return timestamps, stationary, motion
|
|
|
|
def detect_presence_for_location(stationary_signal, motion_signal,
|
|
motion_threshold=5, gmm_components=2):
|
|
"""
|
|
Simplified presence detection for a single location.
|
|
Returns presence mask and parameters.
|
|
"""
|
|
# Fit GMM to stationary signal
|
|
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
|
|
X = stationary_signal.reshape(-1, 1)
|
|
gmm.fit(X)
|
|
|
|
    if EnablePlot:
        # matplotlib/pyplot is only imported when EnablePlot is set, so guard the call
        visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png')
|
|
|
|
# Get baseline and threshold
|
|
baseline = min(gmm.means_)[0]
|
|
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
|
|
baseline_std = np.sqrt(components_sorted[0][1])
|
|
threshold = baseline + 3 * baseline_std
|
|
|
|
# Detect presence
|
|
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
|
|
|
|
    # Smooth presence detection (3-sample window, roughly 30 seconds at 10-second sampling)
|
|
smooth_window = 3
|
|
presence_mask = np.convolve(presence_mask.astype(int),
|
|
np.ones(smooth_window)/smooth_window,
|
|
mode='same') > 0.5
|
|
|
|
return presence_mask, threshold
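
# Illustrative sketch (not called anywhere in this module): the presence rule above
# on synthetic signals. The GMM baseline is the lower component mean, the threshold
# is baseline + 3 * that component's std, and motion above motion_threshold also
# counts as presence. With EnablePlot left disabled the GMM visualisation is skipped.
def _example_detect_presence():
    rng = np.random.default_rng(0)
    stationary = np.concatenate([rng.normal(2.0, 0.5, 500),    # room empty
                                 rng.normal(20.0, 2.0, 500)])  # someone present
    motion = np.zeros_like(stationary)
    return detect_presence_for_location(stationary, motion, motion_threshold=5)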
|
|
|
|
def find_current_location(data_sets, start_time, end_time, motion_threshold=10):
|
|
"""
|
|
Analyze presence across multiple locations for each minute.
|
|
|
|
Parameters:
|
|
-----------
|
|
data_sets : dict
|
|
Dictionary of location_name: data_tuples pairs
|
|
start_time : datetime
|
|
Start time for analysis
|
|
end_time : datetime
|
|
End time for analysis
|
|
motion_threshold : float
|
|
Threshold for significant motion detection
|
|
|
|
Returns:
|
|
--------
|
|
dict
|
|
Minute by minute analysis of presence and movement
|
|
"""
|
|
# Process each location's data
|
|
location_data = {}
|
|
for location, data in data_sets.items():
|
|
timestamps, stationary, motion = process_location_data(data)
|
|
presence, threshold = detect_presence_for_location(stationary, motion, motion_threshold)
|
|
location_data[location] = {
|
|
'timestamps': timestamps,
|
|
'presence': presence,
|
|
'motion': motion,
|
|
'stationary': stationary,
|
|
'threshold': threshold
|
|
}
|
|
|
|
# Create minute-by-minute analysis
|
|
current_time = start_time
|
|
results = []
|
|
|
|
while current_time < end_time:
|
|
minute_end = current_time + timedelta(minutes=1)
|
|
|
|
# Analysis for current minute
|
|
minute_status = {
|
|
'timestamp': current_time,
|
|
'locations': [],
|
|
'moving_locations': [],
|
|
'presence_values': {},
|
|
'motion_values': {},
|
|
'status': 'nobody_present'
|
|
}
|
|
|
|
# First pass: collect all presence and motion values
|
|
for location, data in location_data.items():
|
|
# Find indices for current minute
|
|
mask = (data['timestamps'] >= current_time) & (data['timestamps'] < minute_end)
|
|
if not any(mask):
|
|
continue
|
|
|
|
presence_in_minute = data['presence'][mask]
|
|
motion_in_minute = data['motion'][mask]
|
|
stationary_in_minute = data['stationary'][mask]
|
|
|
|
if any(presence_in_minute):
|
|
minute_status['presence_values'][location] = np.max(stationary_in_minute)
|
|
minute_status['motion_values'][location] = np.max(motion_in_minute)
|
|
|
|
# If no presence detected anywhere
|
|
if not minute_status['presence_values']:
|
|
minute_status['status'] = 'nobody_present'
|
|
results.append(minute_status)
|
|
current_time += timedelta(minutes=1)
|
|
continue
|
|
|
|
|
|
# Find location with strongest presence
|
|
primary_location = max(minute_status['presence_values'].items(),
|
|
key=lambda x: x[1])[0]
|
|
|
|
# Count locations with significant motion
|
|
moving_locations = [loc for loc, motion in minute_status['motion_values'].items()
|
|
if motion > motion_threshold]
|
|
|
|
        # plot(motion, filename=f"motion.png", title=f"Motion", style='line')  # disabled debug call: 'motion' here is just the last value leaked from the list comprehension above
|
|
# Update status based on motion and presence
|
|
if len(moving_locations) > 1:
|
|
# Multiple locations with significant motion indicates multiple people
|
|
minute_status['status'] = 'multiple_people_moving'
|
|
minute_status['locations'] = moving_locations
|
|
minute_status['moving_locations'] = moving_locations
|
|
else:
|
|
# Single or no motion - assign to location with strongest presence
|
|
minute_status['locations'] = [primary_location]
|
|
if moving_locations:
|
|
minute_status['status'] = f'single_person_moving_in_{primary_location}'
|
|
minute_status['moving_locations'] = moving_locations
|
|
else:
|
|
minute_status['status'] = f'single_person_stationary_in_{primary_location}'
|
|
|
|
results.append(minute_status)
|
|
current_time += timedelta(minutes=1)
|
|
|
|
return results
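
# Shape of each entry returned by find_current_location (derived from the code above):
# a dict with 'timestamp', 'status' (e.g. 'nobody_present', 'multiple_people_moving',
# 'single_person_moving_in_<location>', 'single_person_stationary_in_<location>'),
# the 'locations' and 'moving_locations' lists, and per-location 'presence_values'
# and 'motion_values' maxima for that minute.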
|
|
|
|
def get_size(obj, seen=None):
|
|
# Recursively find size of objects and their contents
|
|
if seen is None:
|
|
seen = set()
|
|
|
|
obj_id = id(obj)
|
|
if obj_id in seen:
|
|
return 0
|
|
|
|
seen.add(obj_id)
|
|
size = sys.getsizeof(obj)
|
|
|
|
if isinstance(obj, (list, tuple, set, dict)):
|
|
if isinstance(obj, (list, tuple, set)):
|
|
size += sum(get_size(i, seen) for i in obj)
|
|
else: # dict
|
|
size += sum(get_size(k, seen) + get_size(v, seen) for k, v in obj.items())
|
|
|
|
return size
|
|
|
|
def CreatePresenceMap(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global,
|
|
fast, filter_minutes, time_zone_s):
|
|
|
|
#global Id2MACDict
|
|
data_sets = {}
|
|
ids_list = []
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
time_from, time_to = GetLocalTimeForDateSimple(selected_date, time_zone_s)
|
|
|
|
for details in devices_list:
|
|
|
|
sql = get_device_radar_only_query(str(details[1]), time_from_str, time_to_str, [details[1]])
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
data_sets[details[2]] = cur.fetchall()#cur.fetchone()
|
|
|
|
|
|
|
|
|
|
# Get minute-by-minute analysis
|
|
location_analysis = find_current_location(data_sets, time_from, time_to)
|
|
|
|
# Example of printing results
|
|
for minute in location_analysis:
|
|
print(f"Time: {minute['timestamp']}")
|
|
print(f"Status: {minute['status']}")
|
|
print(f"Present in: {', '.join(minute['locations'])}")
|
|
if minute['moving_locations']:
|
|
print(f"Movement in: {', '.join(minute['moving_locations'])}")
|
|
print("---")
|
|
|
|
print(f"Dictionary size: {get_size(data_sets)} bytes")
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
sql = get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
#thresholds_dict = {}
|
|
|
|
#stretch_to_min_max = True
|
|
#devices_c = len(devices_list)
|
|
|
|
#data_sets = {
|
|
#'living_room': my_data1,
|
|
#'kitchen': my_data2,
|
|
#'bedroom1': my_data3,
|
|
#'bedroom2': my_data4,
|
|
#'hallway': my_data5,
|
|
#'bathroom': my_data6,
|
|
#'office': my_data7
|
|
#}
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
    # recreate .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
|
|
|
|
fields_n = len(fields)
|
|
|
|
stripes = len(devices_list) * fields_n
|
|
|
|
#device_counter = 0
|
|
stretch_by = 5
|
|
|
|
#arr_source = np.zeros((stripes, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
ids_list = []
|
|
|
|
labels = []
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_scale = 1
|
|
label_font_color = (255, 255, 255)
|
|
label_font_thickness = 2
|
|
label_font_line = cv2.LINE_AA
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
|
|
|
|
    # Get start and end times across all locations actually present in data_sets
    # (the hard-coded 'living_room'/'kitchen' keys do not exist here, since
    # data_sets is keyed by the device descriptors collected above)
    start_time = min(data[0][0] for data in data_sets.values() if data)
    end_time = max(data[-1][0] for data in data_sets.values() if data)
|
|
|
|
# Get minute-by-minute analysis
|
|
location_analysis = find_current_location(data_sets, start_time, end_time)
|
|
|
|
# Example of printing results
|
|
for minute in location_analysis:
|
|
print(f"Time: {minute['timestamp']}")
|
|
print(f"Status: {minute['status']}")
|
|
print(f"Present in: {', '.join(minute['locations'])}")
|
|
if minute['moving_locations']:
|
|
print(f"Movement in: {', '.join(minute['moving_locations'])}")
|
|
print("---")
|
|
#----------------------------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
#device_ids = sorted(set(record[1] for record in my_data))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
if False:
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
for record in my_data:
|
|
|
|
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
|
|
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the max/min values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
device_idx = device_to_index[device_id]
|
|
|
|
if bw:
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 255.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = [gray_value, gray_value, gray_value]
|
|
else: #color
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * 22 + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 1279.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = BestColor(gray_value)
|
|
|
|
print(time.time()-st)
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, labels)
|
|
#arr_source[2*gate, :] = wave_m
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_m[col]
|
|
#if sens_val != 0:
|
|
#r,g,b=BestColor(km*(sens_val-m_min))
|
|
#if r > 255 or g > 255 or b > 255:
|
|
#print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
##arr_stretched[y, :] = rgb_row
|
|
|
|
#if gate > 1:
|
|
#ks = 0
|
|
#if(s_max > s_min):
|
|
#if bw:
|
|
#ks = 255/(s_max - s_min)
|
|
#else:
|
|
#ks = 1280/(s_max - s_min)
|
|
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
#wave_s = np.array([0.0] * 1440)
|
|
|
|
#for minute_m in range(1440):
|
|
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
#arr_source[2*gate + 1, :] = wave_s
|
|
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_s[col]
|
|
#if sens_val != 0:
|
|
#if bw:
|
|
#r = ks*(sens_val-s_min)
|
|
#g = r
|
|
#b = r
|
|
#else:
|
|
#r,g,b=BestColor(ks*(sens_val-s_min))
|
|
##print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
print("stop")
|
|
|
|
def ConvertToBase(time_from_str, time_zone_s):
|
|
print(time_from_str)
|
|
dt = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
return dt
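
# ConvertToBase parses the "from" timestamp (including its UTC offset) and returns it as
# the day's reference minute; note that the time_zone_s argument is not used here.
# A minimal sketch of how that base minute becomes an x column (0..1439) in the per-minute
# heatmaps below -- the timestamp strings are illustrative, not taken from real data:
def _example_minute_column(record_minute_str="2025-01-01 08:30:00+0000",
                           base_minute_str="2025-01-01 00:00:00+0000"):
    base = datetime.datetime.strptime(base_minute_str, "%Y-%m-%d %H:%M:%S%z")
    minute = datetime.datetime.strptime(record_minute_str, "%Y-%m-%d %H:%M:%S%z")
    # Same arithmetic as the heatmap fill loops: whole minutes elapsed since the base.
    return int((minute - base).total_seconds() / 60)  # -> 510 for 08:30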
def CreateFullLocationMap(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
|
|
#global Id2MACDict
|
|
|
|
thresholds_dict = {}
|
|
|
|
stretch_to_min_max = True
|
|
devices_c = len(devices_list)
|
|
|
|
if devices_c == 0:
|
|
return
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#recreate the .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
if scale_global and chart_type != 3 and chart_type != 4:  # i.e. neither "digital" (3) nor "collapsed" (4)
|
|
|
|
max_gate={}
|
|
for gate in range(9):
|
|
max_gate[str(gate)+"_m"] = 0
|
|
max_gate[str(gate)+"_s"] = 0
|
|
|
|
device_counter = 0
|
|
for details in devices_list:
|
|
MAC, threshold, location_name, description = details
|
|
if threshold is None:
|
|
threshold = '["s3_max",12]'
|
|
|
|
|
|
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
|
|
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
|
|
|
|
minute_radar_lists = ReadDailyRadar(MAC, current_date)
|
|
|
|
for gate in range(9):
|
|
for minute_m in range(1440):
|
|
if (minute_radar_lists[minute_m][gate] > max_gate[str(gate)+"_m"]):
|
|
max_gate[str(gate)+"_m"] = minute_radar_lists[minute_m][gate]
|
|
|
|
if gate > 1:
|
|
if (minute_radar_lists[minute_m][gate + 7] > max_gate[str(gate)+"_s"]):
|
|
max_gate[str(gate)+"_s"] = minute_radar_lists[minute_m][gate + 7]
|
|
|
|
|
|
if (chart_type == 2): #"analog"
|
|
|
|
|
|
#fields = ['absent_min', 'stationary_max', 'moving_max', 'both_max',
|
|
#'m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
#'m6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
#'s4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max']
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
|
|
|
|
|
|
fields_n = len(fields)
|
|
|
|
stripes = len(devices_list) * fields_n
|
|
|
|
device_counter = 0
|
|
stretch_by = 5
|
|
|
|
arr_source = np.zeros((stripes, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
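
# Layout note (inferred from the array shapes above): each device contributes fields_n
# one-pixel stripes in wave_m, and each stripe is later stretched to stretch_by rows, so
# the final image is len(devices_list) * fields_n * stretch_by rows tall by 1440 columns
# (one per minute). For example, 3 devices * 19 fields * 5 px = 285 rows.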
|
|
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
|
|
labels = []
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_scale = 1
|
|
label_font_color = (255, 255, 255)
|
|
label_font_thickness = 2
|
|
label_font_line = cv2.LINE_AA
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is None:
|
|
return False
|
|
|
|
|
|
#device_ids = sorted(set(record[1] for record in my_data))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
if False:
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
for record in my_data:
|
|
|
|
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
|
|
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the max/min values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
device_idx = device_to_index[device_id]
|
|
|
|
if bw:
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 255.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = [gray_value, gray_value, gray_value]
|
|
else: #color
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * 22 + field_idx
|
|
|
|
# Scale the 0-100 value into the 0-1279 range used by the color map
|
|
gray_value = int((value / 100.0) * 1279.0)
|
|
|
|
# Map the scaled value to an RGB color via BestColor
|
|
wave_m[y, x] = BestColor(gray_value)
|
|
|
|
print(time.time()-st)
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, labels)
|
|
#arr_source[2*gate, :] = wave_m
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_m[col]
|
|
#if sens_val != 0:
|
|
#r,g,b=BestColor(km*(sens_val-m_min))
|
|
#if r > 255 or g > 255 or b > 255:
|
|
#print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
##arr_stretched[y, :] = rgb_row
|
|
|
|
#if gate > 1:
|
|
#ks = 0
|
|
#if(s_max > s_min):
|
|
#if bw:
|
|
#ks = 255/(s_max - s_min)
|
|
#else:
|
|
#ks = 1280/(s_max - s_min)
|
|
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
#wave_s = np.array([0.0] * 1440)
|
|
|
|
#for minute_m in range(1440):
|
|
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
#arr_source[2*gate + 1, :] = wave_s
|
|
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_s[col]
|
|
#if sens_val != 0:
|
|
#if bw:
|
|
#r = ks*(sens_val-s_min)
|
|
#g = r
|
|
#b = r
|
|
#else:
|
|
#r,g,b=BestColor(ks*(sens_val-s_min))
|
|
##print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
print("stop")
|
|
elif (chart_type == 3): #"digital"
|
|
device_counter = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
MAC, threshold, location_id, description = GetMacThrFromId(dev_id)
|
|
if threshold is None:
|
|
threshold = 30
|
|
|
|
sensor = "Radar"
|
|
location_name = location_names[location_id]
|
|
pickle_file = os.path.join(scriptDir, "scratch/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_radarM.pkl")
|
|
pickle_file = pickle_file.replace("\\","/")
|
|
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
|
|
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
|
|
|
|
minute_radar_lists = ReadDailyRadar(MAC, current_date)
|
|
|
|
y = 0
|
|
sensor_index = 0
|
|
|
|
#location_index = 0
|
|
|
|
for gate in range(9):
|
|
threshold = 15
|
|
if (gate > 1):
|
|
threshold = thresholds_dict[dev_id][gate-2]
|
|
|
|
for minute_m in range(1440):
|
|
if (minute_radar_lists[minute_m][gate] > threshold):
|
|
minute_radar_lists[minute_m][gate] = 100
|
|
else:
|
|
minute_radar_lists[minute_m][gate] = 0
|
|
|
|
if gate > 1:
|
|
if (minute_radar_lists[minute_m][gate + 7] > threshold):
|
|
minute_radar_lists[minute_m][gate + 7] = 100
|
|
else:
|
|
minute_radar_lists[minute_m][gate + 7] = 0
|
|
|
|
m_max = 100
|
|
m_min = 0
|
|
s_max = 100
|
|
s_min = 0
|
|
|
|
km = 0
|
|
if(m_max > m_min):
|
|
km = 1280/(m_max - m_min)
|
|
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
wave_m = np.array([0.0] * 1440)
|
|
|
|
for minute_m in range(1440):
|
|
wave_m[minute_m] = minute_radar_lists[minute_m][gate]
|
|
|
|
if gate < 2:
|
|
DoDisplay(wave_m, location_name+" "+ description+" " + str(gate))
|
|
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
arr_source[2*gate, :] = wave_m
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
for col in range(1440):
|
|
sens_val = wave_m[col]
|
|
if sens_val != 0:
|
|
r,g,b=BestColor(km*(sens_val-m_min))
|
|
#print(r,g,b)
|
|
rgb_row[col] = r,g,b
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
#print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
if gate > 1:
|
|
ks = 0
|
|
if(s_max > s_min):
|
|
if bw:
|
|
ks = 255/(s_max - s_min)
|
|
else:
|
|
ks = 1280/(s_max - s_min)
|
|
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
wave_s = np.array([0.0] * 1440)
|
|
|
|
for minute_m in range(1440):
|
|
wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
arr_source[2*gate + 1, :] = wave_s
|
|
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
for col in range(1440):
|
|
sens_val = wave_s[col]
|
|
if sens_val != 0:
|
|
if bw:
|
|
r = ks*(sens_val-s_min)
|
|
g = r
|
|
b = r
|
|
else:
|
|
r,g,b=BestColor(ks*(sens_val-s_min))
|
|
#print(r,g,b)
|
|
rgb_row[col] = r,g,b
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
device_counter += 1
|
|
print("stop")
|
|
|
|
elif (chart_type == 4): #"collapsed"
|
|
|
|
stretch_by = 50
|
|
arr_source = np.zeros((1, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
device_counter = 0
|
|
wave_m = [["", -1] for _ in range(1440)]
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
radar_fields_of_interest = []
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except Exception:
|
|
threshold_lst = ["s3_max",50]
|
|
|
|
radar_field = threshold_lst[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
threshold = threshold_lst[1]
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is None:
|
|
return False
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = device_id
|
|
|
|
if radar_threshold_group_st is None:
|
|
radar_threshold_group_st = '["s3_max",50]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",50]
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
st = time.time()
|
|
|
|
#each record in my_data has time, device_id and radar_fields_of_interest in it
|
|
result_np = None
|
|
try:
|
|
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
|
|
print(time.time() - st)
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
if False:
|
|
for record in my_data:
|
|
time_val, device_id, min_val, max_val = record
|
|
radar_threshold = device_id_2_threshold[device_id]
|
|
local_time = time_val.astimezone(target_tz)
|
|
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
|
|
|
|
if (wave_m[minute_m][0] == ""):
|
|
if max_val > radar_threshold:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
else:
|
|
if max_val > radar_threshold:
|
|
if max_val > wave_m[minute_m][1]:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
|
|
|
|
|
|
print(time.time()-st)
|
|
if result_np is not None:
|
|
wave_m = result_np
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
#wave_m = FilterGlitches(wave_m, filter_minutes)
|
|
r = 0
|
|
g = 0
|
|
b = 0
|
|
|
|
if isinstance(wave_m[0], np.int64):
|
|
inital_device_id = row_nr_2_device_id[wave_m[0]]
|
|
else:
|
|
inital_device_id = 0
|
|
|
|
present_at = [[inital_device_id, 0, 1]] #device_id, minute, duration
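
# present_at is a run-length encoding of the day: each entry is
# [device_id, start_minute, duration_in_minutes], with device_id 0 meaning "Outside".
# Illustrative (made-up) example: [[559, 0, 412], [0, 412, 35], [561, 447, 90], ...]
# reads as device 559 from 00:00 to 06:52, 35 minutes with nobody detected, then device 561.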
|
|
|
|
for minute_m in range(1440):
|
|
try:
|
|
|
|
if isinstance(wave_m[minute_m], np.int64):
|
|
device_id = row_nr_2_device_id[wave_m[minute_m]]
|
|
else:
|
|
device_id = 0
|
|
|
|
if device_id != "" and device_id != -1:
|
|
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
|
|
rgb_row[minute_m] = b,g,r
|
|
|
|
if minute_m > 0:
|
|
if present_at[-1][0] != device_id:
|
|
present_at.append([device_id, minute_m, 1])
|
|
else:
|
|
present_at[-1][2] += 1
|
|
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
#print("stop")
|
|
#print(r,g,b)
|
|
SaveObjectInBlob(image_file+".bin", present_at)
|
|
SaveImageInBlob(image_file, arr_stretched)
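
# A minimal sketch of reading the presence run-length data back from blob storage and
# totalling minutes per device. It assumes the ".bin" object written above can be fetched
# with ReadObjectMinIO(bucket, name) from a "daily-maps" bucket and parsed with json.loads,
# as the commented-out read-back code elsewhere in this file suggests; the bucket and
# object names are illustrative, not guaranteed.
def _example_presence_totals(image_file):
    present_at_s = ReadObjectMinIO("daily-maps", image_file + ".bin")
    present_at = json.loads(present_at_s)
    totals = {}
    for device_id, _start_minute, duration in present_at:
        # Accumulate minutes per device over the day (device_id 0 = "Outside").
        totals[device_id] = totals.get(device_id, 0) + duration
    return totals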
|
|
|
|
def CreateFullLocationMapLabelsOut(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
|
|
#global Id2MACDict
|
|
|
|
thresholds_dict = {}
|
|
|
|
stretch_to_min_max = True
|
|
devices_c = len(devices_list)
|
|
|
|
if devices_c == 0:
|
|
return
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#recreate the .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
|
|
if (chart_type == 8): #"all graphs"
|
|
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
|
|
|
|
|
|
show_radar = True
|
|
show_light = True
|
|
show_temperature = True
|
|
show_humidity = True
|
|
show_smell = True
|
|
|
|
labels_width = 200
|
|
title_labels_height = 40
|
|
title_label_width = 100
|
|
|
|
#common
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_line = cv2.LINE_AA
|
|
|
|
#different
|
|
title_label_font_scale = 1
|
|
title_label_font_color = (0, 0, 0)#(128, 255, 255)
|
|
title_label_font_thickness = 2
|
|
|
|
label_font_scale = 0.5
|
|
label_font_color = (0, 0, 0)#(0, 255, 255)
|
|
label_font_thickness = 1
|
|
|
|
fields_n = len(fields)
|
|
|
|
radar_stripes = len(devices_list) * fields_n
|
|
radar_stretch_by = 5
|
|
|
|
light_stripes = len(devices_list)
|
|
light_stretch_by = 20
|
|
|
|
smell_sensors_stripes = 10 * len(devices_list)
|
|
other_sensors_stripes = len(devices_list)
|
|
|
|
temp_stripe_width = 15
|
|
alarm_stripe_width = 5
|
|
temperature_stretch_by = temp_stripe_width + alarm_stripe_width # Total height per device
|
|
|
|
humidity_stripe_width = 15
|
|
humidity_stretch_by = humidity_stripe_width + alarm_stripe_width
|
|
smell_component_stretch_by = 20
|
|
|
|
text_dimensions = get_text_dimensions("TEST", label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
|
|
all_maps_height = 0
|
|
|
|
# radar, light, temperature, humidity, smell*10
|
|
|
|
if show_radar:
|
|
all_maps_height = title_labels_height + radar_stripes*radar_stretch_by
|
|
|
|
if show_light:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*light_stretch_by
|
|
|
|
if show_temperature:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*temperature_stretch_by
|
|
|
|
if show_humidity:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*humidity_stretch_by
|
|
|
|
if show_smell:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*smell_component_stretch_by * 10
|
|
|
|
|
|
if all_maps_height == 0:
|
|
return
|
|
|
|
vertical_offset = 0
|
|
arr_stretched = np.full((all_maps_height, minutes+labels_width, 3), [255, 174, 70], dtype=np.uint8)
|
|
|
|
#Lets add divider lines
|
|
x = 190
|
|
if show_radar:
|
|
stretch_by = radar_stretch_by
|
|
cnt = 0
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height + (cnt)*fields_n*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
|
|
section_height = title_labels_height + radar_stripes*radar_stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_light:
|
|
stretch_by = light_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
|
|
section_height = title_labels_height + other_sensors_stripes*stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
|
|
if show_temperature:
|
|
stretch_by = temperature_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
section_height = title_labels_height + other_sensors_stripes*stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_humidity:
|
|
stretch_by = humidity_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
section_height = title_labels_height + other_sensors_stripes*humidity_stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_smell:
|
|
stretch_by = smell_component_stretch_by
|
|
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*10*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
#section_height = title_labels_height + other_sensors_stripes**stretch_by * 10
|
|
#vertical_offset = vertical_offset + section_height
|
|
|
|
#all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*stretch_by * 10
|
|
|
|
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
labels = []
|
|
title_labels = []
|
|
vertical_offset = 0
|
|
######################################## RADAR ##################################################################
|
|
if show_radar:
|
|
title_label_text = "RADAR"
|
|
fields_s = fields
|
|
stripes = radar_stripes
|
|
stretch_by = radar_stretch_by
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2), vertical_offset + 10 + title_text_height), label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is not None:
|
|
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields_s, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(radar_stretch_by):
|
|
y = yy * radar_stretch_by + stretch_index
|
|
arr_stretched[title_labels_height+y, 200:] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
vertical_offset = vertical_offset + title_labels_height + stripes*radar_stretch_by
|
|
######################################## LIGHT ##################################################################
|
|
if show_light:
|
|
title_label_text = "LIGHT"
|
|
fields_s = ['light']
|
|
min_val = 0
|
|
max_val = 4095
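
# 0-4095 is presumably the sensor's raw 12-bit full-scale range; light values are
# scaled into the color map relative to this span.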
|
|
stretch_by = light_stretch_by
|
|
stripes = len(devices_list) * len(fields_s) # Calculate number of rows needed
|
|
# Calculate the correct vertical offset for light section
|
|
|
|
# Draw the light section title at the correct position
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for light section
|
|
cnt = 0
|
|
light_ids_list = [] # Create a separate list for light section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
light_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the light section
|
|
labels.append((descriptor, (10, vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by),
|
|
label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get light data using the existing query function
|
|
sql = get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, light_ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(light_ids_list)}
|
|
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process light data
|
|
st = time.time()
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
# Use the light-specific function
|
|
wave_m = create_light_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Light heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap vertically
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
target_y = section_start + y
|
|
|
|
# Make sure we're within bounds of the array
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = rgb_row
|
|
else:
|
|
print(f"Warning: Row {target_y} is out of bounds (max: {arr_stretched.shape[0]-1})")
|
|
|
|
vertical_offset = vertical_offset + title_labels_height + stripes*stretch_by
|
|
print(f"Light stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## TEMPERATURE ##################################################################
|
|
if show_temperature:
|
|
title_label_text = "TEMPERATURE"
|
|
fields_s = ['temperature', 'temperature_state']
|
|
|
|
# Define different stripe widths for temperature and alarm
|
|
|
|
temp_offset = -16.0
|
|
min_val = 20
|
|
max_val = 30
|
|
|
|
# Calculate the correct vertical offset for temperature section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the temperature section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for temperature section
|
|
cnt = 0
|
|
temp_ids_list = [] # Create a separate list for temperature section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the temperature section
|
|
y_pos = vertical_offset + title_labels_height + text_height + cnt * temperature_stretch_by
|
|
#y_pos = vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by)
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get temperature data
|
|
sql = get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, temp_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process temperature data
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
if False:
|
|
# Simulate data for testing
|
|
for i in range(min(len(my_data), 500)):
|
|
if i >= 100: # Only modify indices 100-500
|
|
t = (i - 100) / 4.0 # Temperature value
|
|
|
|
# Set correct alarm levels based on temperature
|
|
if CelsiusToFahrenheit(t) <= 50 or CelsiusToFahrenheit(t) >= 90:
|
|
alarm_level = 2 # Critical - should be red
|
|
elif CelsiusToFahrenheit(t) <= 60 or CelsiusToFahrenheit(t) >= 80:
|
|
alarm_level = 1 # Warning - should be yellow
|
|
else:
|
|
alarm_level = 0 # Normal - should be green
|
|
|
|
# Replace the tuple with new values
|
|
my_data[i] = (my_data[i][0], my_data[i][1], t, alarm_level)
|
|
|
|
# Create the heatmap data
|
|
wave_m = create_temperature_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Temperature heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap with different heights for temperature and alarm
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
# Loop through each device
|
|
for device_idx in range(len(temp_ids_list)):
|
|
# Get the data rows for this device
|
|
temp_row = wave_m[device_idx * 2] # Temperature row (even index)
|
|
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
|
|
|
|
# Calculate the starting y-position for this device
|
|
device_y_start = section_start + device_idx * temperature_stretch_by
|
|
|
|
# Draw the temperature stripe (15 pixels)
|
|
for stretch_index in range(temp_stripe_width):
|
|
target_y = device_y_start + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = temp_row
|
|
|
|
# Draw the alarm stripe (5 pixels)
|
|
for stretch_index in range(alarm_stripe_width):
|
|
target_y = device_y_start + temp_stripe_width + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = alarm_row
|
|
|
|
print(f"Temperature stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## HUMIDITY ##################################################################
|
|
'''
|
|
Ideal indoor humidity: 30-50%
|
|
Too dry: Below 30% - Can cause dry skin, irritated eyes, and respiratory issues
|
|
Too humid: Above 60% - Feels warmer than actual temperature, promotes mold growth
|
|
'''
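
# Assumed alarm-level convention, matching the simulated test data further down:
#   0 = normal (comfortable band), 1 = warning (<= 30% or >= 50%), 2 = critical (<= 20% or >= 60%).
# The wide stripe shows the scaled humidity value; the thin stripe underneath shows this state.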
|
|
if show_humidity:
|
|
title_label_text = "HUMIDITY"
|
|
fields_s = ['humidity', 'humidity_state']
|
|
|
|
# Define different stripe widths for humidity and alarm
|
|
|
|
humidity_offset = 0
|
|
min_val = 30#40
|
|
max_val = 60#60
|
|
|
|
# Calculate the correct vertical offset for the humidity section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
if show_temperature:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the humidity section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for the humidity section
|
|
cnt = 0
|
|
temp_ids_list = []  # Separate id list reused for the humidity section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the humidity section
|
|
y_pos = vertical_offset + title_labels_height + text_height + cnt * humidity_stretch_by
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get humidity data
|
|
sql = get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, humidity_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process humidity data
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
if False:
|
|
# Simulate data for testing
|
|
for i in range(min(len(my_data), 500)):
|
|
if i >= 100: # Only modify indices 100-500
|
|
h = (i - 100) / 4.0  # Humidity value

# Set correct alarm levels based on humidity
|
|
if h <= 20 or h >= 60:
|
|
alarm_level = 2 # Critical - should be red
|
|
elif h <= 30 or h >= 50:
|
|
alarm_level = 1 # Warning - should be yellow
|
|
else:
|
|
alarm_level = 0 # Normal - should be green
|
|
|
|
# Replace the tuple with new values
|
|
my_data[i] = (my_data[i][0], my_data[i][1], h, alarm_level)
|
|
|
|
# Create the heatmap data
|
|
wave_m = create_humidity_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Humidity heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap with different heights for humidity and alarm
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
# Loop through each device
|
|
for device_idx in range(len(temp_ids_list)):
|
|
# Get the data rows for this device
|
|
humidity_row = wave_m[device_idx * 2] # Humidity row (even index)
|
|
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
|
|
|
|
# Calculate the starting y-position for this device
|
|
device_y_start = section_start + device_idx * humidity_stretch_by
|
|
|
|
# Draw the humidity stripe (15 pixels)
|
|
for stretch_index in range(humidity_stripe_width):
|
|
target_y = device_y_start + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = humidity_row
|
|
|
|
# Draw the alarm stripe (5 pixels)
|
|
for stretch_index in range(alarm_stripe_width):
|
|
target_y = device_y_start + humidity_stripe_width + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = alarm_row
|
|
|
|
print(f"Temperature stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## SMELL ##################################################################
|
|
if show_smell:
|
|
title_label_text = "SMELL"
|
|
fields_s = ['S0', 'S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'S7', 'S8', 'S9']
|
|
|
|
# Define different stripe widths for humidity and alarm
|
|
|
|
smell_offset = 0
|
|
|
|
# Calculate the correct vertical offset for the smell section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
if show_temperature:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
|
|
if show_humidity:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * humidity_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the smell section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for the smell section
|
|
cnt = 0
|
|
temp_ids_list = []  # Separate id list reused for the smell section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the smell section
|
|
y_pos = vertical_offset + title_labels_height +80+ text_height + cnt * smell_component_stretch_by * 10
|
|
#y_pos = vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by)
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get smell data
|
|
sql = get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, smell_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Create the heatmap data
|
|
create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields_s, device_to_index, base_minute, time_zone_s, smell_component_stretch_by, selected_date, vertical_offset + 18 + title_text_height)
|
|
|
|
|
|
|
|
SaveImageInBlobLabelsOut(image_file, arr_stretched, labels, title_labels)
|
|
|
|
print("stop")
def CreateDailyLocationMap(location_image_file, devices_list, selected_date, filter_minutes, time_zone_s, stretch_by):
|
|
|
|
devices_c = len(devices_list)
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#recreate the .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
arr_source = np.zeros((1, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
arr_stretched_sorted = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
device_counter = 0
|
|
wave_m = [["", -1] for _ in range(1440)]
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
radar_fields_of_interest = []
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except Exception:
|
|
threshold_lst = ["s3_max",50]
|
|
|
|
radar_field = threshold_lst[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
threshold = threshold_lst[1]
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is None:
|
|
return False
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = device_id
|
|
|
|
if radar_threshold_group_st is None:
|
|
radar_threshold_group_st = '["s3_max",50]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",50]
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
st = time.time()
|
|
|
|
#each record in my_data has time, device_id and radar_fields_of_interest in it
|
|
|
|
result_np = None
try:
|
|
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
|
|
print(time.time() - st)
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
if False:
|
|
for record in my_data:
|
|
time_val, device_id, min_val, max_val = record
|
|
radar_threshold = device_id_2_threshold[device_id]
|
|
local_time = time_val.astimezone(target_tz)
|
|
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
|
|
|
|
if (wave_m[minute_m][0] == ""):
|
|
if max_val > radar_threshold:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
else:
|
|
if max_val > radar_threshold:
|
|
if max_val > wave_m[minute_m][1]:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
|
|
|
|
|
|
print(time.time()-st)
|
|
wave_m = result_np
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
rgbsorted_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
#wave_m = FilterGlitches(wave_m, filter_minutes)
|
|
r = 0
|
|
g = 0
|
|
b = 0
|
|
|
|
presence_minutes = {}
|
|
|
|
#we want to generate present_at array
|
|
if isinstance(wave_m[0], np.int64):
|
|
inital_device_id = row_nr_2_device_id[wave_m[0]]
|
|
else:
|
|
inital_device_id = 0
|
|
present_at = [[inital_device_id, 0, 1]] #device_id, minute, duration
|
|
for minute_m in range(1440):
|
|
try:
|
|
if isinstance(wave_m[minute_m], np.int64):
|
|
device_id = row_nr_2_device_id[wave_m[minute_m]]
|
|
else:
|
|
device_id = 0
|
|
|
|
if device_id != "" and device_id != -1:
|
|
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
|
|
rgb_row[minute_m] = b,g,r
|
|
|
|
if Loc2Color[device_id_2_location[device_id]][1] in presence_minutes:
|
|
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [presence_minutes[Loc2Color[device_id_2_location[device_id]][1]][0] + 1, Loc2Color[device_id_2_location[device_id]][0]]
|
|
else:
|
|
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [1, Loc2Color[device_id_2_location[device_id]][0]]
|
|
|
|
if minute_m > 0:
|
|
if present_at[-1][0] != device_id:
|
|
present_at.append([device_id, minute_m, 1])
|
|
else:
|
|
present_at[-1][2] += 1
|
|
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
start_minute = 0
|
|
for color_key in sorted(presence_minutes):
|
|
print(color_key, presence_minutes[color_key])
|
|
rgbsorted_row[start_minute:start_minute+presence_minutes[color_key][0]] = presence_minutes[color_key][1][::-1]
|
|
start_minute += presence_minutes[color_key][0]
|
|
|
|
#we need to save present_at list to blob
|
|
SaveObjectInBlob(image_file+".bin", present_at)
|
|
#present_at_back_s = ReadObjectMinIO("daily-maps", image_file+".bin")
|
|
#present_at_back = json.loads(present_at_back_s)
|
|
#print(present_at_back)
|
|
for stretch_index in range(stretch_by):
|
|
y = stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
arr_stretched_sorted[y, :] = rgbsorted_row
|
|
#print("stop")
|
|
#print(r,g,b)
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
SaveImageInBlob(image_file[:-4]+"S.png", arr_stretched_sorted)
|
|
|
|
|
|
def GenerateFullLocationMap(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreateFullLocationMap(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
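
# Hypothetical call, shown only as an illustration -- the file name, deployment id, date
# and time zone are made up. It would render the detailed "analog" radar map (chart_type 2)
# in color for one deployment and day, with global scaling off and the default glitch filter.
def _example_generate_full_location_map():
    GenerateFullLocationMap(
        map_file="deployment_42_2025_01_01.png",  # object name handed to SaveImageInBlob
        deployment_id=42,
        ddate="2025-01-01",
        recreate_or_not=True,
        chart_type=2,          # 2 = "analog" detailed radar stripes
        bw=False,
        motion=False,
        scale_global=False,
        fast=True,
        time_zone_s="America/New_York",
        filter_minutes=5,
    )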
|
|
|
|
def GenerateFullLocationMapLabelsOut(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreateFullLocationMapLabelsOut(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
|
|
|
|
def CreateMapFast(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by):
|
|
global Id2MACDict
|
|
|
|
st = time.time()
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
try:
|
|
|
|
#stretch_to_min_max = True
|
|
|
|
#current_date_p = selected_date.replace("-", "_")
|
|
#current_date_s = selected_date
|
|
|
|
lower_than200 = 0
|
|
larger_than200 = 0
|
|
ids_list = []
|
|
for details in devices_list[0]:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
if dev_id < 200:
|
|
lower_than200 += 1
|
|
else:
|
|
larger_than200 += 1
|
|
|
|
if lower_than200 > 0 and larger_than200 > 0:
|
|
return False, []
|
|
|
|
if larger_than200 > 0:
|
|
sensors_c = len(s_table)
|
|
else: #old sensors not supported
|
|
return False, []
|
|
|
|
|
|
devices_c = len(devices_list[0])
|
|
devices_list_str = ",".join(map(str, devices_list[1]))
|
|
image_file = map_file
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
temp_offset = -16
|
|
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
|
|
print(sql)
|
|
#print(sql)
|
|
#st = time.time()
|
|
print(f"@1 ----{time.time() - st}")
|
|
with get_db_connection() as conn:
|
|
print(f"@1a ----{time.time() - st}")
|
|
with conn.cursor() as cur:
|
|
print(f"@1b ----{time.time() - st}")
|
|
cur.execute(sql)
|
|
day_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if day_data is None:
|
|
print(f"@1c ----{time.time() - st}")
|
|
return False, []
|
|
print(f"@2 ----{time.time() - st}")
|
|
|
|
stretch_by = 10
|
|
minutes = 1440
|
|
stripes = devices_c * sensors_c #2 for upper maxes, lower mins
|
|
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
|
|
print(f"@3 ----{time.time() - st}")
|
|
st = time.time()
|
|
arr_stretched_template = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) # 3 for RGB channels
|
|
print(f"@4a ----{time.time() - st}")
|
|
#st = time.time()
|
|
#arr_source = fill_array_from_timescale(day_data, time_from_str, devices_list[1], arr_source_template, time_zone_s)
|
|
#print(f"@4b ----{time.time() - st}")
|
|
#st = time.time()
|
|
#arr_source = fast_fill_array_from_timescale_bad(day_data, time_from_str, devices_list[1], arr_source_template, time_zone_s)
|
|
#print(f"@4n ----{time.time() - st}")
|
|
st = time.time()
|
|
arr_source = fast_fill_array_from_timescale(day_data, time_from_str, devices_list[1], arr_source_template, time_zone_s)
|
|
#arr_source = fill_array_from_timescale(day_data, time_from_str, devices_list[1], arr_source_template, time_zone_s)
|
|
print(f"@5 ----{time.time() - st}")
|
|
arr_source = AddLimits_optimized(arr_source, devices_c, sensors_c, percentile=100)
|
|
print(f"@6 ----{time.time() - st}")
|
|
scaled_day = CalcExtremes(arr_source, minutes, stripes)
|
|
print(f"@7 ----{time.time() - st}")
|
|
arr_stretched, vocs_scaled = FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched_template, group_by, bw)
|
|
print(f"@8 ----{time.time() - st}")
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
print(f"@9 ----{time.time() - st}")
|
|
return True, vocs_scaled
|
|
|
|
except Exception as e:
|
|
AddToLog(traceback.format_exc())
|
|
return False, []
|
|
|
|
def get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_part (str): Radar column name, defaults to 'radar'
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+ {temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.pressure_amplitude,
|
|
sr.max_light,
|
|
rr.radar,
|
|
sr.min_s0 as sensor_min_s0,
|
|
sr.min_s1 as sensor_min_s1,
|
|
sr.min_s2 as sensor_min_s2,
|
|
sr.min_s3 as sensor_min_s3,
|
|
sr.min_s4 as sensor_min_s4,
|
|
sr.min_s5 as sensor_min_s5,
|
|
sr.min_s6 as sensor_min_s6,
|
|
sr.min_s7 as sensor_min_s7,
|
|
sr.min_s8 as sensor_min_s8,
|
|
sr.min_s9 as sensor_min_s9
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS pressure_amplitude,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS radar
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
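
# Minimal usage sketch (device ids and times are illustrative). The order of ids_list drives
# the CASE ... WHEN ordering, so rows come back grouped by device in the same order the
# stripes are drawn; radar_part here is the "s28" composite that CreateMapFast substitutes.
def _example_deployment_query():
    return get_deployment_query(
        devices_list_str="559,561",
        time_from_str="2025-01-01 00:00:00+0000",
        time_to_str="2025-01-02 00:00:00+0000",
        ids_list=[559, 561],
        radar_part="(s2+s3+s4+s5+s6+s7+s8)/7",
        temp_offset=-16,
    )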
|
|
|
|
def get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+{temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.avg_pressure,
|
|
sr.max_light,
|
|
sr.min_s0 as smell_s0,
|
|
sr.min_s1 as smell_s1,
|
|
sr.min_s2 as smell_s2,
|
|
sr.min_s3 as smell_s3,
|
|
sr.min_s4 as smell_s4,
|
|
sr.min_s5 as smell_s5,
|
|
sr.min_s6 as smell_s6,
|
|
sr.min_s7 as smell_s7,
|
|
sr.min_s8 as smell_s8,
|
|
sr.min_s9 as smell_s9,
|
|
rr.absent as radar_absent,
|
|
rr.moving as radar_moving,
|
|
rr.stationary as radar_stationary,
|
|
rr.both as radar_both,
|
|
rr.m0 as radar_m0,
|
|
rr.m1 as radar_m1,
|
|
rr.m2 as radar_m2,
|
|
rr.m3 as radar_m3,
|
|
rr.m4 as radar_m4,
|
|
rr.m5 as radar_m5,
|
|
rr.m6 as radar_m6,
|
|
rr.m7 as radar_m7,
|
|
rr.m8 as radar_m8,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS avg_pressure,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(absent) AS absent,
|
|
MAX(moving) AS moving,
|
|
MAX(stationary) AS stationary,
|
|
MAX(\"both\") AS both,
|
|
MAX(m0) AS m0,
|
|
MAX(m1) AS m1,
|
|
MAX(m2) AS m2,
|
|
MAX(m3) AS m3,
|
|
MAX(m4) AS m4,
|
|
MAX(m5) AS m5,
|
|
MAX(m6) AS m6,
|
|
MAX(m7) AS m7,
|
|
MAX(m8) AS m8,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for per-minute radar stationary readings (s2..s8), joined against sensor-reading minutes, based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def get_device_radar_s28_only_query(time_from_str, time_to_str, device_id):
|
|
sql = f"""
|
|
SELECT
|
|
time,
|
|
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
|
|
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
ORDER BY
|
|
time ASC
|
|
"""
|
|
return sql
|
|
|
|
def get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, device_id):
|
|
sql = f"""
|
|
SELECT
|
|
time,
|
|
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
|
|
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
ORDER BY
|
|
time ASC
|
|
"""
|
|
return sql
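
# Hedged sketch (an assumption, not the project's existing pattern): the query builders in this
# module interpolate values straight into the SQL text. If callers ever pass untrusted input, a
# psycopg2-parameterized variant of the radar-only query could look like this. The helper name
# and its usage are hypothetical.
def _example_radar_only_query_parameterized(cur, device_ids, time_from_str, time_to_str):
    sql = """
        SELECT time,
               (s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
               (m2+m3+m4+m5+m6+m7+m8)/7 AS m28
        FROM radar_readings
        WHERE device_id = ANY(%s)
          AND time >= %s
          AND time < %s
        ORDER BY time ASC
    """
    # psycopg2 adapts a Python list to a PostgreSQL array for ANY(%s)
    cur.execute(sql, (list(device_ids), time_from_str, time_to_str))
    return cur.fetchall()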
|
|
|
|
|
|
def get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for selected per-minute radar aggregates based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
        ids_list (list): List of device IDs in priority order for sorting
        radar_fields_of_interest (list): Radar aggregate fields to include (e.g. "s28_max", "m08_max")
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
radar_fields_to_get = ""
|
|
q_parts = ""
|
|
for field in radar_fields_of_interest:
|
|
if field == "s28_min":
|
|
q_part = "MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min"
|
|
elif field == "s28_max":
|
|
q_part = "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max"
|
|
elif field == "m08_max":
|
|
q_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max"
|
|
elif field == "s2_max":
|
|
q_part = "MAX(s2) AS s2_max"
|
|
elif field == "s3_max":
|
|
q_part = "MAX(s3) AS s3_max"
|
|
elif field == "s4_max":
|
|
q_part = "MAX(s4) AS s4_max"
|
|
elif field == "s5_max":
|
|
q_part = "MAX(s5) AS s5_max"
|
|
elif field == "s6_max":
|
|
q_part = "MAX(s6) AS s6_max"
|
|
elif field == "s7_max":
|
|
q_part = "MAX(s7) AS s7_max"
|
|
elif field == "s8_max":
|
|
q_part = "MAX(s8) AS s8_max"
|
|
elif field == "m0_max":
|
|
q_part = "MAX(m0) AS m0_max"
|
|
elif field == "m1_max":
|
|
q_part = "MAX(m1) AS m1_max"
|
|
elif field == "m2_max":
|
|
q_part = "MAX(m2) AS m2_max"
|
|
elif field == "m3_max":
|
|
q_part = "MAX(m3) AS m3_max"
|
|
elif field == "m4_max":
|
|
q_part = "MAX(m4) AS m4_max"
|
|
elif field == "m5_max":
|
|
q_part = "MAX(m5) AS m5_max"
|
|
elif field == "m6_max":
|
|
q_part = "MAX(m6) AS m6_max"
|
|
elif field == "m7_max":
|
|
q_part = "MAX(m7) AS m7_max"
|
|
elif field == "m8_max":
|
|
q_part = "MAX(m8) AS m8_max"
|
|
else:
|
|
q_part = field
|
|
|
|
if q_parts == "":
|
|
q_parts = q_part
|
|
else:
|
|
q_parts = q_parts + ", " + q_part
|
|
if radar_fields_to_get == "":
|
|
radar_fields_to_get = field
|
|
else:
|
|
radar_fields_to_get = radar_fields_to_get + ", " + field
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
{radar_fields_to_get}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{q_parts}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
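
# Illustrative sketch (not called anywhere): with radar_fields_of_interest=["s28_max", "m08_max"]
# the collapsed query selects "s28_max, m08_max" in the outer SELECT and the matching MAX(...)
# aggregates in the inner SELECT. The device ids and times are placeholder values.
def _example_preview_radar_collapsed_query():
    return get_deployment_radar_only_colapsed_query(
        "559,231",
        "2024-01-01 00:00:00+00",
        "2024-01-02 00:00:00+00",
        [559, 231],
        ["s28_max", "m08_max"],
    )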
|
|
|
|
def get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for detailed per-minute radar aggregates based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
This is looking for presence, NOT absence... otherwise all MAXes would need to be converted to MINs
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
m0_max,
|
|
m1_max,
|
|
m2_max,
|
|
m3_max,
|
|
m4_max,
|
|
m5_max,
|
|
m6_max,
|
|
m7_max,
|
|
m8_max,
|
|
m08_max,
|
|
s2_max,
|
|
s3_max,
|
|
s4_max,
|
|
s5_max,
|
|
s6_max,
|
|
s7_max,
|
|
s8_max,
|
|
s28_max,
|
|
s28_min
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(m0) AS m0_max,
|
|
MAX(m1) AS m1_max,
|
|
MAX(m2) AS m2_max,
|
|
MAX(m3) AS m3_max,
|
|
MAX(m4) AS m4_max,
|
|
MAX(m5) AS m5_max,
|
|
MAX(m6) AS m6_max,
|
|
MAX(m7) AS m7_max,
|
|
MAX(m8) AS m8_max,
|
|
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
|
|
MAX(s2) AS s2_max,
|
|
MAX(s3) AS s3_max,
|
|
MAX(s4) AS s4_max,
|
|
MAX(s5) AS s5_max,
|
|
MAX(s6) AS s6_max,
|
|
MAX(s7) AS s7_max,
|
|
MAX(s8) AS s8_max,
|
|
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max,
|
|
MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#light detailed
|
|
"""
|
|
Generate a TimeScaleDB query for light readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
light_max
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(light) AS light_max
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for temperature readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
temp_offset (float): Temperature offset to apply
|
|
|
|
Returns:
|
|
str: Generated SQL query with temperature and alarm level
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
temperature_avg,
|
|
CASE
|
|
WHEN (temperature_avg * 9/5 + 32) <= 50 OR (temperature_avg * 9/5 + 32) >= 90 THEN 2
|
|
WHEN (temperature_avg * 9/5 + 32) <= 60 OR (temperature_avg * 9/5 + 32) >= 80 THEN 1
|
|
ELSE 0
|
|
END AS alarm_level
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature)+{temp_offset} AS temperature_avg
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
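
# Small helper mirroring the alarm_level CASE in the temperature query above (Celsius input,
# thresholds evaluated in Fahrenheit), so the thresholds can be checked in Python. This is an
# added convenience sketch, not used by the query itself.
def _example_temperature_alarm_level(temp_c):
    temp_f = temp_c * 9 / 5 + 32
    if temp_f <= 50 or temp_f >= 90:
        return 2
    if temp_f <= 60 or temp_f >= 80:
        return 1
    return 0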
|
|
|
|
def get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for humidity readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
humidity_offset (float): Humidity offset to apply (currently not used in the query)
|
|
|
|
Returns:
|
|
str: Generated SQL query with humidity and alarm level
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
col_expr = f"2.3592 * AVG(humidity) + 23.5546" #= 2.3592 * J2 + 33.5546
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
humidity_avg,
|
|
CASE
|
|
WHEN humidity_avg <= 20 OR humidity_avg >= 60 THEN 2
|
|
WHEN humidity_avg <= 30 OR humidity_avg >= 50 THEN 1
|
|
ELSE 0
|
|
END AS alarm_level
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{col_expr} AS humidity_avg
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for smell readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
humidity_offset (float): Humidity offset to apply (currently not used in the query)
|
|
|
|
Returns:
|
|
str: Generated SQL query with smell components
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
min_s0,
|
|
min_s1,
|
|
min_s2,
|
|
min_s3,
|
|
min_s4,
|
|
min_s5,
|
|
min_s6,
|
|
min_s7,
|
|
min_s8,
|
|
min_s9
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
|
|
def get_deployment_radar_only_detailed_all_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for detailed per-minute radar aggregates (including absent/moving/stationary fields) based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
This is looking for presence, NOT absence... otherwise all MAXes would need to be converted to MINs
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
absent_min,
|
|
stationary_max,
|
|
moving_max,
|
|
both_max,
|
|
m0_max,
|
|
m1_max,
|
|
m2_max,
|
|
m3_max,
|
|
m4_max,
|
|
m5_max,
|
|
m6_max,
|
|
m7_max,
|
|
m8_max,
|
|
m08_max,
|
|
s2_max,
|
|
s3_max,
|
|
s4_max,
|
|
s5_max,
|
|
s6_max,
|
|
s7_max,
|
|
s8_max,
|
|
s28_max
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MIN(absent) AS absent_min,
|
|
MAX(stationary) AS stationary_max,
|
|
MAX(moving) AS moving_max,
|
|
MAX("both") AS both_max,
|
|
MAX(m0) AS m0_max,
|
|
MAX(m1) AS m1_max,
|
|
MAX(m2) AS m2_max,
|
|
MAX(m3) AS m3_max,
|
|
MAX(m4) AS m4_max,
|
|
MAX(m5) AS m5_max,
|
|
MAX(m6) AS m6_max,
|
|
MAX(m7) AS m7_max,
|
|
MAX(m8) AS m8_max,
|
|
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
|
|
MAX(s2) AS s2_max,
|
|
MAX(s3) AS s3_max,
|
|
MAX(s4) AS s4_max,
|
|
MAX(s5) AS s5_max,
|
|
MAX(s6) AS s6_max,
|
|
MAX(s7) AS s7_max,
|
|
MAX(s8) AS s8_max,
|
|
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
def get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings in 10-second buckets (the bucket column is still aliased "minute") based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+{temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.avg_pressure,
|
|
sr.max_light,
|
|
sr.min_s0 as smell_s0,
|
|
sr.min_s1 as smell_s1,
|
|
sr.min_s2 as smell_s2,
|
|
sr.min_s3 as smell_s3,
|
|
sr.min_s4 as smell_s4,
|
|
sr.min_s5 as smell_s5,
|
|
sr.min_s6 as smell_s6,
|
|
sr.min_s7 as smell_s7,
|
|
sr.min_s8 as smell_s8,
|
|
sr.min_s9 as smell_s9,
|
|
rr.absent as radar_absent,
|
|
rr.moving as radar_moving,
|
|
rr.stationary as radar_stationary,
|
|
rr.both as radar_both,
|
|
rr.m0 as radar_m0,
|
|
rr.m1 as radar_m1,
|
|
rr.m2 as radar_m2,
|
|
rr.m3 as radar_m3,
|
|
rr.m4 as radar_m4,
|
|
rr.m5 as radar_m5,
|
|
rr.m6 as radar_m6,
|
|
rr.m7 as radar_m7,
|
|
rr.m8 as radar_m8,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS avg_pressure,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS minute,
|
|
device_id,
|
|
MAX(absent) AS absent,
|
|
MAX(moving) AS moving,
|
|
MAX(stationary) AS stationary,
|
|
MAX(\"both\") AS both,
|
|
MAX(m0) AS m0,
|
|
MAX(m1) AS m1,
|
|
MAX(m2) AS m2,
|
|
MAX(m3) AS m3,
|
|
MAX(m4) AS m4,
|
|
MAX(m5) AS m5,
|
|
MAX(m6) AS m6,
|
|
MAX(m7) AS m7,
|
|
MAX(m8) AS m8,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def export_query_to_minio_chunked(connection_params, query, minio_client, bucket_name, blob_name=None, chunksize=10000):
|
|
"""
|
|
Export query results to MinIO as CSV in chunks to handle large datasets
|
|
|
|
Parameters:
|
|
connection_params (dict): Database connection parameters
|
|
query (str): SQL query to execute
|
|
minio_client: Initialized MinIO client
|
|
bucket_name (str): Name of the MinIO bucket
|
|
blob_name (str): Name for the blob in MinIO. If None, generates timestamped name
|
|
chunksize (int): Number of rows to process at a time
|
|
|
|
Returns:
|
|
str: Name of the created blob
|
|
"""
|
|
try:
|
|
# Create direct connection using psycopg2
|
|
conn = psycopg2.connect(**connection_params)
|
|
|
|
# Generate blob name if not provided
|
|
if blob_name is None:
|
|
timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')  # the module imports the datetime package, not the datetime class
|
|
blob_name = f'query_results_{timestamp}.csv'
|
|
|
|
# Create a buffer to store CSV data
|
|
csv_buffer = io.StringIO()
|
|
|
|
# Stream the query results in chunks
|
|
first_chunk = True
|
|
for chunk_df in pd.read_sql_query(query, conn, chunksize=chunksize):
|
|
# Write header only for the first chunk
|
|
chunk_df.to_csv(
|
|
csv_buffer,
|
|
index=False,
|
|
header=first_chunk,
|
|
mode='a'
|
|
)
|
|
first_chunk = False
|
|
|
|
# Get the CSV data as bytes
|
|
csv_buffer.seek(0)
|
|
csv_bytes = csv_buffer.getvalue().encode('utf-8')
|
|
|
|
# Upload to MinIO
|
|
minio_client.put_object(
|
|
bucket_name,
|
|
blob_name,
|
|
io.BytesIO(csv_bytes),
|
|
len(csv_bytes)
|
|
)
|
|
|
|
print(f"Data exported successfully to MinIO: {bucket_name}/{blob_name}")
|
|
return blob_name
|
|
|
|
except Exception as e:
|
|
print(f"Error exporting data: {str(e)}")
|
|
print(f"Traceback: {traceback.format_exc()}")
|
|
raise
|
|
|
|
finally:
|
|
if 'conn' in locals():
|
|
conn.close()
|
|
if 'csv_buffer' in locals():
|
|
csv_buffer.close()
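
# Hedged usage sketch for export_query_to_minio_chunked. The endpoint, credentials, bucket and
# query below are placeholders; the real code builds connection_params from the DB_* settings
# and reuses the module's MinIO client.
def _example_export_to_minio():
    client = Minio("minio.example.local:9000", access_key="ACCESS", secret_key="SECRET", secure=False)
    connection_params = {
        "host": "db.example.local",
        "database": "sensors",
        "user": "reader",
        "password": "secret",
        "port": 5432,
    }
    return export_query_to_minio_chunked(
        connection_params,
        "SELECT 1 AS one",
        client,
        "data-downloads",
        blob_name="example.csv",
        chunksize=10000,
    )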
|
|
|
|
def export_query_to_csv_pandas(connection_params, query, output_path=None):
|
|
"""
|
|
Export query results to CSV using pandas with psycopg2 connection
|
|
|
|
Parameters:
|
|
connection_params (dict): Database connection parameters
|
|
query (str): SQL query to execute
|
|
output_path (str): Path for output CSV file. If None, generates timestamped filename
|
|
|
|
Returns:
|
|
str: Path to the created CSV file
|
|
"""
|
|
try:
|
|
# Create direct connection using psycopg2
|
|
conn = psycopg2.connect(**connection_params)
|
|
|
|
# Generate output path if not provided
|
|
if output_path is None:
|
|
timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')  # the module imports the datetime package, not the datetime class
|
|
output_path = f'query_results_{timestamp}.csv'
|
|
|
|
# Read query directly into DataFrame using the psycopg2 connection
|
|
df = pd.read_sql_query(query, conn)
|
|
|
|
# Export to CSV with all headers
|
|
df.to_csv(output_path, index=False)
|
|
print(f"Data exported successfully to {output_path}")
|
|
return output_path
|
|
|
|
except Exception as e:
|
|
print(f"Error exporting data: {str(e)}")
|
|
raise
|
|
|
|
finally:
|
|
if 'conn' in locals():
|
|
conn.close()
|
|
|
|
|
|
def CreateDailyCSV(csv_file, devices_list, selected_date, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset):
|
|
global Id2MACDict
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
try:
|
|
|
|
#stretch_to_min_max = True
|
|
|
|
#current_date_p = selected_date.replace("-", "_")
|
|
#current_date_s = selected_date
|
|
|
|
lower_than200 = 0
|
|
larger_than200 = 0
|
|
ids_list = []
|
|
for details in devices_list[0]:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
if dev_id < 200:
|
|
lower_than200 += 1
|
|
else:
|
|
larger_than200 += 1
|
|
|
|
if lower_than200 > 0 and larger_than200 > 0:
|
|
return ""
|
|
|
|
if larger_than200 > 0:
|
|
sensors_c = len(s_table)
|
|
else: #old sensors not supported
|
|
return ""
|
|
|
|
devices_c = len(devices_list[0])
|
|
devices_list_str = ",".join(map(str, devices_list[1]))
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
if consolidated_by == "by_minute_rc":
|
|
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
|
|
elif consolidated_by == "by_deca_rd":
|
|
sql = get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
|
|
elif consolidated_by == "by_minute_rd":
|
|
            sql = get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
        else:
            logging.error(f"CreateDailyCSV: unsupported consolidated_by value '{consolidated_by}'")
            return ""
|
|
|
|
print(sql)
|
|
|
|
connection_params = {
|
|
'host': DB_HOST,
|
|
'database': DB_NAME,
|
|
'user': DB_USER,
|
|
'password': DB_PASSWORD,
|
|
'port': DB_PORT
|
|
}
|
|
|
|
# Using pandas approach (recommended)
|
|
output_file = export_query_to_minio_chunked(
|
|
connection_params,
|
|
sql,
|
|
miniIO_blob_client,
|
|
"data-downloads",
|
|
csv_file,
|
|
chunksize=10000
|
|
)
|
|
return output_file
|
|
except Exception as e:
|
|
logging.error(str(traceback.format_exc()))
|
|
return ""
|
|
|
|
def GetBlob(file_name, bucket_name="daily-maps"):
|
|
"""
|
|
Retrieve image from blob storage
|
|
|
|
Args:
|
|
file_name (str): Name of the file to retrieve from blob storage
|
|
|
|
Returns:
|
|
tuple: (image_bytes, content_type)
|
|
Returns None, None if image not found or error occurs
|
|
"""
|
|
logger.debug(f"GetBlob({file_name})")
|
|
try:
|
|
# Get the object from blob storage
|
|
data = miniIO_blob_client.get_object(
|
|
bucket_name,
|
|
file_name
|
|
)
|
|
|
|
# Read the data into bytes
|
|
data_bytes = data.read()
|
|
#logger.debug(f"len(data_bytes)={len(data_bytes)}")
|
|
|
|
if bucket_name == "daily-maps":
|
|
return data_bytes, 'image/png'
|
|
else:
|
|
return data_bytes, 'application/zip'
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error: {traceback.format_exc()}")
|
|
return None, None
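
# Hedged sketch of how GetBlob's (bytes, content_type) pair might be returned from a Falcon
# responder. The resource class and its route are hypothetical; they are not part of this module.
class _ExampleDailyMapResource:
    def on_get(self, req, resp, file_name):
        data_bytes, content_type = GetBlob(file_name, bucket_name="daily-maps")
        if data_bytes is None:
            resp.status = falcon.HTTP_404
            resp.media = {"error": "not found"}
            return
        resp.status = HTTP_200
        resp.content_type = content_type
        resp.data = data_bytes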
|
|
|
|
|
|
def MapFileToDate(map_file):
|
|
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
|
|
parts = map_file.split("/")
|
|
parts = parts[-1].split("_")
|
|
|
|
if "-" in parts[0]:
|
|
date_string = parts[0]
|
|
elif "-" in parts[1]:
|
|
date_string = parts[1]
|
|
|
|
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_only = date_object.date()
|
|
return date_only
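
# Example derived from the sample path in the comment above: the date portion is taken from the
# file name, so this call returns datetime.date(2023, 11, 7).
def _example_map_file_to_date():
    return MapFileToDate("/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png")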
|
|
|
|
def CSVFileToDate(csv_file):
|
|
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
|
|
parts = csv_file.split("/")
|
|
parts = parts[-1].split("_")
|
|
|
|
if "-" in parts[0]:
|
|
date_string = parts[0]
|
|
elif "-" in parts[1]:
|
|
date_string = parts[1]
|
|
|
|
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_only = date_object.date()
|
|
return date_only
|
|
|
|
def GetMACsListSimple(list_of_lists):
|
|
|
|
result = []
|
|
if len(list_of_lists) > 0:
|
|
result = [sublist[3] for sublist in list_of_lists]
|
|
|
|
return(result)
|
|
|
|
def datetime_handler(obj):
|
|
"""Handle datetime serialization for JSON"""
|
|
if isinstance(obj, datetime.datetime):
|
|
if obj.tzinfo:
|
|
return obj.isoformat()
|
|
return obj.strftime('%Y-%m-%d %H:%M:%S.%f')
|
|
raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
|
|
|
|
def ReadCandles(file, sensor, period, time_from, time_to):
|
|
result = []
|
|
if sensor == "voc0":
|
|
sqlr = "SELECT * from vocs_0"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc1":
|
|
sqlr = "SELECT * from vocs_1"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc2":
|
|
sqlr = "SELECT * from vocs_2"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc3":
|
|
sqlr = "SELECT * from vocs_3"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc4":
|
|
sqlr = "SELECT * from vocs_4"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc5":
|
|
sqlr = "SELECT * from vocs_5"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc6":
|
|
sqlr = "SELECT * from vocs_6"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc7":
|
|
sqlr = "SELECT * from vocs_7"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc8":
|
|
sqlr = "SELECT * from vocs_8"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
elif sensor == "voc9":
|
|
sqlr = "SELECT * from vocs_9"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
else:
|
|
sqlr = "SELECT * from "+sensor+"s"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
result = QuerrySql(file, sqlr)
|
|
return result
|
|
|
|
def ReadSensor(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
legal_min, legal_max, window = sensor_legal_values[sensor]
|
|
|
|
result = []
|
|
if sensor == "radar":
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
elif sensor[0] == "s":
|
|
sqlr = f"SELECT time, {sensor} AS smell FROM sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
if sensor == "temperature":
|
|
sqlr = f"SELECT time, {sensor} - 16 from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
|
|
|
|
def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
|
|
import datetime
|
|
from datetime import timezone
|
|
|
|
# Convert epoch to datetime and format as ISO 8601 strings with timezone
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
legal_min, legal_max, window = sensor_legal_values[sensor]
|
|
|
|
# If bucket_size is provided (i.e. not "no"), then use time bucketing.
|
|
use_bucket = bucket_size != "no"
|
|
if use_bucket:
|
|
# Map the shorthand bucket sizes to PostgreSQL interval strings.
|
|
mapping = {
|
|
"10s": "10 seconds",
|
|
"1m": "1 minute",
|
|
"5m": "5 minutes",
|
|
"10m": "10 minutes",
|
|
"15m": "15 minutes",
|
|
"30m": "30 minutes",
|
|
"1h": "1 hour"
|
|
}
|
|
bucket_interval = mapping.get(bucket_size, bucket_size)
|
|
|
|
avgmax = "AVG"
|
|
# Build the SQL query based on sensor type.
|
|
if sensor == "radar":
|
|
# For radar sensors, a special aggregation is needed.
|
|
avgmax = "MAX"
|
|
if radar_part == "s28":
|
|
radar_expr = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
else:
|
|
radar_expr = radar_part
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({radar_expr}) AS radar
|
|
FROM radar_readings
|
|
WHERE device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {radar_expr} AS radar
|
|
FROM radar_readings
|
|
WHERE device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor[0] == "s":
|
|
# For sensors whose name starts with "s" (for example, smell sensors)
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({sensor}) AS smell
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {sensor} AS smell
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "co2":
|
|
alias = sensor
|
|
sensor = "s4"
|
|
sqlr = f"""
|
|
WITH s4_values AS (
|
|
SELECT s4
|
|
FROM public.sensor_readings
|
|
WHERE device_id = 559
|
|
AND s4 IS NOT NULL
|
|
),
|
|
s4_percentile AS (
|
|
SELECT percentile_cont(0.25) WITHIN GROUP (ORDER BY s4 DESC) AS s4_25_percentile
|
|
FROM s4_values
|
|
)
|
|
SELECT s4_25_percentile
|
|
FROM s4_percentile;
|
|
"""
|
|
co2_max = 22536000#102400000
|
|
co2_min = 2400000#16825674 #387585
|
|
|
|
real_co2_max = 2000
|
|
real_co2_min = 430
|
|
|
|
|
|
#logger.debug(f"sqlr = {sqlr}")
|
|
#with get_db_connection() as conn:
|
|
#with conn.cursor() as cur:
|
|
#cur.execute(sqlr)
|
|
#result = cur.fetchall()
|
|
#co2_max = result[0][0]
|
|
#co2_min = result[0][1]
|
|
#=E17+E20*(102400000-A24)/B18
|
|
#col_expr = f"{real_co2_min}+({real_co2_max}-{real_co2_min})*(102400000-{sensor})/({co2_min}-{co2_max})"
|
|
col_expr = f"GREATEST({real_co2_min},{real_co2_min}+({real_co2_max}-{real_co2_min})*({co2_max}-percentile_cont(0.5) WITHIN GROUP (ORDER BY {sensor}))/({co2_max}-{co2_min}))"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "voc":
|
|
sensor = "s9"
|
|
alias = sensor
|
|
col_expr = f"{sensor} - 0"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "humidity":
|
|
alias = sensor
|
|
col_expr = f"2.3592 * {sensor} + 32.5546" #= 2.3592 * J2 + 33.5546
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
# For other sensors (including temperature, which requires a subtraction)
|
|
alias = sensor
|
|
col_expr = sensor
|
|
if sensor == "temperature":
|
|
col_expr = f"{sensor} - 16"
|
|
alias = "temperature"
|
|
elif sensor == "light":
|
|
avgmax = "MAX"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
|
|
|
|
def ReadRadarDetail(device_id, sensor, time_from_epoch, time_to_epoch, alt_key_state):
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
#sensor_index = int(sensor_index)
|
|
|
|
|
|
result = []
|
|
|
|
#time_period_sec can be "10" (RAW) or "60"
|
|
if alt_key_state == "1": #"RAW = 10 sec"
|
|
radar_part = sensor
|
|
if sensor == "m08_max":
|
|
radar_part = "(m0+m1+m2+m3+m4+m5+m6+m7+m8)/9"
|
|
elif sensor == "s28_max" or sensor == "s28_min":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
if sensor == "m08_max":
|
|
radar_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/7) AS m08_max"
|
|
elif sensor == "s28_max":
|
|
radar_part = f"MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
|
|
elif sensor == "s28_min":
|
|
radar_part = f"MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
|
|
else:
|
|
radar_part = f"MAX({sensor}) AS {sensor}"
|
|
|
|
sqlr = f"""
|
|
SELECT
|
|
minute,
|
|
{sensor} as {sensor}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
{radar_part}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute
|
|
) rr
|
|
|
|
ORDER BY
|
|
minute
|
|
"""
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
|
|
|
|
def check_and_parse(data_str):
|
|
# Remove whitespace to handle cases with spaces
|
|
cleaned = data_str.strip()
|
|
# Check if second character is '['
|
|
is_list_of_lists = len(cleaned) > 1 and cleaned[1] == '['
|
|
if cleaned[0] == '[':
|
|
# Parse the string regardless of type
|
|
parsed = json.loads(cleaned)
|
|
else:
|
|
parsed = cleaned.split(",")
|
|
return is_list_of_lists, parsed
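
# Example inputs (illustrative): a JSON-style list of lists versus a plain comma-separated string.
def _example_check_and_parse():
    nested, parsed_nested = check_and_parse('[["a", 1], ["b", 2]]')   # (True, [["a", 1], ["b", 2]])
    flat, parsed_flat = check_and_parse("a,b,c")                      # (False, ["a", "b", "c"])
    return nested, parsed_nested, flat, parsed_flat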
|
|
|
|
def clean_data_with_rolling_spline(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Filter outliers using rolling median and replace with spline interpolation
|
|
Returns data in the same format as input: [(timestamp, value), ...]
|
|
"""
|
|
# Unzip the input tuples
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float) # explicitly convert to float
|
|
|
|
# Calculate rolling median and MAD using a safer approach
|
|
rolling_median = []
|
|
rolling_mad = []
|
|
|
|
for i in range(len(y)):
|
|
start_idx = max(0, i - window//2)
|
|
end_idx = min(len(y), i + window//2 + 1)
|
|
window_values = y[start_idx:end_idx]
|
|
|
|
# Skip if window is empty or contains invalid values
|
|
if len(window_values) == 0 or np.any(np.isnan(window_values)):
|
|
rolling_median.append(y[i])
|
|
rolling_mad.append(0)
|
|
continue
|
|
|
|
med = np.median(window_values)
|
|
mad = np.median(np.abs(window_values - med))
|
|
|
|
rolling_median.append(med)
|
|
rolling_mad.append(mad)
|
|
|
|
rolling_median = np.array(rolling_median)
|
|
rolling_mad = np.array(rolling_mad)
|
|
|
|
# Identify outliers (protect against division by zero)
|
|
outlier_mask = np.abs(y - rolling_median) > threshold * (rolling_mad + 1e-10)
|
|
good_data_mask = ~outlier_mask
|
|
|
|
if np.sum(good_data_mask) < 4:
|
|
return line_part_t # return original data if we can't interpolate
|
|
|
|
    # scipy's interpolate module is only imported at module level in commented-out code,
    # so import it locally before building the spline
    from scipy import interpolate

    try:
|
|
# Create and apply spline
|
|
spline = interpolate.InterpolatedUnivariateSpline(
|
|
x[good_data_mask],
|
|
y[good_data_mask],
|
|
k=3
|
|
)
|
|
|
|
y_cleaned = y.copy()
|
|
y_cleaned[outlier_mask] = spline(x[outlier_mask])
|
|
except Exception as e:
|
|
print(f"Spline interpolation failed: {e}")
|
|
return line_part_t
|
|
|
|
# Return in the same format as input
|
|
return list(zip(x, y_cleaned))
|
|
|
|
def DatesSpan(date_from: str, date_to: str) -> list:
|
|
"""
|
|
Generate a list of dates between date_from and date_to (inclusive).
|
|
Handles cases where date_from is later than date_to.
|
|
|
|
Args:
|
|
date_from (str): Start date in 'YYYY-MM-DD' format
|
|
date_to (str): End date in 'YYYY-MM-DD' format
|
|
|
|
Returns:
|
|
list: List of dates in 'YYYY-MM-DD' format
|
|
"""
|
|
# Convert string dates to datetime objects
|
|
start_date = datetime.datetime.strptime(date_from, '%Y-%m-%d')
|
|
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
|
|
|
|
# Determine direction and swap dates if necessary
|
|
if start_date > end_date:
|
|
start_date, end_date = end_date, start_date
|
|
|
|
# Generate list of dates
|
|
dates_list = []
|
|
current_date = start_date
|
|
|
|
while current_date <= end_date:
|
|
dates_list.append(current_date.strftime('%Y-%m-%d'))
|
|
current_date += timedelta(days=1)
|
|
|
|
# Reverse the list if original date_from was later than date_to
|
|
#if datetime.datetime.strptime(date_from, '%Y-%m-%d') > datetime.datetime.strptime(date_to, '%Y-%m-%d'):
|
|
# dates_list.reverse()
|
|
|
|
return dates_list
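
# Example (illustrative): the span is inclusive and tolerates reversed arguments, so both calls
# below yield ['2024-03-30', '2024-03-31', '2024-04-01', '2024-04-02'].
def _example_dates_span():
    forward = DatesSpan("2024-03-30", "2024-04-02")
    backward = DatesSpan("2024-04-02", "2024-03-30")
    return forward, backward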
|
|
|
|
def zip_blobs(blob_paths, zip_blob_name, bucket_name, minio_client=None):
|
|
"""
|
|
Zip multiple blobs from MinIO storage into a single zip file without saving locally.
|
|
|
|
Args:
|
|
blob_paths (list): List of blob paths to zip
|
|
zip_blob_name (str): Name/path for the output zip file in MinIO
|
|
bucket_name (str): MinIO bucket name
|
|
minio_client (Minio, optional): Existing MinIO client instance
|
|
|
|
Returns:
|
|
bool: True if successful, False otherwise
|
|
"""
|
|
try:
|
|
# Create zip file in memory
|
|
zip_buffer = BytesIO()
|
|
|
|
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
|
# Process each blob
|
|
for blob_path in blob_paths:
|
|
# Get file name from path for zip entry
|
|
file_name = blob_path.split('/')[-1]
|
|
|
|
# Get blob data into memory
|
|
data = minio_client.get_object(bucket_name, blob_path.lstrip('/'))
|
|
|
|
# Add file to zip
|
|
zip_file.writestr(file_name, data.read())
|
|
|
|
# Close the object to free memory
|
|
data.close()
|
|
|
|
# Seek to start of zip file
|
|
zip_buffer.seek(0)
|
|
|
|
# Upload zip file to MinIO
|
|
minio_client.put_object(
|
|
bucket_name,
|
|
zip_blob_name.lstrip('/'),
|
|
zip_buffer,
|
|
length=zip_buffer.getbuffer().nbytes
|
|
)
|
|
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f"Error creating zip file: {str(e)}")
|
|
return False
|
|
finally:
|
|
# Clean up
|
|
zip_buffer.close()
|
|
|
|
def clean_data_with_spline(x, y, threshold=2.0):
|
|
"""
|
|
Filter outliers and replace with spline interpolation
|
|
|
|
Parameters:
|
|
x : array-like, timestamps or x-coordinates
|
|
y : array-like, values to be filtered
|
|
threshold : float, number of median absolute deviations for outlier detection
|
|
|
|
Returns:
|
|
array-like : cleaned data with outliers replaced by spline interpolation
|
|
"""
|
|
# Convert inputs to numpy arrays
|
|
x = np.array(x)
|
|
y = np.array(y)
|
|
|
|
# Calculate median and median absolute deviation
|
|
median = np.median(y)
|
|
mad = stats.median_abs_deviation(y)
|
|
|
|
# Identify outliers
|
|
outlier_mask = np.abs(y - median) > threshold * mad
|
|
good_data_mask = ~outlier_mask
|
|
|
|
# If we have too few good points for interpolation, adjust threshold
|
|
min_points_needed = 4 # minimum points needed for cubic spline
|
|
if np.sum(good_data_mask) < min_points_needed:
|
|
return y # return original data if we can't interpolate
|
|
|
|
    from scipy import interpolate  # module-level import of scipy.interpolate is commented out

    # Create spline with non-outlier data
|
|
spline = interpolate.InterpolatedUnivariateSpline(
|
|
x[good_data_mask],
|
|
y[good_data_mask],
|
|
k=3 # cubic spline
|
|
)
|
|
|
|
# Replace outliers with interpolated values
|
|
y_cleaned = y.copy()
|
|
y_cleaned[outlier_mask] = spline(x[outlier_mask])
|
|
|
|
return y_cleaned
|
|
|
|
def clean_data(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Remove obvious outliers based on window comparison
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float)
|
|
|
|
cleaned_data = []
|
|
|
|
for i in range(len(y)):
|
|
# Get window around current point
|
|
start_idx = max(0, i - window//2)
|
|
end_idx = min(len(y), i + window//2 + 1)
|
|
window_values = y[start_idx:end_idx]
|
|
|
|
# Calculate median and MAD for the window
|
|
window_median = np.median(window_values)
|
|
deviation = abs(y[i] - window_median)
|
|
|
|
# Keep point if it's not too far from window median
|
|
if deviation <= threshold * window_median:
|
|
cleaned_data.append((x[i], y[i]))
|
|
#else:
|
|
#print(window_values)
|
|
return cleaned_data
|
|
|
|
def clean_data_fast(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Remove obvious outliers based on window comparison - vectorized version
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float)
|
|
|
|
# Calculate rolling median using numpy
|
|
half_window = window // 2
|
|
medians = np.array([
|
|
np.median(y[max(0, i-half_window):min(len(y), i+half_window+1)])
|
|
for i in range(len(y))
|
|
])
|
|
|
|
# Calculate deviations for all points at once
|
|
deviations = np.abs(y - medians)
|
|
|
|
# Create mask for good points
|
|
good_points = deviations <= threshold * medians
|
|
|
|
# Return filtered data using boolean indexing
|
|
return list(zip(x[good_points], y[good_points]))
|
|
|
|
def clean_data_pd(line_part_t, window=5, percentile=99):
|
|
"""
|
|
Remove obvious outliers based on window comparison - pandas version
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
#line_part_t = line_part_t[2000:2100]
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
|
|
# Create pandas Series and calculate rolling median
|
|
series = pd.Series(y)
|
|
medians = series.rolling(window=window, center=True, min_periods=1).median()
|
|
|
|
# Calculate deviations
|
|
deviations = np.abs(series - medians)
|
|
|
|
largest_deviations = deviations.nlargest(10)
|
|
#print(largest_deviations)
|
|
|
|
# Create mask for good points
|
|
deviation_threshold = np.percentile(deviations, percentile)
|
|
good_points = deviations <= deviation_threshold
|
|
|
|
# Convert back to numpy arrays for filtering
|
|
x = np.array(x)
|
|
y = np.array(y)
|
|
|
|
# Return filtered data
|
|
return list(zip(x[good_points], y[good_points]))
|
|
|
|
def CombineStripes(result_filename, stripes_files):
|
|
try:
|
|
# Open the first image to get the width and initialize the height
|
|
first_image = Image.open(stripes_files[0])
|
|
width, height = first_image.size
|
|
|
|
# Calculate the total height of the combined image
|
|
total_height = height * len(stripes_files)
|
|
|
|
# Create a new blank image with the same width and the calculated height
|
|
result_image = Image.new('RGB', (width, total_height))
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
for file_name in stripes_files:
|
|
image = Image.open(file_name)
|
|
result_image.paste(image, (0, y_offset))
|
|
y_offset += height
|
|
|
|
# Save the result image
|
|
result_image.save(result_filename)
|
|
|
|
# Return success flag
|
|
return True
|
|
|
|
except Exception as e:
|
|
print("Error:", e)
|
|
return False
|
|
|
|
def FindFirstLocalMinimum(counts, bins):
|
|
"""
|
|
Find the first local minimum in a histogram after the main peak and calculate its offset.
|
|
|
|
Parameters:
|
|
hist: tuple of (counts, bin_edges) from np.histogram()
|
|
The histogram data to analyze
|
|
|
|
Returns:
|
|
        tuple: (peak_position, TR, THR_OFFSET)
            peak_position: float, bin-center value of the histogram's main peak
            TR: float, bin-center value of the first local minimum after the main peak
            THR_OFFSET: float, distance (in bin-value units) between the main peak and that minimum
|
|
"""
|
|
|
|
# Find the main peak (global maximum)
|
|
main_peak_idx = np.argmax(counts)
|
|
|
|
# Look for the first local minimum after the main peak
|
|
for i in range(main_peak_idx + 1, len(counts) - 1):
|
|
# Check if current point is less than or equal to both neighbors
|
|
if counts[i] <= counts[i-1] and counts[i] <= counts[i+1]:
|
|
# Calculate the bin center value for TR
|
|
TR = (bins[i] + bins[i+1]) / 2
|
|
# Calculate offset of the local minimum from the main peak, in bin-value units
|
|
THR_OFFSET = (bins[i] + bins[i+1]) / 2 - (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
|
|
return (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2, TR, THR_OFFSET
|
|
|
|
    # If no local minimum is found, return None for all three values
    return None, None, None
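
# Synthetic example (illustrative): with unit-width bins 0..7, the main peak sits in bin 2
# (center 2.5) and the first local minimum after it is in bin 4 (center 4.5), so the function
# returns (2.5, 4.5, 2.0).
def _example_find_first_local_minimum():
    counts = np.array([1, 5, 9, 4, 2, 3, 1])
    bins = np.arange(8)
    return FindFirstLocalMinimum(counts, bins)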
|
|
|
|
def process_raw_data(data_tuples):
|
|
"""
|
|
Convert list of (timestamp, stationary, motion) tuples to separate arrays
|
|
|
|
Parameters:
|
|
-----------
|
|
data_tuples : list of tuples
|
|
Each tuple contains (datetime, stationary_value, motion_value)
|
|
|
|
Returns:
|
|
--------
|
|
timestamps : array of datetime
|
|
stationary : array of float
|
|
motion : array of float
|
|
"""
|
|
timestamps = np.array([t[0] for t in data_tuples])
|
|
stationary = np.array([t[1] for t in data_tuples])
|
|
motion = np.array([t[2] for t in data_tuples])
|
|
|
|
return timestamps, stationary, motion
|
|
|
|
def rolling_std_fast(arr, window_size):
|
|
"""
|
|
Fast calculation of rolling standard deviation using NumPy's stride tricks.
|
|
|
|
Parameters:
|
|
-----------
|
|
arr : numpy array
|
|
Input array
|
|
window_size : int
|
|
Size of rolling window
|
|
|
|
Returns:
|
|
--------
|
|
numpy array
|
|
Rolling standard deviation
|
|
"""
|
|
# Compute rolling sum of squares
|
|
r = np.array(arr, dtype=float)
|
|
r2 = np.array(arr, dtype=float) ** 2
|
|
|
|
# Calculate cumulative sums
|
|
cum = np.cumsum(np.insert(r, 0, 0))
|
|
cum2 = np.cumsum(np.insert(r2, 0, 0))
|
|
|
|
# Get rolling windows
|
|
x = (cum[window_size:] - cum[:-window_size])
|
|
x2 = (cum2[window_size:] - cum2[:-window_size])
|
|
|
|
# Calculate standard deviation
|
|
n = window_size
|
|
return np.sqrt((x2/n) - (x/n) ** 2)
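
# Sanity-check sketch (illustrative): the cumulative-sum formulation above computes the
# population standard deviation over each full window, so it should agree with a pandas
# rolling std using ddof=0 (pandas defaults to ddof=1).
def _example_rolling_std_check():
    values = np.array([1.0, 2.0, 4.0, 7.0, 11.0, 16.0])
    fast = rolling_std_fast(values, 3)
    reference = pd.Series(values).rolling(3).std(ddof=0).dropna().to_numpy()
    return np.allclose(fast, reference)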
|
|
|
|
def detect_presence(timestamps, stationary_signal, motion_signal, window_size=100,
|
|
motion_threshold=5, gmm_components=2):
|
|
"""
|
|
Detect presence using both stationary and motion signals with adaptive thresholding.
|
|
|
|
Parameters:
|
|
-----------
|
|
timestamps : array-like
|
|
Array of datetime objects
|
|
stationary_signal : array-like
|
|
Time series of stationary signal (0-100)
|
|
motion_signal : array-like
|
|
Time series of motion signal (0-100)
|
|
window_size : int
|
|
Size of rolling window for statistics (used only for temporal smoothing)
|
|
motion_threshold : float
|
|
Threshold for significant motion
|
|
gmm_components : int
|
|
Number of components for Gaussian Mixture Model
|
|
|
|
Returns:
|
|
--------
|
|
presence_mask : numpy array
|
|
Boolean array indicating presence
|
|
baseline : float
|
|
Computed baseline for stationary signal
|
|
threshold : float
|
|
Computed threshold for stationary signal
|
|
"""
|
|
|
|
# Convert inputs to numpy arrays
|
|
stationary_signal = np.array(stationary_signal)
|
|
motion_signal = np.array(motion_signal)
|
|
|
|
# 1. Fit Gaussian Mixture Model to stationary signal
|
|
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
|
|
X = stationary_signal.reshape(-1, 1)
|
|
gmm.fit(X)
|
|
|
|
# Get the component with lowest mean as baseline
|
|
baseline = min(gmm.means_)[0]
|
|
|
|
# 2. Calculate adaptive threshold using GMM components
|
|
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
|
|
baseline_std = np.sqrt(components_sorted[0][1])
|
|
threshold = baseline + 3 * baseline_std # 3 sigma rule
|
|
|
|
# 3. Combine motion and stationary detection
|
|
presence_mask = np.zeros(len(stationary_signal), dtype=bool)
|
|
|
|
# Vectorized operations instead of loop
|
|
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
|
|
|
|
# 4. Apply temporal smoothing to reduce false transitions
|
|
smooth_window = min(window_size // 4, 10) # Smaller window for smoothing
|
|
presence_mask = np.convolve(presence_mask.astype(int),
|
|
np.ones(smooth_window)/smooth_window,
|
|
mode='same') > 0.5
|
|
|
|
return presence_mask, baseline, threshold
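
# Hedged demo (illustrative): builds a short synthetic trace with an "empty room" segment and an
# "occupied" segment, then runs detect_presence. Signal levels are invented; the GMM baseline
# should land near the low segment and the occupied half should be flagged present.
def _example_detect_presence():
    rng = np.random.default_rng(42)
    n = 200
    timestamps = np.arange(n)  # placeholder time axis; detect_presence does not use it directly
    stationary = np.concatenate([rng.normal(5, 1, n // 2), rng.normal(60, 5, n // 2)])
    motion = np.concatenate([np.zeros(n // 2), rng.normal(20, 5, n // 2)])
    presence, baseline, threshold = detect_presence(timestamps, stationary, motion)
    return presence, baseline, threshold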
|
|
|
|
def visualize_detection(timestamps, stationary_signal, motion_signal, presence_mask,
|
|
baseline, threshold, output_file='presence_detection.png'):
|
|
"""
|
|
Visualize the detection results and save to file.
|
|
|
|
Parameters:
|
|
-----------
|
|
timestamps : array-like
|
|
Array of datetime objects
|
|
stationary_signal : array-like
|
|
Time series of stationary signal
|
|
motion_signal : array-like
|
|
Time series of motion signal
|
|
presence_mask : array-like
|
|
Boolean array indicating presence
|
|
baseline : float
|
|
Computed baseline for stationary signal
|
|
threshold : float
|
|
Computed threshold for stationary signal
|
|
output_file : str
|
|
Path to save the output PNG file
|
|
"""
|
|
plt.figure(figsize=(15, 10))
|
|
|
|
# Configure time formatting
|
|
date_formatter = mdates.DateFormatter('%H:%M:%S')
|
|
|
|
# Plot signals
|
|
plt.subplot(3, 1, 1)
|
|
plt.plot(timestamps, stationary_signal, label='Stationary Signal')
|
|
plt.axhline(y=baseline, color='g', linestyle='--', label='Baseline')
|
|
plt.axhline(y=threshold, color='r', linestyle='--', label='Threshold')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.legend()
|
|
plt.title('Stationary Signal with Baseline and Threshold')
|
|
plt.grid(True)
|
|
|
|
plt.subplot(3, 1, 2)
|
|
plt.plot(timestamps, motion_signal, label='Motion Signal')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.legend()
|
|
plt.title('Motion Signal')
|
|
plt.grid(True)
|
|
|
|
plt.subplot(3, 1, 3)
|
|
plt.plot(timestamps, presence_mask, label='Presence Detection')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.ylim(-0.1, 1.1)
|
|
plt.legend()
|
|
plt.title('Presence Detection Result')
|
|
plt.grid(True)
|
|
|
|
plt.tight_layout()
|
|
|
|
# Save to file and close figure to free memory
|
|
plt.savefig(output_file, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
def FindZeroIntersection(counts, bins, save_plot, device_id):
|
|
"""
|
|
Find the zero intersection point by fitting a parabola to the descending slope
|
|
between 50% and 10% of the maximum peak height. Also returns the peak position.
|
|
|
|
Parameters:
|
|
counts: array-like
|
|
The histogram counts
|
|
bins: array-like
|
|
The histogram bin edges
|
|
save_plot: str or None, optional
|
|
If provided, saves the visualization to the specified file path
|
|
|
|
Returns:
|
|
tuple: (zero_intersections, peak_position)
|
|
zero_intersections: list of floats, x-coordinates where parabola intersects y=0
|
|
peak_position: float, x-coordinate of the histogram maximum peak
|
|
"""
|
|
# Find the main peak
|
|
main_peak_idx = np.argmax(counts)
|
|
peak_height = counts[main_peak_idx]
|
|
peak_position = (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
|
|
|
|
    # Calculate 50% and 10% of peak height
|
|
height_50 = 0.50 * peak_height
|
|
height_10 = 0.10 * peak_height
|
|
|
|
# Find indices where counts cross these thresholds after the peak
|
|
idx_50 = main_peak_idx
|
|
idx_10 = main_peak_idx
|
|
|
|
for i in range(main_peak_idx, len(counts)):
|
|
if counts[i] <= height_50 and idx_50 == main_peak_idx:
|
|
idx_50 = i
|
|
if counts[i] <= height_10:
|
|
idx_10 = i
|
|
break
|
|
|
|
# If we couldn't find valid points, return None
|
|
if idx_50 == main_peak_idx or idx_10 == main_peak_idx:
|
|
return None, peak_position
|
|
|
|
# Get x and y coordinates for fitting
|
|
# Use bin centers for x coordinates
|
|
x_points = np.array([(bins[i] + bins[i+1])/2 for i in range(idx_50, idx_10+1)])
|
|
y_points = counts[idx_50:idx_10+1]
|
|
|
|
# Define quadratic function for fitting
|
|
def quadratic(x, a, b, c):
|
|
return a*x**2 + b*x + c
|
|
|
|
try:
|
|
popt, pcov = curve_fit(quadratic, x_points, y_points)
|
|
a, b, c = popt
|
|
|
|
# Find zeros using quadratic formula
|
|
if a != 0:
|
|
discriminant = b**2 - 4*a*c
|
|
if discriminant >= 0:
|
|
x1 = (-b + np.sqrt(discriminant)) / (2*a)
|
|
x2 = (-b - np.sqrt(discriminant)) / (2*a)
|
|
zero_intersections = sorted([x1, x2])
|
|
# Filter zeros to only include those after the peak
|
|
zero_intersections = [x for x in zero_intersections if x > peak_position]
|
|
else:
|
|
zero_intersections = []
|
|
else:
|
|
# If a ≈ 0, fallback to linear solution
|
|
if b != 0:
|
|
zero_intersections = [-c/b]
|
|
else:
|
|
zero_intersections = []
|
|
|
|
if save_plot:
|
|
plt.figure(figsize=(10, 6))
|
|
|
|
# Plot histogram
|
|
bin_centers = [(bins[i] + bins[i+1])/2 for i in range(len(counts))]
|
|
plt.bar(bin_centers, counts, width=bins[1]-bins[0], alpha=0.6,
|
|
color='skyblue', label='Histogram')
|
|
|
|
# Plot peak height lines
|
|
plt.axhline(y=height_50, color='g', linestyle='--', alpha=0.5,
|
|
label='50% Peak Height')
|
|
plt.axhline(y=height_10, color='r', linestyle='--', alpha=0.5,
|
|
label='10% Peak Height')
|
|
|
|
# Plot fitted parabola
|
|
x_fit = np.linspace(min(x_points), max(x_points), 100)
|
|
y_fit = quadratic(x_fit, a, b, c)
|
|
plt.plot(x_fit, y_fit, 'r-', label='Fitted Parabola')
|
|
|
|
# Plot points used for fitting
|
|
plt.plot(x_points, y_points, 'ro', alpha=0.5, label='Fitting Points')
|
|
|
|
# Plot zero intersections
|
|
for x_zero in zero_intersections:
|
|
plt.plot(x_zero, 0, 'ko', label='Zero Intersection')
|
|
|
|
# Plot peak position
|
|
plt.axvline(x=peak_position, color='purple', linestyle='--', alpha=0.5,
|
|
label='Peak Position')
|
|
|
|
# Add labels and legend
|
|
plt.xlabel('Bin Values')
|
|
plt.ylabel('Counts')
|
|
plt.title(f'Histogram Analysis with Parabolic Fit {device_id}')
|
|
plt.legend()
|
|
|
|
# Show zero line
|
|
plt.axhline(y=0, color='k', linestyle='-', alpha=0.2)
|
|
|
|
# Add text with intersection and peak values
|
|
text = f'Peak Position: {peak_position:.2f}\n'
|
|
if zero_intersections:
|
|
text += f'Zero Intersection(s): {", ".join([f"{x:.2f}" for x in zero_intersections])}'
|
|
plt.text(0.02, 0.98, text, transform=plt.gca().transAxes,
|
|
verticalalignment='top',
|
|
bbox=dict(boxstyle='round', facecolor='white', alpha=0.8))
|
|
|
|
# Save plot to file
|
|
plt.savefig(save_plot, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
return zero_intersections, peak_position
|
|
except RuntimeError:
|
|
print("Warning: Failed to fit parabola")
|
|
return None, peak_position
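# Illustrative sketch: the function expects np.histogram-style output
# (counts has one fewer entry than bins); radar_values is a hypothetical input array:
#
#     counts, bins = np.histogram(radar_values, bins=50)
#     zeros, peak = FindZeroIntersection(counts, bins, save_plot=None, device_id=0)
#     # zeros lists x positions to the right of the peak where the fitted parabola crosses y=0,
#     # or is None if the 50%/10% descent points could not be located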
|
|
|
|
|
|
def GeneratePresenceHistory(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
|
|
minutes = 1440
|
|
stripes_files = []
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
maps_dates.reverse()
|
|
days = len(maps_dates)
|
|
stretch_by = int(1000 / days)
|
|
if stretch_by > 50:
|
|
stretch_by = 50
|
|
|
|
#lets use 1000 pixels
|
|
#arr_stretched = np.zeros((int(days*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
|
|
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
|
|
file_exists, time_modified_utc = check_file_exists(filename_day)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if file_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
|
|
|
|
image_bytes, content_type = GetBlob(filename_day)
|
|
if image_bytes != None:
|
|
image_stream = io.BytesIO(image_bytes)
|
|
image = Image.open(image_stream)
|
|
|
|
#image = Image.open(file_name)
|
|
result_image.paste(image, (0, y_offset))
|
|
image.close()
|
|
image_stream.close()
|
|
|
|
y_offset += stretch_by
|
|
|
|
# Save directly to MinIO instead of local file
|
|
success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
|
|
# Clean up
|
|
result_image.close()
|
|
return success
|
|
|
|
def AddText(room_image_cv2, x, y, room_name, font_size):
|
|
pil_im = Image.fromarray(room_image_cv2)
|
|
draw = ImageDraw.Draw(pil_im)
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
#print(f"Attempting to load font from: {font_path}")
|
|
try:
|
|
        font = ImageFont.truetype(font_path, font_size)  # requested size in pixels
|
|
    except Exception:
|
|
logger.error(f"Poppins font not found in {font_path}. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
    draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # gray text in RGB
|
|
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
return room_image_cv2
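# Illustrative usage sketch (assumes a 3-channel numpy image; the call returns a BGR array for cv2):
#
#     canvas = np.full((100, 400, 3), 255, dtype=np.uint8)
#     canvas = AddText(canvas, 10, 10, "Kitchen", 40)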
|
|
|
|
def AddTextList(room_image_cv2, strings_list, font_size):
|
|
pil_im = Image.fromarray(room_image_cv2)
|
|
draw = ImageDraw.Draw(pil_im)
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
try:
|
|
        font = ImageFont.truetype(font_path, font_size)  # requested size in pixels
|
|
    except Exception:
|
|
logger.error("Poppins font not found. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
|
|
for x, y, room_name in strings_list:
|
|
        draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # gray text in RGB
|
|
|
|
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
return room_image_cv2
|
|
|
|
|
|
def AddRoomData(room_image, room_name, data):
|
|
|
|
# Example usage:
|
|
radius = 10
|
|
color_t = data["color"] # BGR format for red
|
|
color = (color_t[2], color_t[1], color_t[0])
|
|
x_offset = 12
|
|
|
|
room_image = AddText(room_image, 13, 20, room_name, 50)
|
|
print(data)
|
|
for present in data["presence"]:
|
|
device_id, minute, duration = present
|
|
#duration = 10
|
|
top_left = (x_offset + minute, 140) #bottom_right = (300, 200)
|
|
bottom_right = (x_offset + minute + duration, 260)
|
|
draw_rounded_rectangle(room_image, top_left, bottom_right, radius, color)
|
|
return room_image
|
|
|
|
def AddFooterData(image):
|
|
    # approximate x anchor positions: 12, 370, 736, 1092, 1452
|
|
step_size = 1440 / 4
|
|
string_width = 60
|
|
offset = 12
|
|
yoffset = 30
|
|
step = 0
|
|
font_size = 40
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "3 AM", font_size)
|
|
step = 1
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "9 AM", font_size)
|
|
step = 2
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "3 PM", font_size)
|
|
step = 3
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "9 PM", font_size)
|
|
return image
|
|
|
|
def draw_rounded_rectangle(image, top_left, bottom_right, radius, color):
|
|
"""
|
|
Draw a filled rectangle with rounded corners, using simple rectangle for small dimensions
|
|
:param image: Image to draw on
|
|
:param top_left: Top-left corner coordinates (x, y)
|
|
:param bottom_right: Bottom-right corner coordinates (x, y)
|
|
:param radius: Desired corner radius (will be adjusted if needed)
|
|
:param color: Rectangle color in BGR format
|
|
"""
|
|
x1, y1 = top_left
|
|
x2, y2 = bottom_right
|
|
|
|
# Calculate width
|
|
width = x2 - x1
|
|
|
|
|
|
# Adjust radius if width or height is too small
|
|
# Maximum radius should be half of the smaller dimension
|
|
max_radius = abs(width) // 2
|
|
radius = min(radius, max_radius)
|
|
|
|
# If width is too small, fallback to regular rectangle
|
|
if width <= 4 or radius <= 1:
|
|
cv2.rectangle(image, top_left, bottom_right, color, -1)
|
|
return
|
|
|
|
# Adjust radius if needed
|
|
radius = min(radius, width // 2)
|
|
|
|
# Create points for the main rectangle
|
|
pts = np.array([
|
|
[x1 + radius, y1],
|
|
[x2 - radius, y1],
|
|
[x2, y1 + radius],
|
|
[x2, y2 - radius],
|
|
[x2 - radius, y2],
|
|
[x1 + radius, y2],
|
|
[x1, y2 - radius],
|
|
[x1, y1 + radius]
|
|
], np.int32)
|
|
|
|
# Fill the main shape
|
|
cv2.fillPoly(image, [pts], color)
|
|
|
|
# Fill the corners
|
|
cv2.ellipse(image, (x1 + radius, y1 + radius), (radius, radius), 180, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x2 - radius, y1 + radius), (radius, radius), 270, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x1 + radius, y2 - radius), (radius, radius), 90, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x2 - radius, y2 - radius), (radius, radius), 0, 0, 90, color, -1)
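# Illustrative sketch: draw one filled, rounded presence bar onto a blank canvas.
# Narrow bars (width <= 4) fall back to a plain cv2.rectangle:
#
#     canvas = np.zeros((300, 600, 3), dtype=np.uint8)
#     draw_rounded_rectangle(canvas, (50, 100), (250, 200), radius=10, color=(0, 0, 255))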
|
|
|
|
def filter_device(locations_list, device_id):
|
|
result = []
|
|
for entry in locations_list:
|
|
if entry[0] == device_id:
|
|
result.append(entry)
|
|
|
|
return result
|
|
|
|
def GenerateLocationsMap(date_st, devices_list, devices_map, locations_list, time_zone_s):
|
|
|
|
devices_list_t = [("date",date_st)]
|
|
|
|
|
|
for mac in devices_list:
|
|
well_id, device_id, room = devices_map[mac]
|
|
#room = devices[well_id][0]
|
|
color = Loc2Color[room][0]
|
|
presence_data = filter_device(locations_list, device_id)
|
|
room_details = (room, {"color": color, "presence": presence_data})
|
|
devices_list_t.append(room_details)
|
|
|
|
well_id = 0
|
|
device_id = 0
|
|
room = "Outside/?"
|
|
color = (0, 0, 0)
|
|
|
|
    # let's not draw future (still unknown) presence
|
|
presence_data = filter_device(locations_list, device_id)
|
|
current_utc = datetime.datetime.now(pytz.UTC)
|
|
current_date_local = current_utc.astimezone(pytz.timezone(time_zone_s))
|
|
current_minute_of_day = current_date_local.hour * 60 + current_date_local.minute
|
|
|
|
if date_st == current_date_local.strftime('%Y-%m-%d'):
|
|
filtered_presence_data = []
|
|
for entry in presence_data:
|
|
if entry[1] < current_minute_of_day :
|
|
if entry[1] + entry[2] < current_minute_of_day:
|
|
filtered_presence_data.append(entry)
|
|
else:
|
|
                    entry[2] = current_minute_of_day - entry[1]  # truncate the open interval at "now"
|
|
if entry[2] > 0:
|
|
filtered_presence_data.append(entry)
|
|
#print(presence_data)
|
|
else:
|
|
filtered_presence_data = presence_data
|
|
|
|
room_details = (room, {"color": color, "presence": filtered_presence_data})
|
|
devices_list_t.append(room_details)
|
|
|
|
return devices_list_t
|
|
|
|
def CreateDailyLocationChart(filename_chart_image_day, locations):
|
|
result = False
|
|
header_image_file = "header.png"
|
|
room_image_file = "room.png"
|
|
footer_image_file = "footer.png"
|
|
|
|
#ToDo: change it so it reads files from MinIo
|
|
header_image_file = os.path.join(filesDir, header_image_file)
|
|
header_image_file = header_image_file.replace("\\","/")
|
|
header_image = cv2.imread(header_image_file)
|
|
#header_height, header_width = header_image.shape[:2]
|
|
|
|
room_image_file = os.path.join(filesDir, room_image_file)
|
|
room_image_file = room_image_file.replace("\\","/")
|
|
room_image = cv2.imread(room_image_file)
|
|
#room_height, room_width = room_image.shape[:2]
|
|
|
|
footer_image_file = os.path.join(filesDir, footer_image_file)
|
|
footer_image_file = footer_image_file.replace("\\","/")
|
|
footer_image = cv2.imread(footer_image_file)
|
|
|
|
all_images = [header_image]
|
|
for item_c in locations:
|
|
item = item_c[0]
|
|
if item == "date":
|
|
date = item_c[1]
|
|
else:
|
|
room_image = cv2.imread(room_image_file)
|
|
data = item_c[1]
|
|
room_image = AddRoomData(room_image, item, data)
|
|
all_images.append(room_image)
|
|
|
|
footer_image = AddFooterData(footer_image)
|
|
all_images.append(footer_image)
|
|
final_image = np.vstack(all_images)
|
|
#this needs to write straight to MinIo !
|
|
SaveImageInBlob(filename_chart_image_day, final_image)
|
|
result = True
|
|
#cv2.imwrite(filename_chart_image_day, final_image)
|
|
#print(rooms_count)
|
|
return result
|
|
|
|
|
|
def GetOptimumFontSize(target_width, text="00", min_size=1, max_size=100, tolerance=1):
|
|
"""
|
|
Find optimal font size to fit text within target width using binary search.
|
|
|
|
Args:
|
|
target_width (int): Desired width in pixels
|
|
text (str): Text to measure (default "00")
|
|
min_size (int): Minimum font size to try
|
|
max_size (int): Maximum font size to try
|
|
tolerance (int): Acceptable difference from target width
|
|
|
|
Returns:
|
|
int: Optimal font size
|
|
"""
|
|
while min_size <= max_size:
|
|
current_size = (min_size + max_size) // 2
|
|
width, _ = GetStringSize(text, current_size)
|
|
|
|
if abs(width - target_width) <= tolerance:
|
|
return current_size
|
|
elif width > target_width:
|
|
max_size = current_size - 1
|
|
else:
|
|
min_size = current_size + 1
|
|
|
|
# Return the largest size that fits within target width
|
|
width, _ = GetStringSize(text, min_size)
|
|
return min_size if width <= target_width else min_size - 1
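# Illustrative sketch: find the largest Poppins size whose "00" label fits a 60 px wide
# day column (exact values depend on the bundled font file):
#
#     size = GetOptimumFontSize(60, "00", min_size=10, max_size=50, tolerance=0)
#     w, h = GetStringSize("00", size)   # w should be <= 60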
|
|
|
|
def GetStringSize(some_string, font_size):
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
try:
|
|
font = ImageFont.truetype(font_path, font_size) # 12px size
|
|
except:
|
|
logger.error("Poppins font not found. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
|
|
bbox = font.getbbox(some_string)
|
|
return bbox[2] - bbox[0], bbox[3] - bbox[1]
|
|
|
|
def GeneratePresenceHistoryChart(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
|
|
minutes = 1440
|
|
stripes_files = []
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
#maps_dates.reverse()
|
|
days = len(maps_dates)
|
|
#stretch_by = int(1000 / days)
|
|
#if stretch_by > 50:
|
|
#stretch_by = 50
|
|
stretch_by = 30
|
|
|
|
#background_image_file = os.path.join(filesDir, "multi_day_template.png")
|
|
background_image_file = os.path.join(filesDir, "multi_day_template2.png")
|
|
background_image_file = background_image_file.replace("\\","/")
|
|
background_image = cv2.imread(background_image_file)
|
|
|
|
rgb_image = background_image #cv2.cvtColor(background_image, cv2.COLOR_BGR2RGB)
|
|
result_image = Image.fromarray(rgb_image) # Convert to PIL Image
|
|
#result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
locations_list = []
|
|
font_size = 50
|
|
string_width, string_height = GetStringSize("00", font_size)
|
|
|
|
|
|
success = False
|
|
if len(maps_dates) == 1:
|
|
|
|
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
|
|
force_recreate = recreate_in
|
|
#it is faster to resize existing daily location chart (length is always 1440), than having to re-create it each time...
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
filename_chart_data_day = filename_day+".bin"
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_chart_image_day)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
                file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()  # compare dates, not a date to a string
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
if not force_recreate:
|
|
file_exists1, time_modified_utc1 = check_file_exists(filename_chart_data_day)
|
|
if file_exists1:
|
|
                time_modified_local = time_modified_utc1.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
                file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()  # compare dates, not a date to a string
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list_a, device_ids = GetProximityList(deployment_id, timee)
|
|
CreateDailyLocationMap(filename_day, devices_list_a, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", filename_chart_data_day)
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
devices_map = {}
|
|
devices_list = []
|
|
for device_entry in devices_list_a:
|
|
devices_map[device_entry[4]] = [device_entry[0], device_entry[1], device_entry[2]]
|
|
devices_list.append(device_entry[4])
|
|
|
|
locations = GenerateLocationsMap(ddate, devices_list, devices_map, locations_list, time_zone_s)
|
|
success = CreateDailyLocationChart(filename, locations)
|
|
else:
|
|
|
|
day_counter = 0
|
|
day_step_width = int(1780 / days)
|
|
x_offset = 563
|
|
y_offset = 1615
|
|
h_labels_bottom = 1720
|
|
day_width = int(0.9 * day_step_width)
|
|
day_height = 1440
|
|
font_size = GetOptimumFontSize(day_width, "00", 10, 50, 0)
|
|
string_width, string_height = GetStringSize("00", font_size)
|
|
#logger.debug(f"font_size={font_size} string_width={string_width}")
|
|
|
|
y_offset = y_offset - day_height
|
|
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_day)
|
|
file_existsS, time_modifiedS_utc = check_file_exists(filename_day[:-4]+"S.png")
|
|
if file_exists and file_existsS:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
|
|
#here we need to rotate and resize to:
|
|
|
|
image_bytes, content_type = GetBlob(filename_day)
|
|
image_bytes_s, content_type_s = GetBlob(filename_day[:-4]+"S.png")
|
|
if image_bytes != None:
|
|
image_stream = io.BytesIO(image_bytes)
|
|
image = Image.open(image_stream)
|
|
numpy_image = np.array(image)
|
|
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
|
|
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
|
|
|
|
# Convert from BGR to RGB
|
|
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
|
|
# Convert to PIL Image
|
|
pil_image = Image.fromarray(rgb_image)
|
|
|
|
#image = Image.open(file_name)
|
|
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
|
|
result_image.paste(pil_image, (x_origin, y_offset))
|
|
|
|
image_stream = io.BytesIO(image_bytes_s)
|
|
image = Image.open(image_stream)
|
|
numpy_image = np.array(image)
|
|
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
|
|
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
|
|
|
|
# Convert from BGR to RGB
|
|
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
|
|
# Convert to PIL Image
|
|
pil_image = Image.fromarray(rgb_image)
|
|
|
|
#image = Image.open(file_name)
|
|
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
|
|
result_image.paste(pil_image, (x_origin, 1807+y_offset))
|
|
|
|
|
|
image.close()
|
|
image_stream.close()
|
|
|
|
day_counter += 1
|
|
|
|
pil_im = result_image
|
|
#result_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
result_image_cv2 = np.array(pil_im)#cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
strings_list = []
|
|
day_counter = 0
|
|
for ddate in maps_dates:
|
|
if string_width <= day_width:
|
|
date_str = ddate[8:10]
|
|
x_origin = x_offset + int(day_step_width * (day_counter + 0.5)) - int(string_width / 2)
|
|
strings_list.append((x_origin, h_labels_bottom, date_str))
|
|
day_counter += 1
|
|
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
|
|
|
|
|
|
#Y 124 to 1636
|
|
labels_bottom = 1636 - 1.5 * string_height
|
|
x_offset = 340
|
|
step = -4 * 60 #4 hours
|
|
font_size = 50
|
|
strings_list = []
|
|
count = 0
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 AM"))
|
|
count = 1
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "4 AM"))
|
|
count = 2
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "8 AM"))
|
|
count = 3
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 PM"))
|
|
count = 4
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "4 PM"))
|
|
count = 5
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "8 PM"))
|
|
count = 6
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 AM"))
|
|
|
|
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
|
|
|
|
numpy_image = np.array(result_image_cv2)
|
|
|
|
success = SaveImageInBlob(filename, numpy_image)
|
|
#SaveImageInBlob(filename, result_image)
|
|
# Save directly to MinIO instead of local file
|
|
#if success:
|
|
# success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
|
|
# Clean up
|
|
if success:
|
|
return filename
|
|
else:
|
|
return ""
|
|
|
|
def GeneratePresenceHistoryFiles(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
stretch_by = 30
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
|
|
day_counter = 0
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_day+".bin")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, 10)
|
|
|
|
day_counter += 1
|
|
|
|
return filename
|
|
|
|
def CalcStdevs(row, stdev_range, stdevs):
|
|
half_range = stdev_range // 2
|
|
data_len = len(row)
|
|
|
|
# Calculate standard deviations with proper window alignment
|
|
for i in range(data_len):
|
|
# Calculate window boundaries
|
|
start = max(0, i - half_range)
|
|
end = min(data_len, i + half_range + 1)
|
|
|
|
# Get data within window
|
|
window_data = row[start:end]
|
|
|
|
# Calculate standard deviation if we have data
|
|
if len(window_data) > 0:
|
|
stdevs[i] = np.std(window_data)
|
|
|
|
# Find amplitude (max - min of standard deviations)
|
|
amplitude = np.max(stdevs) - np.min(stdevs)
|
|
|
|
# Scale to range 0-1279
|
|
if amplitude > 0: # Avoid division by zero
|
|
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
|
|
|
|
return stdevs, amplitude
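# Illustrative sketch: stdevs is an output buffer with the same length as row; the
# returned values are rescaled into the 0-1279 range used by the stripe images:
#
#     row = np.random.normal(0, 1, 1440)
#     scaled, amplitude = CalcStdevs(row, stdev_range=10, stdevs=np.zeros(1440, dtype=np.float32))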
|
|
|
|
def CalcLife(row, stdev_range, stdevs):
|
|
half_range = stdev_range // 2
|
|
data_len = len(row)
|
|
|
|
# Calculate standard deviations with proper window alignment
|
|
for i in range(data_len):
|
|
# Calculate window boundaries
|
|
start = max(0, i - half_range)
|
|
end = min(data_len, i + half_range + 1)
|
|
|
|
# Get data within window
|
|
window_data = row[start:end]
|
|
|
|
# Calculate standard deviation if we have data
|
|
if len(window_data) > 0:
|
|
stdevs[i] = np.std(window_data)
|
|
|
|
# Find amplitude (max - min of standard deviations)
|
|
amplitude = np.max(stdevs) - np.min(stdevs)
|
|
|
|
# Scale to range 0-1279
|
|
if amplitude > 0: # Avoid division by zero
|
|
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
|
|
|
|
return stdevs, amplitude
|
|
|
|
def FindCalibrationDate(device_ids, ddate):
|
|
PCD = 50 #% (Peak Contained Data %)
|
|
PHB = 50 #% (Peak Height from Base %)
|
|
MPW = 10 #? (Max Peak Width)
|
|
    MPSD = 10 #? (Minimum Presence signal Standard Deviation)
|
|
#Find first day with, for all devices:
|
|
#- enough radar data points collected
|
|
#-Single histogram peak containing more than PCD% of data and peak width (at PHB% height) is < MPW
|
|
#Stdev of Data larger > MPSD
|
|
return ddate
|
|
|
|
def FindThreshold(data, percent_list):
|
|
"""
|
|
    Find the threshold values above which the given percentages of points lie.

    Args:
        data: numpy array of values
        percent_list: (percent_from, percent_to) - percentages of points (0-100)
            that should lie above each returned threshold

    Returns:
        (threshold_from, threshold_to) - the two threshold values
    """
|
|
percent_from, percent_to = percent_list
|
|
# Sort data in descending order
|
|
sorted_data = np.sort(data)[::-1]
|
|
|
|
# Calculate the index corresponding to the desired percentage
|
|
index_from = int((percent_from / 100) * len(data))
|
|
index_to = int((percent_to / 100) * len(data))
|
|
|
|
# Return the threshold value
|
|
return sorted_data[index_from], sorted_data[index_to]
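# Illustrative sketch: for 100 evenly spaced values, the thresholds above which 10% and 30%
# of points lie are simply the 10th and 30th largest values:
#
#     data = np.arange(100)
#     hi, lo = FindThreshold(data, (10, 30))   # -> (89, 69)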
|
|
|
|
def ShowThresholdGraph(data, filename, threshold_low, threshold_high, title, AveragePercentSpendsThere, location):
|
|
"""
|
|
Create and save a threshold analysis graph with maximum curvature point.
|
|
"""
|
|
dpi=600
|
|
# Get min and max values
|
|
min_val = np.min(data)
|
|
max_val = np.max(data)
|
|
|
|
# Create 1000 threshold levels from max to min
|
|
thresholds = np.linspace(max_val, min_val, 1000)
|
|
threshold_percentages = np.linspace(0, 100, 1000)
|
|
|
|
# Calculate percentage of points above each threshold
|
|
points_above = []
|
|
total_points = len(data)
|
|
|
|
for thresh in thresholds:
|
|
above_count = np.sum(data > thresh)
|
|
percentage = (above_count / total_points) * 100
|
|
points_above.append(percentage)
|
|
|
|
points_above = np.array(points_above)
|
|
|
|
# Calculate derivatives and smooth them
|
|
first_derivative = np.gradient(points_above)
|
|
second_derivative = np.gradient(first_derivative)
|
|
|
|
#first_derivative = savgol_filter(np.gradient(points_above), window_length=51, polyorder=3)
|
|
#second_derivative = savgol_filter(np.gradient(first_derivative), window_length=51, polyorder=3)
|
|
|
|
|
|
# Find the point of maximum absolute second derivative
|
|
# Exclude edges (first and last 5% of points) to avoid edge effects
|
|
edge_margin = len(second_derivative) // 20 # 5% of points
|
|
valid_range = slice(edge_margin, -edge_margin)
|
|
max_curve_idx = edge_margin + np.argmax(np.abs(second_derivative[valid_range]))
|
|
|
|
max_curve_x = threshold_percentages[max_curve_idx]
|
|
max_curve_y = points_above[max_curve_idx]
|
|
max_curve_second_deriv = second_derivative[max_curve_idx]
|
|
|
|
# Calculate the actual threshold value for this point
|
|
threshold2 = max_val - (max_curve_x/100) * (max_val - min_val)
|
|
|
|
# Create subplot figure
|
|
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), height_ratios=[3, 2])
|
|
|
|
# Plot 1: Original curve with thresholds
|
|
ax1.plot(threshold_percentages, points_above, 'b-', linewidth=2, label='Distribution')
|
|
ax1.grid(True, linestyle='--', alpha=0.7)
|
|
|
|
# Add original threshold line if provided
|
|
if threshold_low is not None:
|
|
threshold_percent = ((max_val - threshold_low) / (max_val - min_val)) * 100
|
|
percent_above = (np.sum(data > threshold_low) / total_points) * 100
|
|
|
|
ax1.axvline(x=threshold_percent, color='r', linestyle='--', label=f'Minimum % spent in {location}: {AveragePercentSpendsThere[0]:.3f}')
|
|
ax1.axhline(y=percent_above, color='r', linestyle='--')
|
|
|
|
#ax1.annotate(f'Threshold 1: {threshold_low:.3f}\nPoints above: {percent_above:.1f}%',
|
|
#xy=(threshold_percent, percent_above),
|
|
#xytext=(10, 10), textcoords='offset points',
|
|
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
|
|
#arrowprops=dict(arrowstyle='->'))
|
|
|
|
if threshold_high is not None:
|
|
threshold_percent = ((max_val - threshold_high) / (max_val - min_val)) * 100
|
|
percent_above = (np.sum(data > threshold_high) / total_points) * 100
|
|
|
|
ax1.axvline(x=threshold_percent, color='b', linestyle='--', label=f'Maximum % spent in {location}: {AveragePercentSpendsThere[1]:.3f}')
|
|
ax1.axhline(y=percent_above, color='b', linestyle='--')
|
|
|
|
#ax1.annotate(f'Threshold 1: {threshold_high:.3f}\nPoints above: {percent_above:.1f}%',
|
|
#xy=(threshold_percent, percent_above),
|
|
#xytext=(10, 10), textcoords='offset points',
|
|
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
|
|
#arrowprops=dict(arrowstyle='->'))
|
|
|
|
# Add maximum curvature point threshold
|
|
ax1.axvline(x=max_curve_x, color='g', linestyle='--', label=f'Threshold 2: {threshold2:.3f}')
|
|
ax1.axhline(y=max_curve_y, color='g', linestyle='--')
|
|
ax1.plot(max_curve_x, max_curve_y, 'go', markersize=10)
|
|
|
|
ax1.annotate(f'Threshold 2: {threshold2:.3f}\nPoints above: {max_curve_y:.1f}%',
|
|
xy=(max_curve_x, max_curve_y),
|
|
xytext=(10, -20), textcoords='offset points',
|
|
bbox=dict(boxstyle='round,pad=0.5', fc='lightgreen', alpha=0.5),
|
|
arrowprops=dict(arrowstyle='->'))
|
|
|
|
ax1.set_xlabel('Threshold Level (%)\n0% = Maximum, 100% = Minimum')
|
|
ax1.set_ylabel('Points Above Threshold (%)')
|
|
ax1.set_title(title)
|
|
ax1.set_xlim(0, 100)
|
|
ax1.set_ylim(0, 100)
|
|
ax1.legend()
|
|
|
|
# Plot 2: Rate of change
|
|
ax2.plot(threshold_percentages, first_derivative, 'g-', label='First derivative', alpha=0.7)
|
|
ax2.plot(threshold_percentages, second_derivative, 'r-', label='Second derivative', alpha=0.7)
|
|
ax2.grid(True, linestyle='--', alpha=0.7)
|
|
|
|
# Mark maximum curvature point on derivative plot
|
|
ax2.axvline(x=max_curve_x, color='g', linestyle='--')
|
|
# Plot point exactly on the second derivative curve
|
|
ax2.plot(max_curve_x, max_curve_second_deriv, 'go', markersize=10,
|
|
label=f'Max curvature at {max_curve_x:.1f}%')
|
|
|
|
ax2.set_xlabel('Threshold Level (%)')
|
|
ax2.set_ylabel('Rate of Change')
|
|
ax2.set_title('Rate of Change Analysis')
|
|
ax2.legend()
|
|
|
|
plt.tight_layout()
|
|
plt.savefig(filename, dpi=dpi, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
return threshold2, max_curve_x, max_curve_y
|
|
|
|
def add_boundary_points(line_part_t, time_zone):
|
|
"""
|
|
Add boundary points (00:00:00 and 23:59:59) to a time series list.
|
|
|
|
Args:
|
|
line_part_t: List of tuples (timestamp, value)
|
|
time_zone: String representing the timezone (e.g., "America/Los_Angeles")
|
|
|
|
Returns:
|
|
List of tuples with added boundary points
|
|
"""
|
|
if not line_part_t:
|
|
return line_part_t
|
|
|
|
tz = pytz.timezone(time_zone)
|
|
|
|
# Get the date from the first point
|
|
first_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
|
|
date = first_dt.date()
|
|
|
|
# Create datetime objects for start and end of the day
|
|
start_dt = tz.localize(datetime.datetime.combine(date, datetime.datetime.min.time()))
|
|
end_dt = tz.localize(datetime.datetime.combine(date, datetime.datetime.max.time()))
|
|
|
|
# Convert to timestamps
|
|
start_ts = start_dt.timestamp()
|
|
end_ts = end_dt.timestamp()
|
|
|
|
result = list(line_part_t)
|
|
|
|
# Handle start point (00:00:00)
|
|
first_point_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
|
|
time_diff = first_point_dt - start_dt
|
|
|
|
start_value = line_part_t[0][1]
|
|
|
|
# Add start point at the beginning
|
|
#result.insert(0, (start_ts, start_value))
|
|
|
|
# Handle end point (23:59:59)
|
|
last_point_dt = datetime.datetime.fromtimestamp(line_part_t[-1][0], tz)
|
|
end_value = line_part_t[-1][1]
|
|
# Add end point
|
|
result.append((end_ts, end_value))
|
|
|
|
return result
|
|
|
|
def calculate_life_and_average(my_data1, stdev_range=5):
|
|
# Convert data to numpy array for faster operations
|
|
data_array = np.array(my_data1)
|
|
|
|
# Calculate half range
|
|
stdev_range_h = stdev_range // 2
|
|
|
|
# Pre-calculate indices for the sliding window
|
|
indices = np.arange(len(data_array) - 2 * stdev_range_h)[:, None] + np.arange(2 * stdev_range_h + 1)
|
|
|
|
# Get sliding windows of data
|
|
windows = data_array[indices]
|
|
|
|
# Calculate average (using column 3)
|
|
average = np.mean(windows[:, :, 3], axis=1)
|
|
|
|
# Calculate life (using columns 2, 3, and 4)
|
|
deltas = windows[:, :, 3] - windows[:, :, 2] + windows[:, :, 4]
|
|
life = np.mean(deltas, axis=1)
|
|
|
|
return life.tolist(), average.tolist()
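# Illustrative sketch (hypothetical rows): each row needs at least 5 columns, since
# columns 2, 3 and 4 feed the "life" metric and column 3 feeds the average:
#
#     rows = [[0, 0, 1.0, 10.0, 2.0]] * 20              # 20 identical samples
#     life, avg = calculate_life_and_average(rows, stdev_range=5)
#     # avg[i] == 10.0 and life[i] == 10.0 - 1.0 + 2.0 == 11.0 for every window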
|
|
|
|
def TryJulia(prompt):
|
|
|
|
if len(prompt) > 0:
|
|
|
|
if prompt[0] == "#":
|
|
return prompt.upper()
|
|
|
|
if prompt not in utterances:
|
|
return ""
|
|
else:
|
|
intent = utterances[prompt]
|
|
action = intents[intent]
|
|
|
|
return action[0]
|
|
else:
|
|
return ""
|
|
|
|
def AskGPT(in_prompt, language_from, language_to):
|
|
|
|
if len(in_prompt) > 4:
|
|
|
|
prompt = in_prompt.lower()
|
|
|
|
if language_to.lower() not in language_from.lower():
|
|
prompt = in_prompt + " Answer in " + language_to
|
|
|
|
|
|
print(prompt)
|
|
|
|
#lets see if question is looking for OSM query
|
|
pattern = "what is only the node line for query for * on openstreetmap api? do not answer with url to nominatim, but with query!"
|
|
|
|
|
|
if match_with_wildcard(prompt, pattern):
|
|
differing_part = extract_differing_part(prompt, pattern)
|
|
|
|
if differing_part != "":
|
|
|
|
print(differing_part)
|
|
|
|
if differing_part in searches_dict:
|
|
response = searches_dict[differing_part]
|
|
print(response)
|
|
return response, language_to
|
|
else:
|
|
                    #check if it is one of the synonyms:
|
|
if differing_part in searches_dict["synonims"]:
|
|
differing_part = searches_dict["synonims"][differing_part]
|
|
if differing_part != "":
|
|
                            if differing_part in searches_dict:  # look up the canonical term directly
|
|
response = searches_dict[differing_part]
|
|
print(response)
|
|
return response, language_to
|
|
|
|
hash_string = hashlib.sha256(str(prompt).encode('utf-8')).hexdigest()
|
|
#filename=os.path.join(cache_path, "chgpt_query_" + hash_string+".pkl")
|
|
|
|
julia_present = False
|
|
if prompt.startswith("julia"):
|
|
prompt = prompt[len("julia") + 1:]
|
|
julia_present = True
|
|
|
|
completion = ""
|
|
if julia_present == False:
|
|
completion = TryJulia(prompt)
|
|
#if completion == "":
|
|
# if os.path.exists(filename):
|
|
# #completion = pickle.load(open( filename, "rb" ))
|
|
# completion = (completion.choices[0].message.content.strip(), language_to)[0]
|
|
else:
|
|
completion = TryJulia(prompt)
|
|
|
|
|
|
if completion == "":
|
|
|
|
st = time.time()
|
|
#import wandb
|
|
|
|
#run = wandb.init(project='GPT-4 in Python')
|
|
#prediction_table = wandb.Table(columns=["prompt", "prompt tokens", "completion", "completion tokens", "model", "total tokens"])
|
|
|
|
print(time.time() - st)
|
|
|
|
openai.api_key = OPENAI_API_KEY
|
|
client = OpenAI(
|
|
# This is the default and can be omitted
|
|
api_key = OPENAI_API_KEY
|
|
)
|
|
|
|
completion = client.chat.completions.create(
|
|
messages=[
|
|
{
|
|
"role": "user",
|
|
"content": prompt,
|
|
}
|
|
],
|
|
model="gpt-3.5-turbo",
|
|
)
|
|
|
|
|
|
#with open(filename, 'wb') as handle:
|
|
#pickle.dump(completion, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
|
|
|
response = (completion.choices[0].message.content.strip(), language_to)
|
|
else:
|
|
response = (completion, language_to)
|
|
|
|
else:
|
|
response = ("question is too short", language_to)
|
|
|
|
|
|
print(response)
|
|
return response
|
|
|
|
def AskGPTPure(in_prompt):
|
|
|
|
if len(in_prompt) > 4:
|
|
|
|
prompt = in_prompt.lower()
|
|
|
|
print(prompt)
|
|
|
|
st = time.time()
|
|
|
|
print(time.time() - st)
|
|
|
|
openai.api_key = OPENAI_API_KEY
|
|
client = OpenAI(
|
|
# This is the default and can be omitted
|
|
api_key = OPENAI_API_KEY
|
|
)
|
|
|
|
completion = client.chat.completions.create(
|
|
messages=[
|
|
{
|
|
"role": "user",
|
|
"content": prompt,
|
|
}
|
|
],
|
|
model="gpt-3.5-turbo",
|
|
)
|
|
|
|
response = completion.choices[0].message.content.strip()
|
|
|
|
|
|
else:
|
|
response = "question is too short"
|
|
|
|
|
|
print(response)
|
|
return response
|
|
|
|
def get_last_n_days(n=14, timezone_str='America/Los_Angeles'):
|
|
# Get current UTC time
|
|
utc_now = datetime.datetime.now(pytz.UTC)
|
|
|
|
# Convert to the specified timezone
|
|
local_now = utc_now.astimezone(pytz.timezone(timezone_str))
|
|
|
|
# Get the current date in the specified timezone
|
|
current_date = local_now.date()
|
|
|
|
# Determine the last whole day
|
|
if local_now.hour > 0 or local_now.minute > 0 or local_now.second > 0:
|
|
# Yesterday in the specified timezone
|
|
last_whole_day = current_date - timedelta(days=1)
|
|
else:
|
|
# If it's exactly midnight, the last whole day is two days ago
|
|
last_whole_day = current_date - timedelta(days=2)
|
|
|
|
# Generate list of n days, ending with the last whole day
|
|
date_list = []
|
|
for i in range(n-1, -1, -1):
|
|
day = last_whole_day - timedelta(days=i)
|
|
date_list.append(day.strftime('%Y-%m-%d'))
|
|
|
|
return date_list
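# Illustrative sketch: returns n consecutive 'YYYY-MM-DD' strings in the given timezone,
# oldest first, ending with the most recent fully completed day:
#
#     days = get_last_n_days(3, 'America/Los_Angeles')
#     # e.g. ['2025-02-10', '2025-02-11', '2025-02-12'] when run on 2025-02-13 local time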
|
|
|
|
|
|
def numpy_to_json(arr, devices_list):
|
|
"""
|
|
Convert numpy array to JSON-serializable format
|
|
|
|
Args:
|
|
arr (numpy.ndarray): 2D numpy array to serialize
|
|
|
|
Returns:
|
|
str: JSON string containing array data and metadata
|
|
"""
|
|
if not isinstance(arr, np.ndarray):
|
|
raise TypeError("Input must be a numpy array")
|
|
|
|
array_dict = {
|
|
'dtype': str(arr.dtype),
|
|
'shape': arr.shape,
|
|
'devices_list': devices_list,
|
|
'data': arr.tolist() # Convert to nested Python lists
|
|
}
|
|
|
|
return json.dumps(array_dict)
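# Illustrative round-trip sketch: the JSON keeps dtype/shape metadata so the array can be
# rebuilt on the receiving side (device names below are hypothetical):
#
#     payload = numpy_to_json(np.zeros((2, 3)), ["well-1", "well-2"])
#     d = json.loads(payload)
#     restored = np.array(d['data'], dtype=d['dtype']).reshape(d['shape'])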
|
|
|
|
def format_time_difference(minutes):
|
|
# Calculate days, hours, minutes
|
|
days = int(minutes // (24 * 60))
|
|
remaining_minutes = minutes % (24 * 60)
|
|
hours = int(remaining_minutes // 60)
|
|
mins = int(remaining_minutes % 60)
|
|
|
|
parts = []
|
|
|
|
# Add days if any
|
|
if days > 0:
|
|
parts.append(f"{days} day{'s' if days != 1 else ''}")
|
|
|
|
# Add hours if any
|
|
if hours > 0:
|
|
parts.append(f"{hours} hour{'s' if hours != 1 else ''}")
|
|
|
|
# Add minutes if any
|
|
if mins > 0 or (days == 0 and hours == 0):
|
|
parts.append(f"{mins} minute{'s' if mins != 1 else ''}")
|
|
|
|
# Combine the parts into a sentence
|
|
if len(parts) == 1:
|
|
return parts[0]
|
|
elif len(parts) == 2:
|
|
return f"{parts[0]} and {parts[1]}"
|
|
else:
|
|
return f"{parts[0]}, {parts[1]}, and {parts[2]}"
|
|
|
|
def RunCommand(commmand, args_dictionary, deployment_id):
|
|
|
|
to_return = ""
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
filter_minutes = 5
|
|
dates = get_last_n_days(28, time_zone_s)
|
|
ddate = dates[0] #2025-02-02 req.params.get("date")
|
|
to_date = dates[-1]
|
|
date_s = datetime.datetime.now(pytz.UTC).astimezone(local_tz).date().strftime("%Y-%m-%d")
|
|
|
|
if commmand == "#STATUS#":
|
|
force_recreate_orig = False #True
|
|
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
|
|
filename = GeneratePresenceHistoryFiles(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
stretch_by = 30
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
|
|
loclist = []
|
|
day_counter = 0
|
|
minutes_spent_there_list = []
|
|
for ddate in maps_dates:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
Id2Location = {}
|
|
for device in devices_list:
|
|
Id2Location[device[1]] = device[2]
|
|
Id2Location[0] = "Outside/?"
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png.bin"
|
|
locations_list_s = ReadObjectMinIO("daily-maps", filename_day)
|
|
locations_list = ast.literal_eval(locations_list_s)
|
|
#print(locations_list_s)
|
|
minutes_spent_there = {}
|
|
|
|
|
|
for loc in Id2Location:
|
|
minutes_spent_there[Id2Location[loc]] = 0
|
|
minutes_spent_there[Id2Location[0]] = 0
|
|
|
|
for loc in locations_list:
|
|
minutes_spent_there[Id2Location[loc[0]]] += loc[2]
|
|
|
|
for loc in minutes_spent_there:
|
|
minutes_spent_there[loc] = int(1000 * minutes_spent_there[loc] / 1440) / 10
|
|
minutes_spent_there_list.append((ddate, minutes_spent_there))
|
|
|
|
dailyloclist = []
|
|
for loc in locations_list:
|
|
dailyloclist.append((Id2Location[loc[0]],loc[2])) # provide only loc[2] which is len_minutes or how long subject was there, ignore loc[1] which is minutes_from
|
|
loclist.append((ddate, dailyloclist))
|
|
|
|
data_part = str(minutes_spent_there_list)
|
|
print(data_part)
|
|
|
|
prompt2 = "Consider:\n"
|
|
prompt2 += "- older person living alone in home where each room has multi-sensor IoT device \n"
|
|
prompt2 += "- from the data we can produce a list for each day of locations and minutes spent there\n"
|
|
prompt2 += "- unknown location is listed as \"Outside/?\"\n"
|
|
prompt2 += "- office and living room are equivalent for this individual. Entertainment is consumed on computer (office) and in living room TV.\n"
|
|
prompt2 += "- person is also napping in living room\n"
|
|
prompt2 += "\n"
|
|
prompt2 += "Questions:\n"
|
|
prompt2 += "- list all potential health related information can be recognized from this data (examples based on patterns of bathroom usage for urinating vs pooing, showering, sleep, and list all other)\n"
|
|
prompt2 += "- for each pattern consider; how long data time span is required, reliability range, how to improve (what additional information could be useful from additional sensors or devices)\n"
|
|
prompt2 += "- analyze example data \n"
|
|
prompt2 += "\n"
|
|
prompt2 += "Data example to analyze:\n"
|
|
for day in loclist:
|
|
prompt2 += f"Date: {day[0]}\n "
|
|
prompt2 += ", ".join(f"{location} {minutes}min" for location, minutes in day[1])
|
|
prompt2 += "\n"
|
|
print(prompt2)
|
|
|
|
prompt = "Attached is 4 weeks of data representing % of time where person living alone is spending each day"
|
|
prompt = prompt + " Assess his last week compared to previous 3 weeks. Comment only on significant changes."
|
|
prompt = prompt + " Ignore days where data is all (or mostly) 0!"
|
|
prompt = prompt + " Consider that office and living room are equivalent for this individual. Entertainment is consumed on computer (office) and in living room TV."
|
|
prompt = prompt + " But he is also napping in living room. Comment on his sleeping pattern as well"
|
|
prompt = prompt + " Can you summarize all in 1 sentence?"
|
|
prompt = prompt + " " + data_part
|
|
result = AskGPTPure(prompt)
|
|
|
|
to_return = result
|
|
#to_return = "Your father appears to be fine. He was walking around the house 10 minutes ago and is currently in the living room. And I can smell coffee"
|
|
|
|
elif commmand == "#STATUS_F#":
|
|
to_return = "Your mother is doing well. She slept 8hr and 23min last night. She used the restroom twice last night. She is now in the kitchen. I can smell coffee."
|
|
elif commmand == "#HELP#":
|
|
to_return = "There is number of things you can ask me about. For example: 'how is my dad doing?' Or 'How is his environment' or any other question you like"
|
|
elif commmand == "#SLEEP#":
|
|
to_return = "Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#SLEEP_F#":
|
|
to_return = "Your mom slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#ENVIRONMENT#":
|
|
to_return = "The temperature in the house is 23 degrees Celsius, CO2 level is 662 ppm, and I can smell coffee brewing. Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#WEEK#":
|
|
to_return = "Showing his weekly activity"
|
|
elif commmand == "#WEEK_F#":
|
|
to_return = "Showing her weekly activity"
|
|
elif commmand == "#ACTIVITY#":
|
|
to_return = "Your dad has been less active this week than usual. He spent more time sitting in the living room and he got up later than usual by 38min. He also did not go outside as frequently and had less visitors. He only showered once this week."
|
|
elif commmand == "#ACTIVITY_F#":
|
|
to_return = "Your mom has been less active this week than usual. She spent more time sitting in the living room and she got up later than usual by 38min. She also did not go outside as frequently and had less visitors. She only showered once this week."
|
|
elif commmand == "#ACTIVITY_COMPARE#":
|
|
to_return = "Overall your dad is less active this year compared to last year. He slept longer in the mornings and had less visitors. Also his shower activity is reduced from typically 2 times a week to once a week."
|
|
elif commmand == "#ACTIVITY_COMPARE_F#":
|
|
to_return = "Overall your mom is less active this year compared to last year. She slept longer in the mornings and had less visitors. Also her shower activity is reduced from typically 2 times a week to once a week."
|
|
elif commmand == "#LOCATION#":
|
|
filterr = 5
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
#current_time = datetime.datetime.now()
|
|
current_time = datetime.datetime.now(datetime.timezone.utc)
|
|
last_location = details["last_location"]
|
|
detected_time = datetime.datetime.fromisoformat(details["last_detected_time"])
|
|
local_time = local_tz.localize(detected_time)
|
|
# Convert to UTC
|
|
detected_utc_time = local_time.astimezone(pytz.UTC)
|
|
|
|
time_diff = current_time - detected_utc_time
|
|
minutes = time_diff.total_seconds() / 60
|
|
time_sentence = format_time_difference(minutes)
|
|
if minutes == 0:
|
|
to_return = f"He is now in the {last_location}."
|
|
else:
|
|
to_return = f"He was last detected in the {last_location} {time_sentence} ago"
|
|
elif commmand == "#SHOWER#":
|
|
to_return = "In the last 7 days, your Dad took a shower on Friday, Sunday and Tuesday"
|
|
elif commmand == "#SHOWER_F#":
|
|
to_return = "The last time your mom took a shower was Yesterda at 9:33AM"
|
|
elif commmand == "#BATHROOM#":
|
|
to_return = "Last night your Dad used the restroom only once at 6.10am"
|
|
elif commmand == "#KITCHEN#":
|
|
to_return = "Your Dad only cooked Dinner on Wednesday and he turned off the stove afterwards"
|
|
elif commmand == "#MOLD#":
|
|
to_return = "I cannot smell any mold. Also, the humidity is very low. In any of the rooms never exceeded 27% RH in the last 7 days."
|
|
elif commmand == "#VISITORS#":
|
|
to_return = "Yes, on Tuesday, I could detect motion in both office and kitchen at the same time and CO2 levels in the living room exceeded 900ppm."
|
|
elif commmand == "#TEMPERATURE#":
|
|
filterr = 5
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
current_time = datetime.datetime.now(datetime.timezone.utc)
|
|
last_location = details["last_location"]
|
|
temperature = int(details["temperature"])
|
|
if "America" in time_zone_s:
|
|
temperature_sentence = f"{int(CelsiusToFahrenheit(temperature))} degrees Farenhight"
|
|
else:
|
|
temperature_sentence = f"{temperature} degrees Celsius."
|
|
to_return = f"The temperature in the {last_location} is {temperature_sentence}."
|
|
elif commmand == "#TEMPERATURE_B#":
|
|
to_return = "The temperature in the main bathroom is 80 degrees Farenhight."
|
|
elif commmand == "#OXYGEN#":
|
|
to_return = "His last oxygen level was at 95%."
|
|
elif commmand == "#OXYGEN_F#":
|
|
to_return = "Her last oxygen level was at 95%."
|
|
elif commmand == "#HEART_RATE#":
|
|
to_return = "His last heart rate was 74 bpm."
|
|
elif commmand == "#BLOOD_PRESSURE#":
|
|
to_return = "His latest blood pressure was measured 5 hours ago and it was 137 over 83."
|
|
elif commmand == "#BLOOD_PRESSURE_F#":
|
|
to_return = "Her latest blood pressure was measured 5 hours ago and it was 137 over 83."
|
|
elif commmand == "#EKG#":
|
|
to_return = "His latest HeartBeam EKG was done on Monday and it was within his baseline!"
|
|
elif commmand == "#EKG_F#":
|
|
to_return = "Her latest HeartBeam EKG was done on Monday and it was within her baseline!"
|
|
return to_return
|
|
|
|
def ScaleToCommon(data, sensor):
|
|
|
|
if sensor == "temperature":
|
|
new_min = 0
|
|
new_max = 100
|
|
elif sensor == "humidity":
|
|
new_min = 100
|
|
new_max = 200
|
|
elif sensor == "light":
|
|
new_min = 200
|
|
new_max = 300
|
|
elif sensor == "radar":
|
|
new_min = 300
|
|
new_max = 400
|
|
elif sensor == "s0":
|
|
new_min = 400
|
|
new_max = 500
|
|
elif sensor == "s1":
|
|
new_min = 500
|
|
new_max = 600
|
|
elif sensor == "s2":
|
|
new_min = 600
|
|
new_max = 700
|
|
elif sensor == "s3":
|
|
new_min = 700
|
|
new_max = 800
|
|
elif sensor == "s4":
|
|
new_min = 800
|
|
new_max = 900
|
|
elif sensor == "s5":
|
|
new_min = 900
|
|
new_max = 1000
|
|
elif sensor == "s6":
|
|
new_min = 1000
|
|
new_max = 1100
|
|
elif sensor == "s7":
|
|
new_min = 1100
|
|
new_max = 1200
|
|
elif sensor == "s8":
|
|
new_min = 1200
|
|
new_max = 1300
|
|
else: #s9
|
|
new_min = 1300
|
|
new_max = 1400
|
|
|
|
# Split timestamps and values into separate arrays
|
|
timestamps = np.array([x[0] for x in data])
|
|
values = np.array([x[1] for x in data])
|
|
|
|
# Get current min and max
|
|
if len(values) > 0:
|
|
current_min = np.min(values)
|
|
current_max = np.max(values)
|
|
else:
|
|
        current_min = 0
        current_max = 0
current_max = 0;
|
|
|
|
# Scale the values using the min-max formula
|
|
if current_max - current_min > 0:
|
|
scaled_values = (values - current_min) * (new_max - new_min) / (current_max - current_min) + new_min
|
|
else:
|
|
mid_val = (new_max + new_min) / 2
|
|
scaled_values = np.full_like(values, mid_val)
|
|
|
|
# Zip back together with original timestamps
|
|
return list(zip(timestamps, scaled_values))
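# Illustrative sketch: humidity readings are min-max scaled into the 100-200 band so they
# can be stacked with the other sensors on a single axis; the function returns a list of
# (timestamp, scaled_value) pairs:
#
#     readings = [(1700000000, 30.0), (1700000060, 45.0), (1700000120, 60.0)]
#     ScaleToCommon(readings, "humidity")
#     # humidity values 30 / 45 / 60 map to 100.0 / 150.0 / 200.0 inside the band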
|
|
|
|
def CreateLocationsStripe(locations_file, time_zone_s):
|
|
|
|
parts = locations_file.split("/")
|
|
parts1 = parts[2].split("_")
|
|
ddate = parts1[1]
|
|
deployment_id = parts1[0]
|
|
filter_minutes = parts1[2]
|
|
bw = False
|
|
chart_type = 4
|
|
force_recreate = True
|
|
motion = False
|
|
scale_global = False
|
|
fast = True
|
|
GenerateFullLocationMap(locations_file, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
|
|
|
|
|
|
def CelsiusToFahrenheit(C):
|
|
F = (C * 9/5) + 32
|
|
return F
|
|
|
|
def CelsiusToFahrenheitList(compressed_readings: List[Tuple[datetime.datetime, np.float64]]) -> List[Tuple[datetime.datetime, np.float64]]:
|
|
|
|
# Create a new list with converted temperatures
|
|
converted_readings = [
|
|
[reading[0], CelsiusToFahrenheit(reading[1])]
|
|
for reading in compressed_readings
|
|
]
|
|
|
|
return converted_readings
|
|
|
|
def GetPriviledgesOnly(user_name):
|
|
with get_db_connection() as conn:
|
|
        # Use parameterized queries so user_name cannot be used for SQL injection
        if isinstance(user_name, int) or user_name.isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = %s"
            params = (int(user_name),)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = %s"
            params = (user_name,)

        with conn.cursor() as cur:
            cur.execute(sql, params)
            result = cur.fetchall()
|
|
if result != None:
|
|
return result[0][0]
|
|
else:
|
|
return "0"
|
|
|
|
def AddToLog(message):
|
|
"""Add message to log"""
|
|
logger.info(message)
|
|
|
|
def FillFields(blob_data, record, form_type):
|
|
"""
|
|
Fill in the input fields in the HTML blob_data with values from the caretaker dictionary.
|
|
|
|
:param blob_data: str - The initial HTML string containing empty or placeholder input fields.
|
|
:param caretaker: dict - The dictionary containing values to populate the fields.
|
|
:return: str - The HTML string with the input fields filled with the appropriate values.
|
|
"""
|
|
# Ensure blob_data is a string
|
|
#blob_data = str(blob_data)
|
|
|
|
# Populate the fields
|
|
for field in record:
|
|
logger.debug(f"field= {field}")
|
|
if field == "user_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_user_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "deployment_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_deployment_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "device_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_device_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "user_name":
|
|
if record[field] != None:
|
|
escaped_string = html.escape(record[field])
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
elif field == "location":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'location', record[field])
|
|
|
|
elif field == "gender":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'gender', record[field])
|
|
|
|
elif field == "race":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'race', record[field])
|
|
|
|
elif field == "time_zone_s":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'time_zone_s', record[field])
|
|
|
|
elif field == "time_edit" or field == "user_edit":
|
|
pass
|
|
else:
|
|
if record[field] != None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
return blob_data
|
|
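# Illustrative sketch (not called anywhere in this module): the same value-injection regex
# FillFields uses, applied to a toy <input> tag; the field name and value are made up.
def _example_fill_input_value():
    blob = '<input type="text" id="first_name" value="">'
    escaped = html.escape("Mary")
    pattern = rf'(<input[^>]+id="first_name"[^>]+value=")[^"]*(")'
    return re.sub(pattern, lambda m: f'{m.group(1)}{escaped}{m.group(2)}', blob)
    # -> '<input type="text" id="first_name" value="Mary">'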
|
|
def convert_timestamps_lc(data, time_zone_s):
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
return [[datetime.datetime.fromtimestamp(epoch, pytz.UTC).astimezone(target_tz), value]
|
|
for epoch, value in data]
|
|
|
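# Illustrative usage sketch (not called anywhere in this module): epoch seconds become
# timezone-aware datetimes in the deployment's zone; the timestamp below is made up.
def _example_convert_timestamps_lc():
    out = convert_timestamps_lc([[1700000000, 42.0]], "America/Los_Angeles")
    print(out[0][0].isoformat())  # -> '2023-11-14T14:13:20-08:00'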
|
|
|
subbedToL = [("/wellget",1),("/wellget_cmp",1),("/well_hub",1)]
|
|
def on_connectL(client_, userdata, flags, rc):
|
|
print(MQTTSERVERL + " L. Connected with result code "+str(rc))
|
|
|
|
# Subscribing in on_connect() means that if we lose the connection and
|
|
# reconnect then subscriptions will be renewed.
|
|
client_.subscribe(subbedToL)
|
|
print("SubscribedL to: "+str(subbedToL))
|
|
|
|
def on_messageL(client_, userdata, msg): #message from GUI
|
|
print(msg.topic+" "+str(msg.payload))
|
|
#msga = msg.payload.decode("ascii")
|
|
#print(msg.timestamp)
|
|
#in_queue.append((str(time.time()), msg.topic, msg.payload))
|
|
|
|
def MQSendL(topic, content, qos=1):
|
|
print(topic, content[0:100])
|
|
#return MQSend(topic, content)
|
|
#currentTime = int(time.time())
|
|
    try:
        if "_cmp" in topic:
            import zlib  # zlib is not among the module-level imports; compressed topics need it
            enc_msg = zlib.compress(content.encode('utf-8'))
|
|
else:
|
|
enc_msg = content
|
|
clientL.publish(topic, enc_msg, qos=qos, retain=False)
|
|
except Exception as err:
|
|
print ("Err2B:", err)
|
|
try:
|
|
clientL.disconnect()
|
|
#client.username_pw_set('telegraf', 'well18')
|
|
clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
|
|
except Exception as e:
|
|
print ("Err3b:", e)
|
|
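# Illustrative sketch (not called anywhere in this module): topics containing "_cmp" carry
# zlib-compressed UTF-8 payloads, so a receiver would reverse MQSendL's encoding like this.
def _example_decode_cmp_payload(payload_bytes):
    import zlib  # local import, mirroring the compression side
    return zlib.decompress(payload_bytes).decode('utf-8')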
|
|
# CORS Middleware
|
|
class CORSMiddleware:
|
|
def process_request(self, req, resp):
|
|
resp.set_header('Access-Control-Allow-Origin', '*')
|
|
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
|
|
resp.set_header('Access-Control-Allow-Headers', '*')
|
|
|
|
def process_response(self, req, resp, resource, req_succeeded):
|
|
if req.method == 'OPTIONS': # Handle preflight requests
|
|
resp.status = falcon.HTTP_200
|
|
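# Illustrative sketch only: middleware such as CORSMiddleware is attached when the falcon
# application object is built. The exact wiring lives elsewhere in this file, so the line
# below is an assumption shown for orientation, not the actual construction:
#
#   app = falcon.App(middleware=[CORSMiddleware(), RequestParser(), StripPathMiddleware()])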
|
|
# Add this class to your code
|
|
class RequestParser:
|
|
def __init__(self):
|
|
# Detect if we're running in debug/development mode
|
|
self.debug_mode = __name__ == "__main__" or os.environ.get('DEBUG', 'false').lower() in ('true', '1', 'yes')
|
|
logger.debug(f"RequestParser initialized in {'DEBUG' if self.debug_mode else 'PRODUCTION'} mode")
|
|
|
|
def process_request(self, req, resp):
|
|
"""Pre-process the request to ensure media is parsed early"""
|
|
logger.debug(f"RequestParser processing: {req.method} {req.path}")
|
|
|
|
# Initialize an empty form_data dict
|
|
req.context.form_data = {}
|
|
|
|
# Only process POST requests with the right content type
|
|
if req.method != 'POST' or not req.content_type or 'form-urlencoded' not in req.content_type:
|
|
logger.debug("RequestParser: Skipping (not a form POST)")
|
|
return
|
|
|
|
try:
|
|
# Different handling based on environment
|
|
if self.debug_mode:
|
|
self._process_debug(req)
|
|
else:
|
|
self._process_production(req)
|
|
|
|
except Exception as e:
|
|
logger.error(f"RequestParser error: {str(e)}")
|
|
logger.error(traceback.format_exc())
|
|
|
|
def _process_debug(self, req):
|
|
"""Process request in debug mode - optimized for local development"""
|
|
logger.debug("RequestParser: Using DEBUG mode processing")
|
|
|
|
# In debug mode, we can use Content-Length and know it's reliable
|
|
content_length = req.get_header('content-length')
|
|
|
|
if content_length:
|
|
# Content-Length is present
|
|
content_length = int(content_length)
|
|
logger.debug(f"RequestParser: Reading {content_length} bytes using Content-Length")
|
|
|
|
raw_body = req.stream.read(content_length)
|
|
if raw_body:
|
|
body_text = raw_body.decode('utf-8')
|
|
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
|
|
|
|
# Parse the form data
|
|
import urllib.parse
|
|
form_data = dict(urllib.parse.parse_qsl(body_text))
|
|
|
|
# Store in context
|
|
req.context.form_data = form_data
|
|
logger.debug(f"RequestParser: Parsed form data: {form_data}")
|
|
|
|
# Reset the stream with the original content
|
|
import io
|
|
req.stream = io.BytesIO(raw_body)
|
|
else:
|
|
logger.debug("RequestParser: No body data read")
|
|
else:
|
|
logger.debug("RequestParser (debug): No Content-Length header")
|
|
|
|
def _process_production(self, req):
|
|
"""Process request in production mode - optimized for OpenFaaS/faasd deployment"""
|
|
logger.debug("RequestParser: Using PRODUCTION mode processing")
|
|
|
|
# Simple direct read approach for production (OpenFaaS/faasd)
|
|
# We'll limit the read to 1MB for safety
|
|
MAX_SIZE = 1024 * 1024 # 1MB
|
|
|
|
# Just read directly from the stream without checking
|
|
raw_body = req.stream.read(MAX_SIZE)
|
|
if raw_body:
|
|
body_text = raw_body.decode('utf-8')
|
|
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
|
|
|
|
# Parse the form data
|
|
import urllib.parse
|
|
form_data = dict(urllib.parse.parse_qsl(body_text))
|
|
|
|
# Store in context
|
|
req.context.form_data = form_data
|
|
logger.debug(f"RequestParser: Parsed form data: {form_data}")
|
|
|
|
# Reset the stream with the original content
|
|
import io
|
|
req.stream = io.BytesIO(raw_body)
|
|
else:
|
|
logger.debug("RequestParser: No body data read")
|
|
|
|
|
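# Illustrative sketch (not called anywhere in this module): the core pattern RequestParser
# relies on -- read the WSGI stream once, parse it, then rewind by swapping in a BytesIO so
# downstream code can still read the body.
def _example_parse_and_rewind(raw_body: bytes):
    import io
    import urllib.parse
    form_data = dict(urllib.parse.parse_qsl(raw_body.decode('utf-8')))
    return form_data, io.BytesIO(raw_body)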
|
|
|
|
|
# Add this function to your code
|
|
def get_form_data(req):
|
|
"""Helper function to get form data from either context or req.media"""
|
|
# First check if we pre-parsed the form data
|
|
if hasattr(req.context, 'form_data') and req.context.form_data:
|
|
logger.debug("Using pre-parsed form data from context")
|
|
return req.context.form_data
|
|
|
|
# Otherwise try to get from req.media (for json)
|
|
try:
|
|
if req.content_type and (
|
|
falcon.MEDIA_JSON in req.content_type or
|
|
falcon.MEDIA_URLENCODED in req.content_type
|
|
):
|
|
logger.debug("Attempting to get form data from req.media")
|
|
return req.media or {}
|
|
except Exception as e:
|
|
logger.error(f"Error getting req.media: {str(e)}")
|
|
|
|
logger.debug("No form data available, returning empty dict")
|
|
return {}
|
|
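# Illustrative sketch only: how a handler consumes get_form_data(). The handler below is
# hypothetical; the real handlers in WellApi follow the same pattern.
#
#   def on_post(self, req, resp):
#       form_data = get_form_data(req)
#       function = form_data.get('function')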
|
|
# Path handling middleware
|
|
class StripPathMiddleware:
|
|
def process_request(self, req, resp):
|
|
# Strip the '/function/well-api' prefix if present
|
|
path = req.path
|
|
logger.info(f"Original request path: {path}")
|
|
|
|
# Define patterns to match different URL formats
|
|
patterns = [
|
|
r'^/function/well-api', # Standard OpenFaaS path
|
|
r'^/api/well_api', # API path
|
|
]
|
|
|
|
for pattern in patterns:
|
|
if re.match(pattern, path):
|
|
# Strip the matched prefix
|
|
path = re.sub(pattern, '', path)
|
|
# Ensure path starts with a slash
|
|
if not path.startswith('/'):
|
|
path = '/' + path
|
|
# Update the request path
|
|
req.path = path
|
|
logger.info(f"Modified request path: {path}")
|
|
break
|
|
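# Illustrative sketch (not called anywhere in this module): the prefix stripping performed
# by StripPathMiddleware, applied to a sample OpenFaaS-style path.
def _example_strip_prefix(path='/function/well-api/health'):
    stripped = re.sub(r'^/function/well-api', '', path)
    if not stripped.startswith('/'):
        stripped = '/' + stripped
    return stripped  # -> '/health'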
|
|
# Main API class
|
|
class WellApi:
|
|
def on_get_healthz(self, req, resp):
|
|
"""Health check endpoint"""
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_TEXT
|
|
resp.text = "OK"
|
|
|
|
def on_get(self, req, resp, path=""):
|
|
"""Handle GET requests"""
|
|
logger.debug(f"GET request to path: {path}")
|
|
logger.debug(f"Sent variables: {req.params}")
|
|
logger.debug(f"All headers: {dict(req.headers)}")
|
|
if path == "" or path == "/":
|
|
# Serve the main portal page
|
|
blob_data = read_file("well_portal.html")
|
|
if blob_data:
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
else:
|
|
# Fall back to JSON response if file not found
|
|
resp.media = {"message": "Hello from OpenFaaS Serverless Web Server!", "method": "GET"}
|
|
return
|
|
elif path == "favicon.ico":
|
|
favicon_path = "favicon.ico"
|
|
if os.path.isfile(favicon_path):
|
|
resp.content_type = 'image/x-icon'
|
|
resp.data = read_file(favicon_path, type_="BIN")
|
|
resp.status = HTTP_200
|
|
else:
|
|
                resp.status = falcon.HTTP_404  # HTTP_404 is not imported directly, so use the falcon namespace
|
|
return
|
|
elif path == "health":
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"status": "healthy"})
|
|
return
|
|
|
|
# Authentication and authorization
|
|
token = req.params.get('token')
|
|
user_name = req.params.get('user_name')
|
|
user_info = verify_token(token)
|
|
|
|
if user_info == None or user_info["username"] != user_name:
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
|
|
get_function_name = req.params.get('name')
|
|
logger.debug(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] - {__name__}.GET_API->{get_function_name}")
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
|
|
        # token and user_name were already verified above; no need to re-check them here
|
|
|
|
if get_function_name == "deployment_add":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_deployment.html")
|
|
caretaker = {'deployment_id': 0, 'beneficiary_id': user_id, 'caretaker_id': user_id, 'owner_id': user_id, 'installer_id': user_id, 'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "devices_list":
|
|
st = time.time()
|
|
user_id = req.params.get('user_id')
|
|
privileges = GetPriviledgesOnly(user_id)
|
|
first_s = req.params.get('first')
|
|
last_s = req.params.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
last = 1000000
|
|
blob_data = read_file("my_devices.html")
|
|
|
|
devices = GetVisibleDevices(privileges)
|
|
users = GetUsersFromDeployments(privileges)
|
|
blob_data = UpdateDevicesTable(blob_data, devices, users)
|
|
blob_data = UpdateDeploymentsSelector(blob_data, users)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
#print(blob_data)
|
|
return
|
|
|
|
elif get_function_name == "deployment_edit":
|
|
deployment_id = req.params.get('deployment_id')
|
|
blob_data = read_file("edit_deployment.html")
|
|
|
|
deployment = DeploymentDetails(deployment_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, deployment, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "caretaker_add":
|
|
|
|
blob_data = read_file("edit_caretaker.html")
|
|
caretaker = {'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "caretaker_edit":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_caretaker.html")
|
|
|
|
caretaker = UserDetails(user_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "device_add":
|
|
|
|
blob_data = read_file("edit_device.html")
|
|
device = {'device_id': 0, 'device_mac': '', 'well_id': '', 'description': '', 'location': '', 'close_to': '', 'radar_threshold': '["s3_max",50]', 'temperature_calib': '0.0,1.0,0.0', 'humidity_calib': '0.0,1.0,0.0'}
|
|
blob_data = FillFields(blob_data, device, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "device_edit":
|
|
mac = req.params.get('mac')
|
|
|
|
blob_data = read_file("edit_device.html")
|
|
|
|
device_det = DeviceDetails(mac)
|
|
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
|
|
device_det['radar_threshold'] = '["s3_max",12]'
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, device_det, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
|
|
elif get_function_name == "beneficiary_edit":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_beneficiary.html")
|
|
|
|
beneficiary = UserDetails(user_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, beneficiary, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "beneficiary_add":
|
|
|
|
blob_data = read_file("edit_beneficiary.html")
|
|
beneficiary = {'user_id': 0, 'role_ids': '1', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, beneficiary, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "get_image_file":
|
|
#image represents day in local time
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
group_by = req.params.get("group_by")
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
radar_part = req.params.get("radar_part")
|
|
map_type = int(req.params.get("map_type"))
|
|
|
|
bw = req.params.get("bw") == "true"
|
|
unique_identifier = req.params.get("unique_identifier")
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{group_by}_{radar_part}_{map_type}_{bw}_dayly_image.png"
|
|
|
|
#print(check_file_exists(filename))
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
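            # A cached image counts as stale when it was last modified on or before the local
            # date it depicts: data for that day may still have been arriving when the image
            # was rendered, so it is regenerated.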
|
|
#ddate is in Local Time
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
#time that describes new devices in deployment_history is in UTC therefore timee is in UTC
|
|
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
#file_date is in Local time, so we are comparing that and current Local (to install) Date
|
|
if force_recreate:
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
devices_list = GetProximityList(deployment_id, timee)
|
|
stored, vocs_scaled = CreateMapFast(filename, devices_list, ddate, bw, time_zone_s, radar_part, group_by) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
if stored != True:
|
|
AddToLog("Map not created")
|
|
#logger.warning("Map not created")
|
|
resp.media = package_response("Map not created", HTTP_401)
|
|
return
|
|
else:
|
|
AddToLog("Map created")
|
|
#lets send over MQTT vocs_scaled
|
|
json_data = numpy_to_json(vocs_scaled, devices_list)
|
|
MQSendL("/"+unique_identifier, json_data)
|
|
#print(time.time() - st)
|
|
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
if debug:
|
|
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
|
|
else:
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
sys.stdout.flush()
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif get_function_name == "get_full_location_map":
|
|
|
|
raw = req.params.get("raw") == "true"
|
|
|
|
if raw:
|
|
#function=request_deployment_map_new
|
|
#token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6InJvYnN0ZXIiLCJleHAiOjE3MzgxNzYzNTZ9.5wzC2dVQhKlMygHPZfombTINbltNq8vxdilLIugNTtA&
|
|
#user_name=robster&
|
|
#date=2025-01-27&
|
|
#deployment_id=21&
|
|
#map_type=2
|
|
chart_type = 8
|
|
else:
|
|
chart_type = int(req.params.get("map_type"))
|
|
|
|
#image represents day in local time
|
|
logger.debug("get_full_location_map")
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
|
|
to_date = ddate
|
|
|
|
try:
|
|
to_date = req.params.get("to_date")
|
|
to_date = to_date.replace("_","-")
|
|
except:
|
|
pass
|
|
|
|
if to_date != ddate:
|
|
chart_type = int(req.params.get("map_type"))
|
|
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
force_recreate_orig = force_recreate
|
|
scale_global = req.params.get("scale_global") == "true"
|
|
fast = req.params.get("fast") == "true"
|
|
bw = req.params.get("bw") == "true"
|
|
motion = req.params.get("motion") == "true"
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
|
|
filter_minutes = int(req.params.get("filter"))
|
|
|
|
if "flavor" in req.params: #this is to be used only when creating
|
|
flavor = int(req.params.get("flavor"))
|
|
else:
|
|
flavor = 0
|
|
|
|
if bw:
|
|
bw_s = "BW"
|
|
else:
|
|
bw_s = "CLR"
|
|
|
|
if fast:
|
|
fast_s = "FAST"
|
|
else:
|
|
fast_s = "SLOW"
|
|
|
|
if motion:
|
|
motion_s = "M"
|
|
else:
|
|
motion_s = "S"
|
|
|
|
if scale_global:
|
|
scl_s = "scl"
|
|
else:
|
|
scl_s = "nscl"
|
|
|
|
|
|
if chart_type == 5 or chart_type == 7:
|
|
#now_date = req.params.get("now_date")
|
|
#now_date = now_date.replace("_","-")
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
|
|
else:
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
#file_exists1, time_modified_utc1 = check_file_exists(filename+".bin")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - time_modified_local
|
|
#if time_passed.seconds > 300: #recreate if older than 5 minutes
|
|
# force_recreate = True
|
|
|
|
else:
|
|
force_recreate = True
|
|
|
|
if force_recreate:
|
|
ddate = ddate.replace("_","-")
|
|
#filter_minutes = 5
|
|
#filename = os.path.join(scriptDir+"/daily_maps/"+deployment, proximity_string+"_"+deployment+"_"+ddate+"_dayly_image.png")
|
|
|
|
filename = filename.replace('\\','/')
|
|
if chart_type == 4: #"collapsed":
|
|
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
|
|
elif chart_type == 5: #"history":
|
|
GeneratePresenceHistory(filename, force_recreate, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
elif chart_type == 7: #"history full chart":
|
|
filename = GeneratePresenceHistoryChart(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
elif chart_type == 8: #"set for mobile"
|
|
GenerateFullLocationMapLabelsOut(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
|
|
else:
|
|
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
|
|
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif get_function_name == "get_presence_map":
|
|
#image represents day in local time
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
scale_global = req.params.get("scale_global") == "true"
|
|
fast = req.params.get("fast") == "true"
|
|
bw = req.params.get("bw") == "true"
|
|
motion = req.params.get("motion") == "true"
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
chart_type = int(req.params.get("map_type"))
|
|
filter_minutes = int(req.params.get("filter"))
|
|
|
|
if bw:
|
|
bw_s = "BW"
|
|
else:
|
|
bw_s = "CLR"
|
|
|
|
if fast:
|
|
fast_s = "FAST"
|
|
else:
|
|
fast_s = "SLOW"
|
|
|
|
if motion:
|
|
motion_s = "M"
|
|
else:
|
|
motion_s = "S"
|
|
|
|
if scale_global:
|
|
scl_s = "scl"
|
|
else:
|
|
scl_s = "nscl"
|
|
|
|
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
ddate = ddate.replace("_","-")
|
|
days = 7
|
|
|
|
filename = filename.replace('\\','/')
|
|
if chart_type == 6: #"AI Locations":
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreatePresenceMap(filename, devices_list, ddate, 1, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif get_function_name == "download":
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
date_from = req.params.get("date_from")
|
|
date_to = req.params.get("date_to")
|
|
date_from = date_from.replace("_","-")
|
|
date_to = date_to.replace("_","-")
|
|
consolidated_by = req.params.get("consolidated_by")
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
radar_part = req.params.get("radar_part")
|
|
zip_filename = f"/{deployment_id}/{deployment_id}_{date_from}_{date_to}_{consolidated_by}_data.zip"
|
|
|
|
#print(check_file_exists(filename))
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(zip_filename, bucket_name="data-downloads")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = CSVFileToDate(zip_filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
#ddate is in Local Time
|
|
dates = DatesSpan(date_from, date_to)
|
|
to_zip = []
|
|
for ddate in dates:
|
|
force_recreate_csv = force_recreate
|
|
csv_dayly_filename = f"/{deployment_id}/{deployment_id}_{ddate}_{consolidated_by}_data.csv"
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
if not force_recreate_csv:
|
|
#time that describes new devices in deployment_history is in UTC therefore timee is in UTC
|
|
file_exists, time_modified_utc = check_file_exists(csv_dayly_filename, bucket_name="data-downloads")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = CSVFileToDate(csv_dayly_filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate_csv = True
|
|
else:
|
|
force_recreate_csv = True
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
#file_date is in Local time, so we are comparing that and current Local (to install) Date
|
|
if force_recreate_csv:
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
|
|
devices_list = GetProximityList(deployment_id, timee)
|
|
temp_offset = -16
|
|
file_stored = CreateDailyCSV(csv_dayly_filename, devices_list, ddate, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
to_zip.append(file_stored)
|
|
else:
|
|
to_zip.append(csv_dayly_filename)
|
|
|
|
if to_zip:
|
|
success = zip_blobs(
|
|
blob_paths=to_zip,
|
|
zip_blob_name=zip_filename,
|
|
bucket_name="data-downloads",
|
|
minio_client=miniIO_blob_client
|
|
)
|
|
|
|
if success:
|
|
print("Files successfully zipped")
|
|
else:
|
|
print("Error occurred while zipping files")
|
|
|
|
#pack CSV files from BLOB into ZIP
|
|
#lets read and send image from blob
|
|
zip_bytes, content_type = GetBlob(zip_filename, bucket_name="data-downloads")
|
|
if debug:
|
|
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
|
|
else:
|
|
if zip_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='File not found',
|
|
description=f'File {zip_filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = zip_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
resp.media = package_response("Use POST method for this endpoint", HTTP_400)
|
|
|
|
# Default response for unmatched paths
|
|
#resp.media = package_response(f"Path: /{path}", HTTP_200)
|
|
|
|
def on_post(self, req, resp, path=""):
|
|
"""Handle POST requests"""
|
|
logger.debug(f"on_post called with path: {path}")
|
|
logger.debug(f"Request method: {req.method}")
|
|
logger.debug(f"Request path: {req.path}")
|
|
logger.debug(f"Request query string: {req.query_string}")
|
|
logger.debug(f"Request headers: {req.headers}")
|
|
logger.debug(f"Request content type: {req.content_type}")
|
|
|
|
# Get form data using our helper function - but don't read stream again
|
|
form_data = get_form_data(req)
|
|
logger.debug(f"Form data: {form_data}")
|
|
|
|
## Special cases for specific endpoints
|
|
#if path == "users":
|
|
#logger.info("POST request to users endpoint")
|
|
#resp.status = HTTP_201
|
|
#resp.content_type = falcon.MEDIA_JSON
|
|
#resp.text = json.dumps({"id": "new-user-id", "message": "User created"})
|
|
#return
|
|
#elif path == "items":
|
|
#logger.info("POST request to items endpoint")
|
|
#resp.status = HTTP_201
|
|
#resp.content_type = falcon.MEDIA_JSON
|
|
#resp.text = json.dumps({"id": "new-item-id", "message": "Item created"})
|
|
#return
|
|
|
|
try:
|
|
# Get basic parameters
|
|
function = form_data.get('function')
|
|
user_name = form_data.get('user_name')
|
|
logger.debug(f"Function: {function}, User: {user_name}")
|
|
|
|
|
|
if function != "credentials":
|
|
token = form_data.get('token')
|
|
|
|
user_info = verify_token(token)
|
|
|
|
                # A single verification is enough; the combined check below also rejects a
                # token that does not match the supplied user_name.
|
|
|
|
if user_info == None or user_info["username"] != user_name:
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
|
|
|
|
#with get_db_connection() as db_conn:
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
|
|
# Handle credentials function - most common case
|
|
if function == "credentials":
|
|
|
|
clientId = form_data.get('clientId')
|
|
nonce = form_data.get('nonce')
|
|
ps = form_data.get('ps')
|
|
|
|
if not user_name:
|
|
resp.media = package_response("Required field 'user_name' is missing", HTTP_400)
|
|
return
|
|
|
|
if not clientId:
|
|
resp.media = package_response("Required field 'clientId' is missing", HTTP_400)
|
|
return
|
|
|
|
if not nonce:
|
|
resp.media = package_response("Required field 'nonce' is missing", HTTP_400)
|
|
return
|
|
|
|
if not ps:
|
|
resp.media = package_response("Required field 'ps' is missing", HTTP_400)
|
|
return
|
|
|
|
|
|
|
|
if user_name == MASTER_ADMIN and ps == MASTER_PS:
|
|
access_token = generate_token(user_name)
|
|
privileges, user_id = ValidUser(user_name, ps)
|
|
privileges = "-1"
|
|
else:
|
|
#lets check for real
|
|
privileges, user_id = ValidUser(user_name, ps)
|
|
if privileges == "0":
|
|
access_token = 0
|
|
privileges = 0
|
|
else:
|
|
access_token = generate_token(user_name)
|
|
|
|
token_payload = {'access_token': access_token, 'privileges': privileges, 'user_id': user_id}
|
|
resp.media = package_response(token_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
# Handle token-protected functions
|
|
elif function == "messages_age":
|
|
|
|
macs = form_data.get('macs')
|
|
|
|
with get_db_connection() as conn:
|
|
|
|
#print (sqlr)
|
|
with conn.cursor() as cur:
|
|
|
|
devices = MACsStrToDevIds(cur, macs)
|
|
|
|
devices_string = ",".join(f"{device_id}" for mac, device_id in devices)
|
|
|
|
|
|
sqlr = f"""
|
|
SELECT
|
|
device_id,
|
|
GREATEST(
|
|
radar_last_time,
|
|
sensor_last_time
|
|
) AS latest_time
|
|
FROM
|
|
(SELECT unnest(ARRAY[{devices_string}]) AS device_id) d
|
|
LEFT JOIN LATERAL (
|
|
SELECT time AS radar_last_time
|
|
FROM radar_readings
|
|
WHERE device_id = d.device_id
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) r ON true
|
|
LEFT JOIN LATERAL (
|
|
SELECT time AS sensor_last_time
|
|
FROM sensor_readings
|
|
WHERE device_id = d.device_id
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) s ON true;"""
|
|
logger.debug(f"sqlr= {sqlr}")
|
|
cur.execute(sqlr)
|
|
times_list = cur.fetchall()
|
|
result = {}
|
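                        # The query returns one row per unnest()ed device id, and the code below
                        # assumes that row order matches the order of `devices`, so index i pairs
                        # each (mac, device_id) with its latest reading time.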
|
for i in range(len(times_list)):
|
|
if times_list[i][1] is not None:
|
|
result[devices[i][0]] = times_list[i][1].timestamp()
|
|
else:
|
|
result[devices[i][0]] = 0
|
|
|
|
dataa = {}
|
|
dataa['Command'] = "REPORT"
|
|
dataa['body'] = result
|
|
dataa['time'] = time.time()
|
|
#json_data = json.dumps(dataa)
|
|
payload = {'ok': True, 'response': dataa}
|
|
resp.media = package_response(payload)
|
|
logger.warning(f"Responded: {str(payload)}")
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "voice_ask":
|
|
|
|
question = form_data.get('question')
|
|
deployment_id = form_data.get('deployment_id')
|
|
|
|
if ('language_from' in form_data):
|
|
language_from = form_data.get('language_from').strip()
|
|
else:
|
|
language_from = "English"
|
|
|
|
if ('language_to' in form_data):
|
|
language_to = form_data.get('language_to').strip()
|
|
else:
|
|
language_to = "English"
|
|
|
|
|
|
result, language = AskGPT(question, language_from, language_to)
|
|
|
|
if result[0] == "#":
|
|
result = RunCommand(result, {}, deployment_id)
|
|
|
|
dataa = {}
|
|
dataa['Command'] = "REPORT"
|
|
dataa['body'] = result
|
|
dataa['name'] = ""
|
|
dataa['reflected'] = ""
|
|
dataa['language'] = language
|
|
dataa['time'] = time.time()
|
|
#json_data = json.dumps(dataa)
|
|
payload = {'ok': True, 'response': dataa}
|
|
resp.media = package_response(payload)
|
|
logger.warning(f"Responded: {str(payload)}")
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
|
|
elif function == "calibrate_thresholds":
|
|
#this will use current date to calibrate radar presence thresholds.
|
|
#make sure that data is well defined (has clear absence/presence signature) for all rooms for chosen day
|
|
#Format of radar_threshold field = [gates_to_use_Presence_list, p_threshold]
|
|
#We need to automate this functionality!!!
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = form_data.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
selected_date = ddate
|
|
|
|
|
|
|
|
stdev_range = int(form_data.get("stdev_range"))
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
selected_date = FindCalibrationDate(device_ids, ddate)
|
|
|
|
                devices_c = len(devices_list)  # number of devices in the deployment
|
|
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s, stdev_range)
|
|
fields = ["radar_s_min", "radar_s_max", "radar_m_max", "radar_stdev"]
|
|
cnt = 0
|
|
ids_list = []
|
|
for details in devices_list:
|
|
ids_list.append(details[1])
|
|
devices_list_str = ",".join(map(str, ids_list))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
|
|
minutes = 1440
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
for device_index in range(devices_c):
|
|
well_id = devices_list[device_index][0]
|
|
device_id = devices_list[device_index][1]
|
|
location = devices_list[device_index][2]
|
|
|
|
sql = get_device_radar_s28_only_query(time_from_str, time_to_str, device_id)
|
|
print(sql)
|
|
|
|
#sql1 = get_deployment_radar_only_colapsed_query(str(device_id), time_from_str, time_to_str, [device_id])
|
|
#print(sql1)
|
|
st = time.time()
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
timestamps, stationary, motion = process_raw_data(my_data)
|
|
print(type(stationary))
|
|
# Find threshold above which 20% of points lie
|
|
AveragePercentSpendsThere = AveragePercentPerLocation[Consolidataed_locations[location]]
|
|
threshold_high, threshold_low = FindThreshold(stationary, AveragePercentSpendsThere)
|
|
file_save = f"threshold_graph_{location}.png"
|
|
title = f"{well_id}_{location}"
|
|
|
|
threshold2, x_percent, y_percent = ShowThresholdGraph(stationary, file_save, threshold_low, threshold_high, title, AveragePercentSpendsThere, location)
|
|
|
|
print(f"Maximum curvature point found at:")
|
|
print(f"Threshold value: {threshold2:.3f}")
|
|
print(f"X: {x_percent:.1f}% of range")
|
|
print(f"Y: {y_percent:.1f}% of points above")
|
|
|
|
ShowArray(stationary, threshold2, filename=f"stationary_{devices_list[device_index][0]}.png", title=f"stationary_{devices_list[device_index][0]}_{devices_list[device_index][2]}", style='line')
|
|
|
|
|
|
##threshold
|
|
##presence_mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
|
|
|
|
### Save visualization to file
|
|
##visualize_detection(timestamps, stationary, motion, presence_mask,
|
|
## baseline, threshold)
|
|
|
|
#cur.execute(sql1)
|
|
#my_data1 = cur.fetchall()#cur.fetchone()
|
|
#print(time.time() - st)
|
|
#if my_data == None or my_data1 == None:
|
|
#logger.warning(f"No data found for device_id {device_id}")
|
|
#else:
|
|
#print(type(my_data))
|
|
##minute,
|
|
##device_id,
|
|
##s_min as radar_s_min,
|
|
##s_max as radar_s_max,
|
|
##m_max as radar_m_max
|
|
|
|
#values = [tup[1] for tup in my_data] #10 sec (RAW) data
|
|
|
|
#hist, bins = np.histogram(values, bins=1000, range=(0, 100))
|
|
#TR, BR = FindZeroIntersection(hist, bins, f'raw_{device_id}_histogram.png', device_id)
|
|
#if True:#device_id == 560:
|
|
#plot(values, filename=f"radar_{device_id}_s28.png", title=f"Radar s28 {device_id}", style='line')
|
|
#plot(hist, filename=f"radar_{device_id}_s28_hist.png", title=f"Radar s28 {device_id} histogram", style='line')
|
|
|
|
##life = [tup[3] - tup[2] + tup[4] for tup in my_data1]
|
|
#life, average = calculate_life_and_average(my_data1, stdev_range) #5 min data
|
|
#lhist, lbins = np.histogram(life, bins=1000)
|
|
#TLIFE, BLIFE = FindZeroIntersection(lhist, lbins, f'life_{device_id}_histogram.png', device_id)
|
|
|
|
#StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE)
|
|
##for now not needed...
|
|
##ahist, abins = np.histogram(average, bins=1000)
|
|
##dummy1, dummy = FindZeroIntersection(ahist, abins)
|
|
#if True:#device_id == 560:
|
|
#plot(average, filename=f"average_{device_id}.png", title=f"Average {device_id}", style='line')
|
|
#plot(life, filename=f"life_{device_id}.png", title=f"Life {device_id}", style='line')
|
|
#plot(lhist, filename=f"life_{device_id}_hist.png", title=f"life {device_id} histogram", style='line')
|
|
##plot(ahist, filename=f"average_{device_id}_hist.png", title=f"average {device_id} histogram", style='line')
|
|
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
my_data = []
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
fields_n = len(fields)
|
|
stripes = devices_c * fields_n #radar_min and radar_max
|
|
print(my_data)
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
#remember: base_minute is offset (smaller) by numbr of minutes in stdev_range
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440+2*stdev_range, 1), dtype=np.float32)
|
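                # wave_m layout: one stripe (row) per (device, field) pair, addressed as
                # y = device_index * len(fields) + field_index, with one column per minute.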
|
|
|
for record in my_data:
|
|
#(minute,device_id,s28_min,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the min/max values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
|
|
device_idx = device_to_index[device_id]
|
|
#value[0] are mins, value[1] are maxes
|
|
#when trying to illustrate presence, use s28_max, when absence (night leaving bed) use s28s_min
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
wave_m[y, x] = value
|
|
|
|
print(time.time()-st)
|
|
|
|
#we need to reliably determine presence and LIFE (motion) in every 5 minutes of data...
|
|
#presence is determined by average value being significntly different from last known base
|
|
#last known base is determined by average value during extended periods ( >= H hours) of low stdev (<) while it is determined that:
|
|
#person is moving elsewhere, and only 1 person is determined to be in monitored area.
|
|
|
|
#lets calculate stdevs
|
|
for device_index in range(devices_c):
|
|
y = device_index * fields_n
|
|
row = wave_m[y]
|
|
stdevs = np.zeros((1440+2*stdev_range, 1), dtype=np.float32)
|
|
stdevs, amplitude = CalcStdevs(row, stdev_range, stdevs)
|
|
wave_m[y+3] = stdevs
|
|
plot(stdevs, filename=f"radar{device_index}_stdevs.png", title=f"Radar Stedevs {device_index}", style='line')
|
|
|
|
minutes = 1440
|
|
|
|
|
|
device_index = 0
|
|
y = 0
|
|
for device in devices_list:
|
|
wave = wave_m[y][stdev_range: stdev_range + minutes]
|
|
plot(wave,
|
|
filename="radar_wave_min.png",
|
|
title="Radar Signal Min",
|
|
style='line')
|
|
# Create histogram with 1000 bins
|
|
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
|
|
|
|
#bin_centers = (bins[:-1] + bins[1:]) / 2
|
|
hist_line = hist # These are your y values
|
|
|
|
# Plot with proper axis labels
|
|
plot(hist_line,
|
|
filename="radar_histogram_min.png",
|
|
title="Radar Signal Histogram Min (1000 bins)",
|
|
style='line')
|
|
|
|
wave = wave_m[y+1]
|
|
plot(wave,
|
|
filename="radar_wave_max.png",
|
|
title="Radar Signal",
|
|
style='line')
|
|
# Create histogram with 1000 bins
|
|
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
|
|
|
|
#bin_centers = (bins[:-1] + bins[1:]) / 2
|
|
hist_line = hist # These are your y values
|
|
|
|
# Plot with proper axis labels
|
|
plot(hist_line,
|
|
filename="radar_histogram_max.png",
|
|
title="Radar Signal Histogram Max(1000 bins)",
|
|
style='line')
|
|
|
|
                    print(wave)
                    device_index += 1  # advance to the next device's group of stripes
                    y = device_index * fields_n
|
|
|
|
#lets see this map
|
|
stretch_by = 5
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = []
|
|
row = wave_m[yy]
|
|
for x in range(minutes):
|
|
value = 1280 * row[x] / 100
|
|
rgb_row.append(BestColor(value))
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
filename = f"{deployment_id}/{deployment_id}_{ddate}_min_max_radar.png"
|
|
SaveImageInBlob(filename, arr_stretched, [])
|
|
|
|
|
|
return
|
|
|
|
elif function == "request_single_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
devices_list = form_data.get('devices_list')
|
|
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
|
|
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
|
|
sensor_list_loc = [form_data.get('sensor_list')]
|
|
is_nested, device_details = check_and_parse(devices_list)
|
|
if not is_nested:
|
|
device_ids_list = [device_details[1]]
|
|
well_ids_list = [device_details[0]]
|
|
else:
|
|
device_ids_list = list(map(lambda x: x[1], device_details))
|
|
well_ids_list =list(map(lambda x: x[0], device_details))
|
|
|
|
data_type = form_data.get('data_type')
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
|
|
                #epoch_to = '1730592010'  # small sample to test
|
|
radar_part = form_data.get('radar_part')
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
device_id2_mac = {device_details[1]: device_details[4]}
|
|
for device_id in device_ids_list:
|
|
|
|
sensor_data = {}
|
|
for sensor in sensor_list_loc:
|
|
st = time.time()
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
window = sensor_legal_values[sensor][2]
|
|
#print("@1", time.time() - st)
|
|
#first = 3300
|
|
#last = 3400
|
|
#line_part = line_part[first:last]
|
|
line_part_t = []
|
|
#st = time.time()
|
|
#line_part_t = [tuple(x[:2]) for x in line_part]
|
|
#print(time.time() - st)
|
|
#st = time.time()
|
|
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
|
|
#print(time.time() - st)
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#print("@2", time.time() - st)
|
|
|
|
#Lets add point in minute 0 and minute 1439
|
|
|
|
#st = time.time()
|
|
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
|
|
#print("@3", time.time() - st)
|
|
|
|
sensor_data[sensor] = cleaned_values
|
|
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "single_slicedata"
|
|
dataa['devices_list'] = devices_list
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['well_id'] = well_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
#return
|
|
elif function == "get_sensor_bucketed_data_by_room_sensor":
|
|
# Inputs:
|
|
# user_name and token
|
|
# deployment_id - from which report gets deployment set (all rooms and devices) to get timezone
|
|
# date - one day in a format YYYY-MM-DD
|
|
# sensor - temperature/radar/etc.. see full list
|
|
# (tells what sensor data to be retrieved)
|
|
# "voc" for all smell use s4 (lower reading is higher smell, max=0 find min for 100%)
|
|
# "radar" returns s28
|
|
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
|
|
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
# location - room name (has to be unique)
|
|
# data_type - ML
|
|
                # Output: JSON structure with the following info
|
|
# chart_data with rooms : [list]
|
|
deployment_id = form_data.get('deployment_id')
|
|
selected_date = form_data.get('date')
|
|
sensor = form_data.get('sensor') # one sensor
|
|
radar_part = form_data.get('radar_part')
|
|
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
|
|
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
|
|
location = form_data.get('location')
|
|
data_type = form_data.get('data_type')
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s)
|
|
|
|
# obtain devices_list for deployment_id
|
|
selected_date = selected_date.replace("_","-")
|
|
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
|
|
sensor_data = {}
|
|
units = "°C"
|
|
if "America" in time_zone_s:
|
|
units = "°F"
|
|
# see https://www.w3schools.com/cssref/css_colors.php
|
|
sensor_props = {"temperature": ["red", units],
|
|
"humidity": ["blue", "%"],
|
|
"voc": ["orange", "PPM"],
|
|
"co2": ["orange", "PPM"],
|
|
"pressure": ["magenta", "Bar"],
|
|
"radar": ["cyan", "%"],
|
|
"light": ["yellow", "Lux"]}
|
|
|
|
current_time_la = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
formatted_time = current_time_la.strftime('%Y-%m-%dT%H:%M:%S') #"2025-02-06T20:09:00"
|
|
|
|
result_dictionary = {
|
|
"last_report_at": formatted_time,
|
|
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
|
|
"units": sensor_props[sensor][1] if sensor in s_table else "?"
|
|
}
|
|
#sensor_mapping = {"co2": "s4", "voc": "s9"}
|
|
#sensor = sensor_mapping.get(sensor, sensor)
|
|
|
|
chart_data = []
|
|
            # example element of devices_list: (well_id, device_id, location, description, MAC, radar_threshold, close_to),
            # e.g. (266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]', None)
|
|
for well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to in devices_list:
|
|
if location_name == location:
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
window = sensor_legal_values[sensor][2]
|
|
line_part_t = []
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
|
|
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
|
|
if sensor == "temperature":
|
|
if units == "°F":#"America" in time_zone_s:
|
|
compressed_readings = CelsiusToFahrenheitList(compressed_readings)
|
|
|
|
|
|
|
|
sensor_data[sensor] = compressed_readings
|
|
chart_data.append({'name': location_name, 'data': compressed_readings})
|
|
result_dictionary['chart_data'] = chart_data
|
|
payload = result_dictionary
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "get_sensor_data_by_deployment_id":
|
|
# Inputs:
|
|
# user_name and token
|
|
# deployment_id - from which report gets deployment set (all rooms and devices)
|
|
# date - one day in a format YYYY-MM-DD
|
|
# sensor - temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
|
|
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
|
|
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
# data_type - ML
|
|
                # Output: JSON structure with the following info
|
|
# chart_data with rooms : [list]
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
sensor = form_data.get('sensor') # one sensor
|
|
radar_part = form_data.get('radar_part')
|
|
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
|
|
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
|
|
data_type = form_data.get('data_type')
|
|
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
# obtain devices_list for deployment_id
|
|
selected_date = selected_date.replace("_","-")
|
|
#timee = LocalDateToUTCEpoch(selected_date, time_zone_s)+5
|
|
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
|
|
sensor_data = {}
|
|
# see https://www.w3schools.com/cssref/css_colors.php
|
|
sensor_props = {"temperature": ["red", "°C"],
|
|
"humidity": ["blue", "%"],
|
|
"voc": ["orange", "PPM"],
|
|
"co2": ["orange", "PPM"],
|
|
"pressure": ["magenta", "Bar"],
|
|
"radar": ["cyan", "%"],
|
|
"light": ["yellow", "Lux"]}
|
|
result_dictionary = {
|
|
"last_report_at": "2025-02-06T20:09:00",
|
|
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
|
|
"units": sensor_props[sensor][1] if sensor in s_table else "?"
|
|
}
|
|
#sensor_mapping = {"co2": "s4", "voc": "s9"}
|
|
#sensor = sensor_mapping.get(sensor, sensor)
|
|
|
|
chart_data = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
window = sensor_legal_values[sensor][2]
|
|
line_part_t = []
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
|
|
|
|
#compressed_readings = [(time.strftime("%H:%M", time.gmtime(lst[0][0])), float(sum(t for _, t in lst)/len(lst)))
|
|
#for _, lst in ((k, list(g))
|
|
#for k, g in itertools.groupby(cleaned_values, key=lambda x: time.gmtime(x[0]).tm_hour))]
|
|
sensor_data[sensor] = compressed_readings
|
|
chart_data.append({'name': location_name,
|
|
'data': compressed_readings})
|
|
result_dictionary['chart_data'] = chart_data
|
|
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
#is_neste, device_details = check_and_parse(devices_list)
|
|
#if not is_nested:
|
|
#device_ids_list = [device_details[1]]
|
|
#well_ids_list = [device_details[0]]
|
|
#else:
|
|
#device_ids_list = list(map(lambda x: x[1], device_details))
|
|
#well_ids_list =list(map(lambda x: x[0], device_details))
|
|
#well_id = well_ids_list[0]
|
|
#all_slices = {}
|
|
#device_id2_mac = {device_details[1]: device_details[4]}
|
|
#for device_id in device_ids_list:
|
|
#device_id2_mac
|
|
#sensor_data = {}
|
|
#for sensor in sensor_list_loc:
|
|
#st = time.time()
|
|
#line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
#window = sensor_legal_values[sensor][2]
|
|
#line_part_t = []
|
|
#line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
#st = time.time()
|
|
#cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
#cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#sensor_data[sensor] = cleaned_values
|
|
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
#dataa = {}
|
|
#dataa['Function'] = "single_slicedata"
|
|
#dataa['devices_list'] = devices_list
|
|
#dataa['all_slices'] = all_slices
|
|
#dataa['time_zone_st'] = time_zone_s
|
|
#dataa['well_id'] = well_id
|
|
#resp.media = package_response(dataa)
|
|
#resp.status = falcon.HTTP_200
|
|
                # Unused example of the expected output shape (kept for reference only)
                result_dictionary2 = {
|
|
"alert_text": "No alert",
|
|
"alert_color": "bg-green-100 text-green-700",
|
|
"last_report_at": "ISO TIMESTAMP",
|
|
"chart_data": [
|
|
{
|
|
"rooms": [
|
|
{ "name": "Bathroom",
|
|
"data": [
|
|
{"title": "12AM","value": 20},
|
|
{"title": "01AM","value": 20},
|
|
{"title": "02AM","value": 26},
|
|
{"title": "03AM","value": 16},
|
|
{"title": "04AM","value": 27},
|
|
{"title": "05AM","value": 23},
|
|
{"title": "06AM","value": 26},
|
|
{"title": "07AM","value": 17},
|
|
{"title": "08AM","value": 18},
|
|
{"title": "09AM","value": 21},
|
|
{"title": "10AM","value": 28},
|
|
{"title": "11AM","value": 24},
|
|
{"title": "12PM","value": 18},
|
|
{"title": "01PM","value": 27},
|
|
{"title": "02PM","value": 27},
|
|
{"title": "03PM","value": 19},
|
|
{"title": "04PM","value": 0},
|
|
{"title": "05PM","value": 0},
|
|
{"title": "06PM","value": 0},
|
|
{"title": "07PM","value": 0},
|
|
{"title": "08PM","value": 0},
|
|
{"title": "09PM","value": 0},
|
|
{"title": "10PM","value": 0},
|
|
{"title": "11PM","value": 0}
|
|
]
|
|
},
|
|
{ "name": "Kitchen",
|
|
"data": [
|
|
{"title": "00AM","value": 19},
|
|
{"title": "01AM","value": 10},
|
|
{"title": "02AM","value": 8},
|
|
{"title": "03AM","value": 14},
|
|
{"title": "04AM","value": 20},
|
|
{"title": "05AM","value": 8},
|
|
{"title": "06AM","value": 7},
|
|
{"title": "07AM","value": 17},
|
|
{"title": "08AM","value": 3},
|
|
{"title": "09AM","value": 19},
|
|
{"title": "10AM","value": 4},
|
|
{"title": "11AM","value": 6},
|
|
{"title": "12PM","value": 4},
|
|
{"title": "01PM","value": 14},
|
|
{"title": "02PM","value": 17},
|
|
{"title": "03PM","value": 20},
|
|
{"title": "04PM","value": 19},
|
|
{"title": "05PM","value": 15},
|
|
{"title": "06PM","value": 5},
|
|
{"title": "07PM","value": 19},
|
|
{"title": "08PM","value": 3},
|
|
{"title": "09PM","value": 30},
|
|
{"title": "10PM","value": 1},
|
|
{"title": "11PM","value": 12 }
|
|
]
|
|
},
|
|
{ "name": "Living Room",
|
|
"data": [
|
|
{"title": "00AM","value": 25},
|
|
{"title": "01AM","value": 24},
|
|
{"title": "02AM","value": 19},
|
|
{"title": "03AM","value": 20},
|
|
{"title": "04AM","value": 22},
|
|
{"title": "05AM","value": 20},
|
|
{"title": "06AM","value": 11},
|
|
{"title": "07AM","value": 5},
|
|
{"title": "08AM","value": 16},
|
|
{"title": "09AM","value": 22},
|
|
{"title": "10AM","value": 23},
|
|
{"title": "11AM","value": 14},
|
|
{"title": "12PM","value": 0},
|
|
{"title": "01PM","value": 7},
|
|
{"title": "02PM","value": 25},
|
|
{"title": "03PM","value": 29},
|
|
{"title": "04PM","value": 23},
|
|
{"title": "05PM","value": 27},
|
|
{"title": "06PM","value": 27},
|
|
{"title": "07PM","value": 20},
|
|
{"title": "08PM","value": 2},
|
|
{"title": "09PM","value": 24},
|
|
{"title": "10PM","value": 21},
|
|
{"title": "11PM","value": 14 }
|
|
]
|
|
}
|
|
]
|
|
}
|
|
]
|
|
}
|
|
                payload = result_dictionary
                resp.media = package_response(payload)
                resp.status = falcon.HTTP_200
                #AddToLog(payload)
                #return
elif function == "request_device_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
epoch_from_utc = form_data.get('epoch_from')
|
|
epoch_to_utc = form_data.get('epoch_to')
|
|
device_id = form_data.get('device_id')
|
|
well_id = form_data.get('well_id')
|
|
MAC = form_data.get('MAC')
|
|
sensor_list_loc = form_data.get('sensors_list')
|
|
sensor_list = sensor_list_loc.split(",")
|
|
device_ids_list = [device_id]
|
|
well_ids_list = [well_id]
|
|
maps_dates, positions_list = GetDeploymentDatesBoth(deployment_id)
|
|
|
|
data_type = "RL"
|
|
#epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
|
|
#epoch_to = '1730592010' #smal sample to test
|
|
radar_part = form_data.get('radar_part')
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
#device_id2_mac = {device_details[1]: device_details[4]}
|
|
for device_id in device_ids_list:
|
|
#device_id2_mac
|
|
sensor_data = {}
|
|
for sensor in sensor_list:
|
|
st = time.time()
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
window = sensor_legal_values[sensor][2]
|
|
#print("@1", time.time() - st)
|
|
#first = 3300
|
|
#last = 3400
|
|
#line_part = line_part[first:last]
|
|
line_part_t = []
|
|
#st = time.time()
|
|
#line_part_t = [tuple(x[:2]) for x in line_part]
|
|
#print(time.time() - st)
|
|
#st = time.time()
|
|
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
|
|
#print(time.time() - st)
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
#cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#print("@2", time.time() - st)
|
|
|
|
#Lets add point in minute 0 and minute 1439
|
|
|
|
#st = time.time()
|
|
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
|
|
#print("@3", time.time() - st)
|
|
cleaned_values = ScaleToCommon(cleaned_values_t, sensor)
|
|
sensor_data[sensor] = cleaned_values
|
|
all_slices[device_id] = sensor_data
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "device_slicedata"
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['proximity'] = positions_list
|
|
dataa['well_id'] = well_id
|
|
dataa['MAC'] = MAC
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
#return
|
|
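            # Example request/response for "request_device_slice" (illustrative values only, not taken from
            # a real deployment; field names are the ones read from form_data above):
            #   form fields: function=request_device_slice, deployment_id=..., epoch_from=..., epoch_to=...,
            #                device_id=..., well_id=..., MAC=..., sensors_list=temperature,humidity,radar, radar_part=...
            #   response:    {"Function": "device_slicedata",
            #                 "all_slices": {<device_id>: {"temperature": [...], "humidity": [...], "radar": [...]}},
            #                 "time_zone_st": "...", "proximity": [...], "well_id": ..., "MAC": "..."}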
elif function == "request_single_radar_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
devices_list = form_data.get('devices_list')
|
|
ctrl_key_state = form_data.get('ctrl_key_state')
|
|
alt_key_state = form_data.get('alt_key_state')
|
|
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
|
|
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
|
|
sensor_index_list = [form_data.get('sensor_index_list')]
|
|
is_nested, device_details = check_and_parse(devices_list)
|
|
if not is_nested:
|
|
device_ids_list = [device_details[1]]
|
|
well_ids_list = [device_details[0]]
|
|
else:
|
|
device_ids_list = list(map(lambda x: x[1], device_details))
|
|
well_ids_list =list(map(lambda x: x[0], device_details))
|
|
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
|
|
#epoch_to = '1730592010' #smal sample to test
|
|
radar_part = form_data.get('radar_part')
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
device_id2_mac = {device_details[1]: device_details[4]}
|
|
for device_id in device_ids_list:
|
|
device_id2_mac
|
|
sensor_data = {}
|
|
for sensor_index in sensor_index_list:
|
|
st = time.time()
|
|
sensor = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"][int(sensor_index)]
|
|
|
|
line_part = ReadRadarDetail(device_id, sensor, epoch_from_utc, epoch_to_utc, alt_key_state)
|
|
window = sensor_legal_values["radar"][2]
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
if len(sensor) < 4:
|
|
sensor_data[sensor+"_max"] = cleaned_values
|
|
else:
|
|
sensor_data[sensor] = cleaned_values
|
|
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "single_slicedata"
|
|
dataa['devices_list'] = devices_list
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['well_id'] = well_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
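            # Sketch of the index-to-name lookup used above (assuming sensor_index arrives as a string):
            #   radar_sensor_names = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max",
            #                         "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"]
            #   sensor = radar_sensor_names[int(sensor_index)]   # e.g. "9" -> "m08_max", "11" -> "s3"
            # Names shorter than 4 characters ("m0".."m8", "s2".."s8") are reported back with a "_max"
            # suffix, so the client always receives keys like "s3_max".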
elif function == "get_deployment":
|
|
blob_data = read_file("deployment.html")
|
|
deployment_id = form_data.get('deployment_id')
|
|
#lets update "Deployments" select
|
|
users = GetUsersFromDeployments(privileges)
|
|
blob_data = UpdateDeploymentsSelector(blob_data, users, False, deployment_id)
|
|
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif function == "request_deployment_map_new":
|
|
st = time.time()
|
|
print(f"$0 ----{time.time() - st}")
|
|
deployment_id = form_data.get('deployment_id')
|
|
map_type = form_data.get('map_type')
|
|
print(f"$1 ----{time.time() - st}")
|
|
maps_dates, positions_list = GetDeploymentDatesBoth(deployment_id)
|
|
print(f"$2 ----{time.time() - st}")
|
|
datee = form_data.get('date')
|
|
if maps_dates != []:
|
|
|
|
if datee == "2022-4-2": #that one is default in HTML so disregard
|
|
datee = maps_dates[0]
|
|
|
|
locations_desc_map = {}
|
|
for details in positions_list:
|
|
well_id = details[0]
|
|
location = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
location = location +" "+ details[3]
|
|
|
|
if details[6] != None and details[6] != "":
|
|
location = location +" "+ details[6]
|
|
|
|
MAC = details[4]
|
|
locations_desc_map[well_id] = location
|
|
print(f"$3 ----{time.time() - st}")
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "deployments_maps_report"
|
|
dataa['proximity'] = positions_list
|
|
maps_dates.sort(reverse = True)
|
|
dataa['maps_dates'] = maps_dates
|
|
dataa['device_count'] = len(positions_list)
|
|
dataa['map_type'] = map_type
|
|
|
|
#MACs_list = GetMACsListSimple(positions_list)
|
|
#MACs_map = {}
|
|
|
|
#for details in positions_list:
|
|
# id = details[0]
|
|
# MAC = details[3]
|
|
# MACs_map[id] = MAC
|
|
#for i in range(len(MACs_list)):
|
|
# MACs_map[devices_list[i]] = MACs_list[i][0]
|
|
|
|
id = positions_list[0][0]
|
|
#dataa['MACs_map'] = MACs_map
|
|
dataa['locations_desc_map'] = locations_desc_map
|
|
#proximity_list = proximity.split(",")
|
|
print(f"$4 ----{time.time() - st}")
|
|
|
|
if id < 200:
|
|
checkmarks_string = 'T><input checked type="checkbox" id="t-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'H><input type="checkbox" id="h-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'P><input type="checkbox" id="p-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'C><input type="checkbox" id="c-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'V><input type="checkbox" id="v-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'L><input type="checkbox" id="l-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'R><input type="checkbox" id="r-check" value="0" onchange="UpdateSelections();"/><br>'
|
|
else: #>200 = ["Temperature", "Humidity", "Pressure", "Light", "Radar", "VOC"]
|
|
|
|
checkmarks_string = 'T><input checked type="checkbox" id="t-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'H><input type="checkbox" id="h-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'P><input type="checkbox" id="p-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'L><input type="checkbox" id="l-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'R><input type="checkbox" id="r-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
|
|
checkmarks_string = checkmarks_string + 'S0><input type="checkbox" id="v0-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S1><input type="checkbox" id="v1-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S2><input type="checkbox" id="v2-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S3><input type="checkbox" id="v3-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S4><input type="checkbox" id="v4-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S5><input type="checkbox" id="v5-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S6><input type="checkbox" id="v6-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S7><input type="checkbox" id="v7-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S8><input type="checkbox" id="v8-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S9><input type="checkbox" id="v9-check" value="0" onchange="UpdateSelections();"/><br>'
|
|
|
|
checked_or_not = " checked"
|
|
|
|
for index in range(len(positions_list)):
|
|
details = positions_list[index]
|
|
device_id = details[0]
|
|
location = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
location = location + " " + details[3]
|
|
|
|
if details[6] != None and details[6] != "":
|
|
location = location + " " + details[6]
|
|
|
|
checkmarks_string = checkmarks_string + str(device_id) + '><input'+checked_or_not+' type="checkbox" id="device_check'+str(index)+'" value="0" title="'+location+'" onchange="UpdateSelections();" />\n'
|
|
checked_or_not = ''
|
|
|
|
print(f"$5 ----{time.time() - st}")
|
|
|
|
dataa['checkmarks'] = checkmarks_string
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
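            # The generated checkmarks string is parsed by the client; each line has the form
            # "<label>><input .../>", for example (device id and title here are illustrative):
            #   T><input checked type="checkbox" id="t-check" value="0" onchange="UpdateSelections();"/>
            #   559><input type="checkbox" id="device_check1" value="0" title="Bathroom" onchange="UpdateSelections();" />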
elif function == "request_proximity":
|
|
deployment = form_data.get('deployment_id')
|
|
timee = form_data.get('time')
|
|
#timee = StringToEpoch(datee)
|
|
#print(deployment, timee)
|
|
well_ids, device_ids = GetProximityList(deployment, timee)
|
|
#print(proximity)
|
|
dataa = {}
|
|
dataa['Function'] = "proximity_report"
|
|
if len(well_ids) > 0:
|
|
dataa['proximity'] = well_ids
|
|
else:
|
|
dataa['proximity'] = []
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
elif function == "request_devices":
|
|
deployment_id = form_data.get('deployment_id')
|
|
group_id = form_data.get('group_id')
|
|
location = form_data.get('location')
|
|
if location == "0":
|
|
location = "All"
|
|
is_fresh = form_data.get('is_fresh')
|
|
matching_devices = GetMatchingDevices(privileges, group_id, deployment_id, location)
|
|
dataa = {}
|
|
dataa['Function'] = "devices_report"
|
|
if len(matching_devices) > 0:
|
|
dataa['devices'] = matching_devices
|
|
else:
|
|
dataa['devices'] = []
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "device_form":
|
|
editing_device_id = form_data.get('editing_device_id')
|
|
|
|
ok = StoreDevice2DB(form_data, editing_device_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "device_delete":
|
|
|
|
#check if admin!
|
|
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_raw_data":
|
|
container = GetReference("/MAC")
|
|
MAC = req_dict["MAC"][0]
|
|
sensor = req_dict["sensor"][0]
|
|
if "part" in req_dict:
|
|
part = req_dict["part"][0]
|
|
else:
|
|
part = ""
|
|
from_time = req_dict["from_time"][0]
|
|
to_time = req_dict["to_time"][0]
|
|
timezone_str = req_dict["tzone"][0]
|
|
AddToLog("get_raw_data:" + str(MAC) +","+ str(sensor) + "," + str(from_time) + "," + str(to_time) + "," + part+ "," + timezone_str)
|
|
#raw_data = GetRawSensorData(container, MAC, sensor, from_time, to_time, timezone_str)
|
|
raw_data = GetRawSensorDataFromBlobStorage(MAC, sensor, part, from_time, to_time, timezone_str)
|
|
data_payload = {'raw_data': raw_data}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
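            # Illustrative query for "get_raw_data" (parameter names are the keys read from req_dict above;
            # the MAC and timezone values are examples only, and the exact route prefix depends on which of
            # the registered routes the caller uses):
            #   /api/well_api?function=get_raw_data&MAC=64B70888FAB0&sensor=temperature&part=&from_time=...&to_time=...&tzone=America/New_York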
elif function == "get_candle_data":
|
|
container = GetReference("/MAC")
|
|
MAC = req_dict["MAC"][0]
|
|
sensor = req_dict["sensor"][0]
|
|
from_time = req_dict["from_time"][0]
|
|
to_time = req_dict["to_time"][0]
|
|
part = req_dict["part"][0]
|
|
tzone = req_dict["tzone"][0]
|
|
AddToLog(str(req_dict))
|
|
candle_data = GetCandleSensorData(container, MAC, sensor, from_time, to_time, part, tzone)
|
|
data_payload = {'candle_data': candle_data}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "deployment_form":
|
|
editing_deployment_id = form_data.get('editing_deployment_id')
|
|
|
|
ok = StoreDeployment2DB(form_data, editing_deployment_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "deployment_delete":
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "deployments_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
last = 1000000
|
|
|
|
user_id = form_data.get('user_id')
|
|
all_deployments = ListDeployments(privileges, user_id)
|
|
|
|
cnt = 0
|
|
|
|
for deployment in all_deployments:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": user_id_2_user[deployment['beneficiary_id']][3], "first_name": user_id_2_user[deployment['beneficiary_id']][5], "last_name": user_id_2_user[deployment['beneficiary_id']][6]}
|
|
result_list.append(caretaker_min_object)
|
|
if cnt > last:
|
|
break
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
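            # The 'first'/'last' paging parameters are parsed the same way here, in caretakers_list and in
            # beneficiaries_list; a small shared helper (sketch, hypothetical name) could replace the
            # repeated try/except blocks:
            #   def parse_int_or(value, default):
            #       try:
            #           return int(value)
            #       except (TypeError, ValueError):
            #           return default
            #   first = parse_int_or(form_data.get('first'), 0)
            #   last = parse_int_or(form_data.get('last'), 1000000)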
elif function == "caretaker_form":
|
|
editing_user_id = form_data.get('editing_user_id')
|
|
email = form_data.get('email')
|
|
|
|
if "@" in email:
|
|
ok = StoreCaretaker2DB(form_data, editing_user_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
resp.media = package_response("Missing or illegal 'email' parameter", HTTP_400)
|
|
return
|
|
|
|
elif function == "caretaker_delete":
|
|
if privileges == "-1":
|
|
ok = DeleteRecordFromDB(form_data)
|
|
else:
|
|
ok = 0
|
|
AddToLog(ok)
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "caretakers_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
last = 1000000
|
|
|
|
if privileges == "-1":
|
|
all_caretakers = ListCaretakers()
|
|
|
|
cnt = 0
|
|
|
|
for caretaker in all_caretakers:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
|
|
result_list.append(caretaker_min_object)
|
|
if cnt > last:
|
|
break
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiary_form":
|
|
editing_user_id = form_data.get('editing_user_id')
|
|
email = form_data.get('email')
|
|
if "@" in email:
|
|
ok = StoreBeneficiary2DB(form_data, editing_user_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiary_delete":
|
|
|
|
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiaries_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
last = 1000000
|
|
|
|
user_id = form_data.get('user_id')
|
|
all_beneficiaries = ListBeneficiaries(privileges, user_id)
|
|
|
|
cnt = 0
|
|
|
|
for beneficiary in all_beneficiaries:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
beneficiary_min_object = {"user_id": beneficiary[0], "email": beneficiary[3], "first_name": beneficiary[5], "last_name": beneficiary[6]}
|
|
result_list.append(beneficiary_min_object)
|
|
if cnt > last:
|
|
break
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "activities_report_details":
|
|
deployment_id = form_data.get('deployment_id')
|
|
result_dictionary = {
|
|
"alert_text": "No alert",
|
|
"alert_color": "bg-green-100 text-green-700",
|
|
"chart_data": [
|
|
{
|
|
"name": "Weekly",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "purple",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "#3b82f6",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "orange",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "hotpink",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Monthly",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "purple",
|
|
"data": [
|
|
{ "title": "01", "events": 67, "hours": 45 },
|
|
{ "title": "02", "events": 97, "hours": 67 },
|
|
{ "title": "03", "events": 87, "hours": 23 },
|
|
{ "title": "04", "events": 42, "hours": 12 },
|
|
{ "title": "05", "events": 64, "hours": 48 },
|
|
{ "title": "06", "events": 53, "hours": 34 },
|
|
{ "title": "07", "events": 75, "hours": 23 },
|
|
{ "title": "08", "events": 45, "hours": 56 },
|
|
{ "title": "09", "events": 85, "hours": 47 },
|
|
{ "title": "10", "events": 34, "hours": 29 },
|
|
{ "title": "11", "events": 49, "hours": 30 },
|
|
{ "title": "12", "events": 62, "hours": 33 },
|
|
{ "title": "13", "events": 75, "hours": 44 },
|
|
{ "title": "14", "events": 88, "hours": 57 },
|
|
{ "title": "15", "events": 94, "hours": 65 },
|
|
{ "title": "16", "events": 45, "hours": 21 },
|
|
{ "title": "17", "events": 76, "hours": 54 },
|
|
{ "title": "18", "events": 85, "hours": 62 },
|
|
{ "title": "19", "events": 43, "hours": 28 },
|
|
{ "title": "20", "events": 59, "hours": 34 },
|
|
{ "title": "21", "events": 78, "hours": 56 },
|
|
{ "title": "22", "events": 64, "hours": 39 },
|
|
{ "title": "23", "events": 93, "hours": 72 },
|
|
{ "title": "24", "events": 52, "hours": 28 },
|
|
{ "title": "25", "events": 71, "hours": 48 },
|
|
{ "title": "26", "events": 85, "hours": 63 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "#3b82f6",
|
|
"data": [
|
|
{ "title": "01", "events": 61, "hours": 42 },
|
|
{ "title": "02", "events": 72, "hours": 36 },
|
|
{ "title": "03", "events": 94, "hours": 49 },
|
|
{ "title": "04", "events": 67, "hours": 59 },
|
|
{ "title": "05", "events": 54, "hours": 20 },
|
|
{ "title": "06", "events": 77, "hours": 64 },
|
|
{ "title": "07", "events": 81, "hours": 70 },
|
|
{ "title": "08", "events": 53, "hours": 25 },
|
|
{ "title": "09", "events": 79, "hours": 42 },
|
|
{ "title": "10", "events": 84, "hours": 65 },
|
|
{ "title": "11", "events": 62, "hours": 54 },
|
|
{ "title": "12", "events": 45, "hours": 23 },
|
|
{ "title": "13", "events": 88, "hours": 71 },
|
|
{ "title": "14", "events": 74, "hours": 44 },
|
|
{ "title": "15", "events": 91, "hours": 59 },
|
|
{ "title": "16", "events": 46, "hours": 31 },
|
|
{ "title": "17", "events": 73, "hours": 40 },
|
|
{ "title": "18", "events": 85, "hours": 63 },
|
|
{ "title": "19", "events": 78, "hours": 66 },
|
|
{ "title": "20", "events": 66, "hours": 42 },
|
|
{ "title": "21", "events": 95, "hours": 78 },
|
|
{ "title": "22", "events": 57, "hours": 39 },
|
|
{ "title": "23", "events": 72, "hours": 48 },
|
|
{ "title": "24", "events": 48, "hours": 21 },
|
|
{ "title": "25", "events": 89, "hours": 61 },
|
|
{ "title": "26", "events": 77, "hours": 44 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "orange",
|
|
"data": [
|
|
{ "title": "01", "events": 94, "hours": 59 },
|
|
{ "title": "02", "events": 62, "hours": 48 },
|
|
{ "title": "03", "events": 76, "hours": 38 },
|
|
{ "title": "04", "events": 81, "hours": 62 },
|
|
{ "title": "05", "events": 64, "hours": 27 },
|
|
{ "title": "06", "events": 53, "hours": 31 },
|
|
{ "title": "07", "events": 92, "hours": 65 },
|
|
{ "title": "08", "events": 85, "hours": 42 },
|
|
{ "title": "09", "events": 74, "hours": 35 },
|
|
{ "title": "10", "events": 67, "hours": 55 },
|
|
{ "title": "11", "events": 49, "hours": 23 },
|
|
{ "title": "12", "events": 88, "hours": 75 },
|
|
{ "title": "13", "events": 93, "hours": 66 },
|
|
{ "title": "14", "events": 76, "hours": 34 },
|
|
{ "title": "15", "events": 59, "hours": 39 },
|
|
{ "title": "16", "events": 72, "hours": 51 },
|
|
{ "title": "17", "events": 83, "hours": 44 },
|
|
{ "title": "18", "events": 74, "hours": 33 },
|
|
{ "title": "19", "events": 69, "hours": 28 },
|
|
{ "title": "20", "events": 85, "hours": 56 },
|
|
{ "title": "21", "events": 53, "hours": 22 },
|
|
{ "title": "22", "events": 92, "hours": 70 },
|
|
{ "title": "23", "events": 71, "hours": 41 },
|
|
{ "title": "24", "events": 67, "hours": 25 },
|
|
{ "title": "25", "events": 86, "hours": 74 },
|
|
{ "title": "26", "events": 94, "hours": 68 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "hotpink",
|
|
"data": [
|
|
{ "title": "01", "events": 57, "hours": 27 },
|
|
{ "title": "02", "events": 74, "hours": 33 },
|
|
{ "title": "03", "events": 84, "hours": 53 },
|
|
{ "title": "04", "events": 95, "hours": 68 },
|
|
{ "title": "05", "events": 71, "hours": 48 },
|
|
{ "title": "06", "events": 92, "hours": 76 },
|
|
{ "title": "07", "events": 85, "hours": 62 },
|
|
{ "title": "08", "events": 49, "hours": 25 },
|
|
{ "title": "09", "events": 66, "hours": 38 },
|
|
{ "title": "10", "events": 63, "hours": 31 },
|
|
{ "title": "11", "events": 75, "hours": 47 },
|
|
{ "title": "12", "events": 94, "hours": 72 },
|
|
{ "title": "13", "events": 79, "hours": 49 },
|
|
{ "title": "14", "events": 72, "hours": 45 },
|
|
{ "title": "15", "events": 88, "hours": 61 },
|
|
{ "title": "16", "events": 83, "hours": 52 },
|
|
{ "title": "17", "events": 92, "hours": 76 },
|
|
{ "title": "18", "events": 73, "hours": 40 },
|
|
{ "title": "19", "events": 65, "hours": 28 },
|
|
{ "title": "20", "events": 76, "hours": 63 },
|
|
{ "title": "21", "events": 58, "hours": 30 },
|
|
{ "title": "22", "events": 84, "hours": 67 },
|
|
{ "title": "23", "events": 72, "hours": 41 },
|
|
{ "title": "24", "events": 79, "hours": 46 },
|
|
{ "title": "25", "events": 63, "hours": 29 },
|
|
{ "title": "26", "events": 68, "hours": 39 }
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "6 Months",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "purple",
|
|
"data": [
|
|
{ "title": "October", "events": 62, "hours": 23 },
|
|
{ "title": "November", "events": 76, "hours": 42 },
|
|
{ "title": "December", "events": 85, "hours": 54 },
|
|
{ "title": "January", "events": 94, "hours": 67 },
|
|
{ "title": "February", "events": 63, "hours": 35 },
|
|
{ "title": "March", "events": 81, "hours": 46 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "#3b82f6",
|
|
"data": [
|
|
{ "title": "October", "events": 64, "hours": 35 },
|
|
{ "title": "November", "events": 88, "hours": 71 },
|
|
{ "title": "December", "events": 79, "hours": 54 },
|
|
{ "title": "January", "events": 72, "hours": 49 },
|
|
{ "title": "February", "events": 53, "hours": 32 },
|
|
{ "title": "March", "events": 93, "hours": 67 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "orange",
|
|
"data": [
|
|
{ "title": "October", "events": 92, "hours": 65 },
|
|
{ "title": "November", "events": 85, "hours": 62 },
|
|
{ "title": "December", "events": 74, "hours": 49 },
|
|
{ "title": "January", "events": 63, "hours": 33 },
|
|
{ "title": "February", "events": 78, "hours": 56 },
|
|
{ "title": "March", "events": 69, "hours": 41 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "hotpink",
|
|
"data": [
|
|
{ "title": "October", "events": 88, "hours": 54 },
|
|
{ "title": "November", "events": 72, "hours": 39 },
|
|
{ "title": "December", "events": 84, "hours": 63 },
|
|
{ "title": "January", "events": 76, "hours": 46 },
|
|
{ "title": "February", "events": 93, "hours": 72 },
|
|
{ "title": "March", "events": 68, "hours": 29 }
|
|
]
|
|
}
|
|
]
|
|
}
|
|
]
|
|
}
|
|
|
|
                payload = result_dictionary #{'result_dictionary': result_dictionary}
                resp.media = package_response(payload)
                resp.status = falcon.HTTP_200
                AddToLog(payload)
                return
elif function == "dashboard_list":
|
|
# works in UTC only
|
|
caretaker = user_name
|
|
#date_s = form_data.get('date')
|
|
time_s = form_data.get('time')
|
|
date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
|
|
filterr = form_data.get('filter')
|
|
if filterr == None:
|
|
filterr = 5
|
|
|
|
privileges = GetPriviledgesOnly(caretaker)
|
|
|
|
deployments_list = GetUsersFromDeployments(privileges)
|
|
|
|
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
|
|
#AddToLog(all_beneficiaries)
|
|
|
|
result_list = []
|
|
|
|
for deployment_id, first_name, last_name in deployments_list:
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
details["units"] = "°C"
|
|
if "America" in details["time_zone"]:
|
|
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
|
|
details["units"] = "°F"
|
|
devices_list, device_ids = GetProximityList(deployment_id, date_s)
|
|
# convert dates back to UTC
|
|
#details['bathroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['kitchen_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['bedroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bedroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['last_detected_time'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['last_detected_time'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
location_list = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
location_list.append(location_name)
|
|
|
|
details["deployment_id"] = deployment_id
|
|
details["location_list"] = location_list
|
|
result_list.append(details)
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
AddToLog(payload)
|
|
return
|
|
|
|
|
|
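            # CelsiusToFahrenheit (defined elsewhere in this module) is assumed to apply the usual
            # conversion, i.e. roughly:
            #   def celsius_to_fahrenheit(c):
            #       return c * 9.0 / 5.0 + 32.0
            # so dashboards for "America/..." time zones display °F while readings are stored in °C.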
elif function == "dashboard_single":
|
|
caretaker = user_name
|
|
#date_s = form_data.get('date')
|
|
date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
|
|
deployment_id = form_data.get('deployment_id')
|
|
filterr = form_data.get('filter')
|
|
if filterr == None:
|
|
filterr = 5
|
|
|
|
|
|
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
|
|
#AddToLog(all_beneficiaries)
|
|
|
|
result_list = []
|
|
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
details["units"] = "°C"
|
|
if "America" in details["time_zone"]:
|
|
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
|
|
details["units"] = "°F"
|
|
devices_list, device_ids = GetProximityList(deployment_id, date_s)
|
|
location_list = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
location_list.append(location_name)
|
|
|
|
details["deployment_id"] = deployment_id
|
|
details["location_list"] = location_list
|
|
settings = {"wellness_score": False, "last_seen": False, "sleep_report": False, "activity_report": False, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
|
|
details["settings"] = settings
|
|
|
|
|
|
result_list.append(details)
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
AddToLog(payload)
|
|
return
|
|
|
|
            else:
                AddToLog("Error: function not recognized!")
                payload = {'ok': 0, 'error': "function not recognized"}
                resp.media = package_response(payload)
                resp.status = falcon.HTTP_200
                return
        except Exception as e:
            print(traceback.format_exc())
            resp.media = package_response(f"Error: {str(e)} {traceback.format_exc()}", HTTP_500)
def on_put(self, req, resp, path=""):
|
|
"""Handle PUT requests"""
|
|
if path == "users":
|
|
logger.info("PUT request to users endpoint")
|
|
try:
|
|
# Parse the request body
|
|
request_data = json.loads(req.stream.read().decode('utf-8'))
|
|
|
|
# TODO: Implement user update logic
|
|
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"id": request_data.get("id"), "message": "User updated"})
|
|
except json.JSONDecodeError:
|
|
resp.status = HTTP_400
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Invalid JSON"})
|
|
else:
|
|
resp.media = package_response(f"PUT to /{path} not implemented", HTTP_400)
|
|
|
|
def on_delete(self, req, resp, path=""):
|
|
"""Handle DELETE requests"""
|
|
if path == "users":
|
|
logger.info("DELETE request to users endpoint")
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"message": "User deleted"})
|
|
else:
|
|
resp.media = package_response(f"DELETE to /{path} not implemented", HTTP_400)
|
|
|
|
# Initialize data files

logger.error("------------------------------- STARTED ------------------------------------------")
try:
    searches_text = read_file("searches.json")
    searches_dict = json.loads(searches_text) if searches_text else {}

    dialogs_data = read_file("dialog.json")
    dialog_dict = json.loads(dialogs_data) if dialogs_data else {"utterances": {}, "intents": {}}

    intent_map = dialog_dict.get("utterances", {})
    utterances = {}
    for key in intent_map:
        logger.debug(key)
        list_of_utterances = intent_map[key]
        for utterance in list_of_utterances:
            utterances[utterance] = key

    intents = dialog_dict.get("intents", {})
except Exception as e:
    logger.error(f"Error initializing data files: {str(e)}")
    searches_dict = {}
    utterances = {}
    intents = {}
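# Expected shape of dialog.json, as implied by the parsing above (intent and phrase names are
# illustrative only):
#   {
#     "utterances": {"turn_on_lights": ["turn on the lights", "lights on please"]},
#     "intents": {"turn_on_lights": {...}}
#   }
# The loop inverts the "utterances" map into a phrase -> intent lookup table.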
# Create Falcon application with middleware
middlewares = [CORSMiddleware(), RequestParser(), StripPathMiddleware()]
try:
    # For newer Falcon versions
    app = falcon.App(middleware=middlewares)
except AttributeError:
    # For older Falcon versions
    app = falcon.API(middleware=middlewares)
# Add routes for well-api
well_api_instance = WellApi()

# New routes for well_api with multiple access paths
app.add_route('/function/well-api', well_api_instance)
app.add_route('/function/well-api/{path}', well_api_instance)
app.add_route('/api/well_api', well_api_instance)
app.add_route('/api/well_api/{path}', well_api_instance)
app.add_route('/healthz', well_api_instance, suffix='healthz')

# Add routes for the standard API paths
app.add_route('/health', well_api_instance)
app.add_route('/users', well_api_instance)
app.add_route('/items', well_api_instance)

# Keep the original routes for backward compatibility
app.add_route('/', well_api_instance)
app.add_route('/{path}', well_api_instance)
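# Illustrative requests against the routes above (host, port and parameter values are examples only,
# assuming the RequestParser middleware accepts form-encoded bodies):
#   curl http://localhost:8002/healthz
#   curl -X POST http://localhost:8002/api/well_api -d 'function=request_proximity&deployment_id=1&time=1730592010'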
MQTTSERVERL = "eluxnetworks.net"
|
|
MQTT_PortL = 443
|
|
|
|
MyName = "well-api"
|
|
|
|
clientL = mqtt.Client(client_id=MyName+str(time.time()), transport="websockets")
|
|
clientL.tls_set(cert_reqs=ssl.CERT_NONE) # For self-signed certs, use proper CA in production
|
|
clientL.ws_set_options(path="/mqtt") # Important! Same path as in your JS code
|
|
clientL.username_pw_set("well_user","We3l1_best!")
|
|
|
|
clientL.on_connect = on_connectL
|
|
clientL.on_message = on_messageL
|
|
|
|
#clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
|
|
#lientL.loop_start()
|
|
|
|
|
|
# This code runs when executed directly (for development/debugging)
if __name__ == "__main__":
    from wsgiref.simple_server import make_server

    # Use port 8002 for local debugging unless PORT is set in the environment
    port = int(os.environ.get('PORT', 8002))

    # Create a WSGI server
    with make_server('', port, app) as httpd:
        print(f'Serving on port {port}...')

        # Serve until process is killed
        httpd.serve_forever()
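# For production the module exposes "app" as a standard WSGI callable, so it can also be served by a
# WSGI server instead of wsgiref, e.g. (example command only; adjust module name and port to the actual
# deployment):
#   gunicorn --bind 0.0.0.0:8002 well_api:app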