#!/usr/bin/env python3
#Version 2.0.2 9/25/2025
import os
import sys
import ast
from ast import literal_eval
import falcon
from falcon import HTTP_200, HTTP_400, HTTP_401, HTTP_500
import json
import logging
from dotenv import load_dotenv
import calendar
import io
import datetime
from datetime import timedelta, timezone
import jwt
import psycopg2
import html
import re
import fnmatch
import traceback
import time
import pytz
from PIL import Image, ImageDraw, ImageFont
import paho.mqtt.client as mqtt
import ssl
import hashlib
import itertools
from collections import defaultdict, deque
from io import BytesIO
import zipfile
from minio import Minio
from minio.error import S3Error
import numpy as np
import cv2
from sklearn.mixture import GaussianMixture
import openai
from openai import OpenAI
from typing import List, Tuple
import redis
import base64
import requests
import uuid
import csv
import random
import urllib.parse
base_url = "http://192.168.68.70:5050"
# Try to import the module
try:
from filter_short_groups import filter_short_groups_c
print("Successfully imported filter_short_groups_c")
except ImportError as e:
print(f"Error importing module: {e}")
exit(1)
device_lookup_cache = {}
humidity_offset = 34
temperature_offset = -10
st = 0
if True:
#from scipy import interpolate
from scipy.optimize import curve_fit
from scipy import stats
import pandas as pd
#from scipy.signal import savgol_filter
EnablePlot = False #True
if EnablePlot:
import matplotlib
matplotlib.use('Agg') # Set the backend before importing pyplot
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
# Configure logging
logging.basicConfig(
    level=logging.DEBUG,  # alternatively logging.ERROR
format='%(asctime)s [%(levelname)s] %(message)s'
)
logger = logging.getLogger(__name__)
location_names = {-1:"All",0:"?",5:"Office",6:"Hallway",7:"Garage",8:"Outside",9:"Conference Room",10:"Room",34:"Kitchen",
56:"Bedroom",78:"Living Room",102:"Bathroom",103:"Dining Room",104:"Bathroom Main",105:"Bathroom Guest",
106:"Bedroom Master", 107:"Bedroom Guest", 108:"Conference Room", 109:"Basement", 110:"Attic", 200:"Other"}
#Loc2Color = {"?":(0,0,0),"Office":(255,255,0),"Hallway":(128,128,128),"Garage":(128,0,0),"Outside":(0,0,0),"Conference Room":(0,0,128),
#"Room":(64,64,64),"Kitchen":(255,0,0),"Bedroom":(16,255,16),"Living Room":(160,32,240),"Bathroom":(0,0,255),
#"Dining Room":(255,128,0),"Bathroom Main":(16,16,255), "Bedroom Master":(0,255,0),"Bathroom Guest":(32,32,255),
#"Bedroom Guest":(32,255,32), "Basement":(64,64,64), "Attic":(255,165,0), "Other":(192,192,192)}
Loc2Color = {"Bedroom":((16,255,16),0),"Bedroom Master":((0,255,0),0),"Bedroom Guest":((32,255,32),0),"Bathroom":((0,0,255),1),
"Bathroom Main":((16,16,255),1),"Bathroom Guest":((32,32,255),1),"Kitchen":((255,0,0),2),"Dining Room":((255,128,0),3),"Dining":((255,128,0),3),
"Office":((255,255,0),4),"Conference Room":((0,0,128),5),"Conference":((0,0,128),5),"Room":((64,64,64),6),"Living Room":((160,32,240),7),"Living":((160,32,240),7),"Hallway":((128,128,128),8),
"Garage":((128,0,0),9),"Basement":((64,64,64), 10),"Attic":((255,165,0), 11),"Other":((192,192,192),12),"?":((0,0,0),13),"Outside":((0,0,0),14)}
s_table = ["temperature", "humidity", "pressure", "light", "radar", "voc0", "voc1", "voc2", "voc3", "voc4", "voc5", "voc6", "voc7", "voc8", "voc9"] # derived
smells_table = ["s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9"] # derived
s_table_80 = ["temperature", "humidity", "pressure", "light", "radar"] + [f"s{i}" for i in range(80)]
s_table_temp = []
Consolidataed_locations = {"?":"Room","Office":"Office","Hallway":"Hallway","Garage":"Garage","Outside":"Outside","Conference Room":"Office",
"Room":"Room","Kitchen":"Kitchen","Bedroom":"Bedroom","Living Room":"Living Room","Bathroom Guest":"Bathroom",
"Dining Room":"Dining Room","Bathroom":"Bathroom", "Bathroom Main":"Bathroom","Bedroom Master":"Bedroom",
"Bedroom Guest":"Bedroom", "Basement":"Basement", "Attic":"Attic", "Other":"Room"}
AveragePercentPerLocation = {"Bedroom":[29, 37.5], "Bathroom":[2, 4], "Office":[10, 40],"Hallway":[0.1, 0.2],"Garage":[2, 3],"Outside":[5, 10],
"Room":[5, 10],"Kitchen":[5, 12.5], "Living Room":[5, 10],
"Dining Room":[5, 10], "Basement":[0, 0.2], "Attic":[0, 0.2]}
races = ["Asian","Black or African American","White","Native American or Alaskan Native","Native Hawaiian or other Pacific Islander","Hispanic or Latino","Middle Eastern","Other"]
sexes = ["Male","Female"]
location_indexes = {}
for i in location_names:
location_indexes[location_names[i]] = i
# HTTP Status codes
HTTP_200 = falcon.HTTP_200
HTTP_201 = falcon.HTTP_201
HTTP_400 = falcon.HTTP_400
HTTP_401 = falcon.HTTP_401
HTTP_404 = falcon.HTTP_404
HTTP_500 = falcon.HTTP_500
load_dotenv()
DB_NAME = os.getenv('DB_NAME')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')
DB_PORT = os.getenv('DB_PORT')
MINIO_ACCESS_KEY = os.getenv('MINIO_ACCESS_KEY')
MINIO_SECRET_KEY = os.getenv('MINIO_SECRET_KEY')
MINIO_HOST = os.getenv('MINIO_HOST')
MINIO_PORT = os.getenv('MINIO_PORT')
DAILY_MAPS_BUCKET_NAME = os.getenv('DAILY_MAPS_BUCKET_NAME')
JWT_SECRET = os.getenv('JWT_SECRET')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
#logger.debug(f"OPENAI_API_KEY: {OPENAI_API_KEY}")
model_engine = os.getenv('OPENAI_API_MODEL_ENGINE')
# Redis Configuration
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))  # default to the standard Redis port if unset
REDIS_DB = int(os.getenv('REDIS_DB', 0))
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', None)
DEFAULT_TTS_VOICE = "female"
DEFAULT_TTS_LANGUAGE = "en-US"
TELNYX_API_KEY = os.getenv('TELNYX_API_KEY')
TELNYX_API_BASE_URL = os.getenv("TELNYX_API_BASE_URL")
MQTT_USER = os.getenv('MQTT_USER')
MQTT_PASS = os.getenv('MQTT_PASS')
#logger.debug(f"REDIS_PORT: {REDIS_PORT}")
#logger.debug(f"TELNYX_API_KEY: {TELNYX_API_KEY}")
#logger.debug(f"TELNYX_API_BASE_URL: {TELNYX_API_BASE_URL}")
redis_host = os.getenv('REDIS_HOST', '192.168.68.70')
redis_host = '192.168.68.70'
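# NOTE: the assignment above hard-codes redis_host and overrides the REDIS_HOST environment variable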
use_pdb = True
debug = False
debug_string = ""
logger.debug(f"Environment variables: {os.environ}")
filesDir = "/home/app/well_web_storage" #os.path.dirname(os.path.realpath(__file__))
min_io_address = MINIO_HOST + ":" + MINIO_PORT
miniIO_blob_client = Minio(min_io_address, access_key=MINIO_ACCESS_KEY, secret_key=MINIO_SECRET_KEY, secure=False)
user_id_2_user = {}
smell_min = 1
no_smell = 102400000
smell_max = no_smell - 1
sensor_legal_values = {
"radar": (0, 1000, 1),
"co2": (smell_min, smell_max, 31),
"humidity": (1, 99, 31),
"light": (0, 4095, 1),
"pressure": (0, 10000, 5),
"temperature": (1, 60, 31),
"voc": (smell_min, smell_max, 31),
# Keep your existing voc0-voc9 for backward compatibility
"voc0": (smell_min, smell_max, 31), "voc1": (smell_min, smell_max, 31),
"voc2": (smell_min, smell_max, 31), "voc3": (smell_min, smell_max, 31),
"voc4": (smell_min, smell_max, 31), "voc5": (smell_min, smell_max, 31),
"voc6": (smell_min, smell_max, 31), "voc7": (smell_min, smell_max, 31),
"voc8": (smell_min, smell_max, 31), "voc9": (smell_min, smell_max, 31)
}
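# NOTE: the dictionary below immediately rebinds sensor_legal_values and supersedes
# the definition above; the entries differ only in the third tuple element (31 or 5 -> 1).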
sensor_legal_values = {
"radar": (0, 1000, 1),
"co2": (smell_min, smell_max, 1),
"humidity": (1, 99, 1),
"light": (0, 4095, 1),
"pressure": (0, 10000, 1),
"temperature": (1, 60, 1),
"voc": (smell_min, smell_max, 1),
# Keep your existing voc0-voc9 for backward compatibility
"voc0": (smell_min, smell_max, 1), "voc1": (smell_min, smell_max, 1),
"voc2": (smell_min, smell_max, 1), "voc3": (smell_min, smell_max, 1),
"voc4": (smell_min, smell_max, 1), "voc5": (smell_min, smell_max, 1),
"voc6": (smell_min, smell_max, 1), "voc7": (smell_min, smell_max, 1),
"voc8": (smell_min, smell_max, 1), "voc9": (smell_min, smell_max, 1)
}
#extend to s0-79
for i in range(80):
sensor_legal_values[f"s{i}"] = (smell_min, smell_max, 1)
smell_legal_values = {f"s{i}": (smell_min, smell_max, 1) for i in range(10)}
def format_address_component(component, component_type):
"""
Apply proper capitalization rules based on component type
"""
if not component:
return ""
component = component.strip()
if component_type == 'street_number':
# Street numbers should remain as-is
return component
elif component_type in ['street_name', 'city', 'country']:
# Title case with special handling for common patterns
return title_case_address(component)
elif component_type == 'state':
# States should be uppercase if abbreviation, title case if full name
if len(component) == 2:
return component.upper()
else:
return title_case_address(component)
elif component_type == 'zip_code':
# ZIP codes remain as-is
return component
elif component_type in ['apt', 'unit', 'suite']:
# Apartment/unit numbers remain as-is
return component
else:
# Default to title case
return title_case_address(component)
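# Example usage (illustrative values):
# format_address_component("main ave nw", "street_name")  # -> "Main Ave NW"
# format_address_component("tx", "state")                 # -> "TX"
# format_address_component("new york", "city")            # -> "New York"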
def title_case_address(text):
"""
Apply proper title case with address-specific rules
"""
# Words that should remain lowercase (unless at start)
lowercase_words = {
'and', 'at', 'by', 'for', 'in', 'of', 'on', 'to', 'up', 'via', 'with'
}
# Words that should be uppercase
uppercase_words = {
'ne', 'nw', 'se', 'sw', 'n', 's', 'e', 'w', # Directions
'st', 'nd', 'rd', 'th', # Ordinal suffixes
'po', 'llc', 'inc', 'corp' # Business suffixes
}
# Street type abbreviations that should be title case
street_types = {
'st': 'St', 'ave': 'Ave', 'rd': 'Rd', 'dr': 'Dr', 'ln': 'Ln',
'ct': 'Ct', 'pl': 'Pl', 'blvd': 'Blvd', 'pkwy': 'Pkwy',
'hwy': 'Hwy', 'way': 'Way', 'circle': 'Circle', 'court': 'Court',
'drive': 'Drive', 'lane': 'Lane', 'place': 'Place', 'road': 'Road',
'street': 'Street', 'avenue': 'Avenue', 'boulevard': 'Boulevard'
}
words = text.lower().split()
result = []
for i, word in enumerate(words):
# Remove punctuation for comparison
clean_word = word.rstrip('.,;:')
punctuation = word[len(clean_word):]
if clean_word in uppercase_words:
result.append(clean_word.upper() + punctuation)
elif clean_word in street_types:
result.append(street_types[clean_word] + punctuation)
elif i > 0 and clean_word in lowercase_words:
result.append(clean_word + punctuation)
else:
# Handle special cases like "McDonald", "O'Connor"
formatted = format_special_cases(clean_word)
result.append(formatted + punctuation)
return ' '.join(result)
def format_special_cases(word):
"""
Handle special capitalization cases
"""
# Handle names with apostrophes (O'Connor, D'Angelo)
if "'" in word:
parts = word.split("'")
return "'".join(part.capitalize() for part in parts)
# Handle hyphenated words
if "-" in word:
parts = word.split("-")
return "-".join(part.capitalize() for part in parts)
# Handle Mc/Mac prefixes
if word.startswith('mc') and len(word) > 2:
return 'Mc' + word[2:].capitalize()
elif word.startswith('mac') and len(word) > 3:
return 'Mac' + word[3:].capitalize()
# Default capitalization
return word.capitalize()
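# Example usage (illustrative values):
# format_special_cases("o'connor")     # -> "O'Connor"
# format_special_cases("mcdonald")     # -> "McDonald"
# format_special_cases("smith-jones")  # -> "Smith-Jones"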
def GetRedisInt(key_name):
try:
result = int(redis_conn.get(key_name).decode('utf-8'))
    except Exception:
result = None
return result
def GetRedisFloat(key_name):
try:
result = float(redis_conn.get(key_name).decode('utf-8'))
    except Exception:
result = None
return result
def GetRedisString(key_name):
try:
result = redis_conn.get(key_name).decode('utf-8')
    except Exception:
result = None
return result
def GetRedisMap(key_name):
try:
result_bytes = redis_conn.hgetall(key_name)
result = {k.decode('utf-8'): v.decode('utf-8') for k, v in result_bytes.items()}
    except Exception:
result = {}
return result
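# Example usage (illustrative; assumes redis_conn is the Redis connection created
# elsewhere in this module and that the keys exist):
# GetRedisInt("deployment:42:last_epoch")   # -> 1727251200, or None if missing/non-numeric
# GetRedisFloat("device:1:temperature")     # -> 21.5, or None
# GetRedisMap("device:1:latest")            # -> {"temperature": "21.5", ...} or {}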
def read_file(file_name, source = "LOCAL", type_ = "TEXT", bucket_name="daily-maps"):
blob_data = ""
if source == "MINIO":
blob_data = ReadObjectMinIO(bucket_name, file_name)
elif source == "LOCAL":
login_file = os.path.join(filesDir, file_name)
login_file = login_file.replace("\\","/")
logger.debug(f"Full file path: {login_file}")
logger.debug(f"File exists: {os.path.exists(login_file)}")
#print(login_file)
if type_ == "TEXT":
with open(login_file, encoding="utf8") as f:
blob_data = f.read()
else:
with open(login_file, 'rb') as f:
blob_data = f.read()
elif source == "AZURE":
try:
blob_data = ""#container_client.download_blob(file_name).readall()
except Exception as err:
logger.error("Not reading Azure blob "+str(err))
blob_data = ""
return blob_data
else:
pass
return blob_data
def match_with_wildcard(string, pattern):
return fnmatch.fnmatchcase(string, pattern)
def extract_differing_part(string, pattern):
regex_pattern = re.escape(pattern).replace(r'\*', r'(.+)')
match = re.match(regex_pattern, string)
if match:
return match.group(1)
else:
return None
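# Example usage (illustrative values):
# match_with_wildcard("64B708896BD8_temperature", "64B70889*")                                 # -> True
# extract_differing_part("64B708896BD8_temperature_2024-01-01", "64B708896BD8_*_2024-01-01")   # -> "temperature"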
def get_db_connection():
#when new_table created:
#GRANT ALL PRIVILEGES ON TABLE public.new_table TO well_app;
#GRANT ALL PRIVILEGES ON TABLE public.new_table TO postgres;
return psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=DB_PORT)
def generate_token(username):
expiration = datetime.datetime.now(timezone.utc) + timedelta(hours=24)
token = jwt.encode({"username": username, "exp": expiration}, JWT_SECRET, algorithm="HS256")
return token
def verify_token(token):
try:
payload = jwt.decode(token, JWT_SECRET, algorithms=["HS256"])
return payload
except jwt.ExpiredSignatureError:
return None
except jwt.InvalidTokenError:
return None
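# Example usage (illustrative; requires JWT_SECRET to be set in the environment):
# token = generate_token("alice")
# verify_token(token)          # -> {"username": "alice", "exp": <epoch seconds>}
# verify_token("not.a.token")  # -> None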
def SmartSplit(data_string):
"""
Splits a comma-separated string into a list, properly handling nested structures
and converting values to appropriate Python types using only the ast library.
"""
if not data_string:
return []
# Remove trailing comma if present
data_string = data_string.rstrip(',')
items = []
current_item = ""
bracket_count = 0
in_quotes = False
quote_char = None
i = 0
while i < len(data_string):
char = data_string[i]
# Handle quotes
if char in ('"', "'") and (i == 0 or data_string[i-1] != '\\'):
if not in_quotes:
in_quotes = True
quote_char = char
elif char == quote_char:
in_quotes = False
quote_char = None
# Track brackets only when not in quotes
if not in_quotes:
if char in '[{(':
bracket_count += 1
elif char in ']}':
bracket_count -= 1
# Split on comma only when not inside brackets/quotes
if char == ',' and bracket_count == 0 and not in_quotes:
items.append(current_item.strip())
current_item = ""
else:
current_item += char
i += 1
# Add the last item
if current_item.strip():
items.append(current_item.strip())
# Convert each item using ast.literal_eval when possible
result = []
for item in items:
if item == '':
result.append(None)
else:
try:
# Try to evaluate as Python literal
converted = ast.literal_eval(item)
result.append(converted)
except (ValueError, SyntaxError):
# If it fails, keep as string
result.append(item)
return result
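# Example usage (illustrative values):
# SmartSplit('1, 2.5, "a, b", [3, 4],')  # -> [1, 2.5, 'a, b', [3, 4]]
# SmartSplit('1,,2')                     # -> [1, None, 2]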
def SaveObjectInBlob(file_name, obj):
"""
Saves a Python object to MinIO blob storage using JSON serialization
Args:
file_name (str): Name of the file to save in blob storage
obj: Python object to serialize and save
"""
try:
# Convert object to JSON string
json_str = json.dumps(obj)
# Convert string to bytes
json_bytes = json_str.encode('utf-8')
# Save to MinIO
miniIO_blob_client.put_object(
DAILY_MAPS_BUCKET_NAME,
file_name,
io.BytesIO(json_bytes),
len(json_bytes)
)
return True
except Exception as e:
logger.error(f"Error saving object to blob: {traceback.format_exc()}")
return False
def SaveGenericObjectInBlob(bucket_name, file_name, obj):
"""
Saves a Python object to MinIO blob storage using JSON serialization
Args:
file_name (str): Name of the file to save in blob storage
obj: Python object to serialize and save
"""
try:
# Convert object to JSON string
json_str = json.dumps(obj)
# Convert string to bytes
json_bytes = json_str.encode('utf-8')
# Save to MinIO
miniIO_blob_client.put_object(
bucket_name,
file_name,
io.BytesIO(json_bytes),
len(json_bytes)
)
return True
except Exception as e:
logger.error(f"Error saving object to blob: {traceback.format_exc()}")
return False
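# Example usage (illustrative; assumes the MinIO server configured above is reachable
# and the target bucket already exists):
# SaveGenericObjectInBlob("filtered-presence", "/DEV/DEV_2025-01-01_30_presence.bin", [0, 1, 1, 0])
#   -> True on success, False (with the error logged) otherwise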
def ReadObjectMinIO(bucket_name, file_name, filter_date=None):
"""
Read object from MinIO with optional date filtering.
Args:
bucket_name (str): Name of the MinIO bucket
file_name (str): Name of the file/object
        filter_date (str, optional): Date string in the format "YYYY-MM-DD".
                                     If provided, None is returned when the object
                                     was last modified on or before this date.
    Returns:
        str: Object content as a string, or None if the object was filtered out
             or an error occurred
"""
try:
# If date filtering is requested, check object's last modified date first
if filter_date:
try:
# Get object metadata to check last modified date
stat = miniIO_blob_client.stat_object(bucket_name, file_name)
last_modified = stat.last_modified
# Parse filter date (assuming format YYYY-MM-DD)
target_date = datetime.datetime.strptime(filter_date, "%Y-%m-%d").date()
                # If the object was last modified on or before the target date, skip it (return None)
if last_modified.date() <= target_date:
return None
except S3Error as e:
logger.error(f"Error getting metadata for {file_name}: {e}")
return None
except ValueError as e:
logger.error(f"Invalid date format '{filter_date}': {e}")
return None
# Retrieve the object data
response = miniIO_blob_client.get_object(bucket_name, file_name)
# Read the data from response
data_bytes = response.read()
# Convert bytes to string
data_string = data_bytes.decode('utf-8')
# Don't forget to close the response
response.close()
response.release_conn()
return data_string
except S3Error as e:
logger.error(f"An error occurred while reading {file_name}: {e}")
return None
    except Exception:
logger.error(f"An error occurred while decoding {file_name}")
return None
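# Example usage (illustrative; bucket and object names are placeholders):
# ReadObjectMinIO("daily-maps", "map_2025-01-01.json")
#   -> object content as a string, or None on error
# ReadObjectMinIO("filtered-presence", "/DEV/DEV_2025-01-01_30_presence.bin", filter_date="2025-01-02")
#   -> None when the object was last modified on or before 2025-01-02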
def package_response_C(payload, status_code=HTTP_200):
"""Package response in a standard format"""
if status_code == HTTP_200:
return {"status": "success", "data": payload}
else:
return {"status": "error", "message": payload, "code": status_code}
def package_response(content, status=falcon.HTTP_200):
"""
Format the HTTP response.
:param content: The content to be returned in the response.
:param status: HTTP status code (default is 200 OK).
:return: A dictionary containing the formatted response.
"""
if isinstance(content, str):
# If content is a string, try to parse it as JSON
try:
response = json.loads(content)
except json.JSONDecodeError:
# If it's not valid JSON, use it as message
response = {"message": content}
elif isinstance(content, dict):
# If content is a dictionary, serialize it with datetime handling
try:
# First serialize to JSON string with datetime handling
json_str = json.dumps(content, default=datetime_handler)
# Then parse back to dict
response = json.loads(json_str)
except TypeError as e:
response = {"message": f"Serialization error: {str(e)}"}
else:
# For any other type, convert to string and use as message
response = {"message": str(content)}
# Add status code to the response
response["status"] = status
# Handle specific status codes
if status == falcon.HTTP_400:
response["error"] = "Bad Request"
elif status == falcon.HTTP_401:
response["error"] = "Unauthorized"
elif status == falcon.HTTP_500:
response["error"] = "Internal Server Error"
return response
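# Example usage (illustrative values):
# package_response({"rows": 3})                    # -> {"rows": 3, "status": "200 OK"}
# package_response("not found", falcon.HTTP_400)   # -> {"message": "not found", "status": "400 Bad Request", "error": "Bad Request"}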
def GetPriviledges(conn, user_name, password):
sql = "SELECT key, access_to_deployments, user_id FROM public.person_details WHERE user_name = '" + user_name + "'"
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result != None and result != []:
if result[0][0] == password:
return result[0][1], result[0][2]
else:
return "0", "0"
else:
return "0", "0"
def GetPriviledgesOnly(user):
with get_db_connection() as conn:
        if isinstance(user, int) or str(user).isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = %s"
            params = (int(user),)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = %s"
            params = (user,)
        with conn.cursor() as cur:
            cur.execute(sql, params)
result = cur.fetchall()#cur.fetchone()
if result != None:
return result[0][0]
else:
return "0"
def GetUserId(user_name):
with get_db_connection() as conn:
sql = "SELECT user_id FROM public.person_details WHERE user_name = '" + user_name + "'"
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result != None:
return result[0][0]
else:
return "0"
def GetNameFromUserId(user_id):
with get_db_connection() as conn:
sql = f"SELECT user_name, first_name, last_name FROM public.person_details WHERE user_id = {user_id}"
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result != None:
return result[0]
else:
return None
def ListDeployments(priviledges, user_id):
global user_id_2_user
    conn = get_db_connection()
    complete_result = []  # defined up front so the final return is safe if a query below fails
if priviledges == "-1":
sql = "SELECT * FROM public.deployments ORDER BY deployment_id ASC;"
else:
sql = f"SELECT * FROM public.deployments WHERE deployment_id IN ({priviledges}) OR user_edit = {user_id} ORDER BY deployment_id ASC;"
try:
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result == None:
complete_result = []
else:
deployment_ids = []
deployment_records_dict = {}
for record in result:
deployment_id = record[0]
deployment_ids.append(deployment_id)
deployment_records_dict[deployment_id] = record
sql = f"SELECT * FROM public.deployment_details WHERE deployment_id IN ({','.join(map(str, deployment_ids))}) ORDER BY deployment_id ASC;"
cur.execute(sql)
details_result = cur.fetchall()
beneficiary_ids = []
for record_details in details_result:
if record_details[1] != None and record_details[1] not in beneficiary_ids:
beneficiary_ids.append(record_details[1])
sql = f"SELECT * FROM public.person_details WHERE user_id IN ({','.join(map(str, beneficiary_ids))});"
cur.execute(sql)
user_id_2_user = {}
users = cur.fetchall()#cur.fetchone()
for usr_record in users:
user_id_2_user[usr_record[0]] = usr_record
complete_result = []
if details_result != None:
for record_details in details_result:
deployment_record = deployment_records_dict[record_details[0]]
complete_record = {'deployment_id': record_details[0], 'beneficiary_id': record_details[1], 'caretaker_id': record_details[2],
'owner_id': record_details[3], 'installer_id': record_details[4],
'address_street': record_details[6], 'address_city': record_details[7], 'address_zip': record_details[8],
'address_state': record_details[9], 'address_country': record_details[10],
'devices': record_details[5], 'wifis': record_details[11], 'persons': deployment_record[4], 'gender': deployment_record[5],
'race': deployment_record[6], 'born': deployment_record[7], 'pets': deployment_record[8], 'time_zone': deployment_record[3]
}
complete_result.append(complete_record)
    except Exception:
        logger.error(f"Error: {traceback.format_exc()}")
return complete_result
def ListCaretakers(privileges, user_name):
conn = get_db_connection()
if privileges == "-1":
sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%2%' ORDER BY last_name;" #2 is caretaker
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result == None:
result = []
else:
        # Non-admin callers only get their own person record back
sql = f"SELECT * FROM public.person_details WHERE user_name = '{user_name}';" #2 is caretaker
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result == None:
result = []
pass
return result
def ListBeneficiaries(privileges, user_info):
conn = get_db_connection()
with conn.cursor() as cur:
if (privilidges == "-1"):
sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%1%' ORDER BY last_name;" #1 is beneficiary
else:
#we need to find beneficiaries from list of deployments
sql = f"SELECT beneficiary_id FROM public.deployment_details WHERE deployment_id IN ({privilidges}) ORDER BY deployment_id ASC;"
cur.execute(sql)
result1 = cur.fetchall()#cur.fetchone()
if result1 == None:
result = []
return result
beneficiaries = ",".join(str(x[0]) for x in result1)
sql = f"SELECT * FROM public.person_details WHERE user_id IN ({beneficiaries}) OR user_edit = {user_info} AND role_ids LIKE '%1%' ORDER BY last_name;" #1 is beneficiary
logger.debug(f"sql= {sql}")
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result == None:
result = []
return result
def UserDetails(user_id):
conn = get_db_connection()
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'person_details';"
with conn.cursor() as cur:
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.person_details WHERE user_id = "+user_id
caretaker_record = {}
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
caretaker_record[field[0]] = result[cnt]
cnt += 1
return caretaker_record
def DeviceDetails(mac):
conn = get_db_connection()
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
with conn.cursor() as cur:
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.devices WHERE device_mac = '" + mac + "'"
device_record = {}
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
device_record[field[0]] = result[cnt]
cnt += 1
return device_record
def GetDeviceDetailsSingle(device_id):
conn = get_db_connection()
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
with conn.cursor() as cur:
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.devices WHERE device_id = '" + device_id + "'"
device_record = {}
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
device_record[field[0]] = result[cnt]
cnt += 1
return device_record
def GetDeviceDetailsSingleFromMac(device_mac):
conn = get_db_connection()
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
with conn.cursor() as cur:
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.devices WHERE device_mac = '" + device_mac + "'"
device_record = {}
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
device_record[field[0]] = result[cnt]
cnt += 1
else:
            # Device is not in the DB; the fallback lookup has not been implemented yet
pass
return device_record
def DeploymentDetails(deployment_id):
deployment_record = {}
conn = get_db_connection()
with conn.cursor() as cur:
sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployments';"
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.deployments WHERE deployment_id = '" + deployment_id + "'"
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
deployment_record[field[3]] = result[cnt]
cnt += 1
sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployment_details';"
cur.execute(sql)
columns_names = cur.fetchall()
sql = "SELECT * FROM public.deployment_details WHERE deployment_id = '" + deployment_id + "'"
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
cnt = 0
for field in columns_names:
deployment_record[field[3]] = result[cnt]
cnt += 1
return deployment_record
def ValidUser(user_name, password):
if use_pdb:
with get_db_connection() as db_conn:
priviledges, user_id= GetPriviledges(db_conn, user_name, password)
return priviledges, user_id
else:
pass
#container = GetReference("/MAC")
#try:
## We can do an efficient point read lookup on partition key and id
##response = container.read_item(item="64B708896BD8_temperature_2024-01-01_00", partition_key="64B708896BD8") #OK
##items = query_items(container, '64B708896BD8') #Too slow
##AddToLog("1!")
#privileges = GetCaretakers(container, email, password)
#return privileges
#except Exception as err:
#AddToLog("Error !1 "+str(err))
def GetMaxRole(user_name):
with get_db_connection() as db_conn:
sql = "SELECT role_ids FROM public.person_details WHERE user_name = '" + user_name + "'"
with db_conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
if result != None and result != []:
return str(result[0])
return ""
def SelectOption(html_code, select_id, selected_item):
"""
Modifies HTML code to set the selected attribute for a specific option in a select element.
Args:
html_code (str): Original HTML code
select_id (str): ID of the select element to modify
selected_item (str or int): Value of the option to be selected
Returns:
str: Modified HTML code with the selected attribute added
"""
# Convert selected_item to string for comparison
selected_item = str(selected_item)
# Find the select element with the given ID
    select_pattern = rf'<select[^>]*\bid=["\']{re.escape(select_id)}["\'][^>]*>.*?</select>'
select_match = re.search(select_pattern, html_code, re.IGNORECASE | re.DOTALL)
if not select_match:
return html_code # Return unchanged if select element not found
select_content = select_match.group(0)
select_content_orig = select_content
# Remove any existing selected attributes
select_content = re.sub(r'\s+selected(?=[>\s])', '', select_content, flags=re.IGNORECASE)
# Add selected attribute to the matching option
def replace_option(match):
value = re.search(r'value=[\'"]?([^\'">\s]+)', match.group(0))
if value and value.group(1) == selected_item:
# Add selected attribute before the closing >
return match.group(0).rstrip('>') + ' selected>'
return match.group(0)
    modified_select = re.sub(
        r'<option[^>]*>',
        replace_option,
        select_content,
        flags=re.IGNORECASE
    )
    # Splice the modified <select> block back into the original HTML
    return html_code.replace(select_content_orig, modified_select)
def InstallSelector(html_string, deployments, selected, include_all):
    """
    Build the deployment <option> list and substitute it for the ###INSTALLS###
    placeholder in html_string.
    NOTE: the original definition line and markup of this helper were lost; the
    function name, signature and <option> strings are reconstructed from the body.
    """
    if include_all:
        selector_string = '<option value="-1">All</option>\n'
    else:
        selector_string = ''
for deployment in deployments:
first_name = ""
last_name = ""
if deployment[1] != None:
first_name = deployment[1]
if deployment[2] != None:
last_name = deployment[2]
        # <option> markup reconstructed; the exact original label format was lost
        if deployment[0] == int(selected):
            choice_string = f'<option value="{deployment[0]}" selected>{deployment[0]} {first_name} {last_name}</option>\n'
        else:
            choice_string = f'<option value="{deployment[0]}">{deployment[0]} {first_name} {last_name}</option>\n'
selector_string = selector_string + choice_string
#print(selector_string)
html_string = html_string.replace("###INSTALLS###",selector_string)
return html_string
def GetDeviceDetails(cur, deployment_ids, location_id):
#ID, Well id, MAC, Last_Message, Location, Description, Deployment
macs = [mac for _, mac in deployment_ids]
#macs = list(deployment_ids.keys())
macs_string_nq = ",".join(macs)
macs_string = "'" + "','".join(macs) + "'"
if location_id == -1:
sql = f"""
WITH ordered_macs AS (
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
)
SELECT d.*
FROM public.devices d
JOIN ordered_macs om ON d.device_mac = om.mac::text
WHERE device_mac IN ({macs_string})
ORDER BY om.position;
"""
else:
sql = f"""
WITH ordered_macs AS (
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
)
SELECT d.*
FROM public.devices d
JOIN ordered_macs om ON d.device_mac = om.mac::text
WHERE device_mac IN ({macs_string}) AND location = {location_id}
ORDER BY om.position;
"""
cur.execute(sql)
print(sql)
devices_ids_records = cur.fetchall()
all_details = []
devices_ids_list = [x[0] for x in devices_ids_records]
device_ids_string = ",".join(map(str, devices_ids_list))
#sql = f"SELECT device_id, MAX(time) as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) GROUP BY device_id" #to slow
sql = f"SELECT DISTINCT ON (device_id) device_id, time as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) AND time > now() - INTERVAL '1 day' ORDER BY device_id, time DESC"
cur.execute(sql)
print(sql)
devices_times = cur.fetchall()#cur.fetchone()
found_device_details = {}
for device_record in devices_times:
device_id, last_message_time = device_record
found_device_details[device_id] = last_message_time
cnt = 0
for device_table_record in devices_ids_records:
if len(devices_times) > 0:
device_id = device_table_record[0]
if device_id in found_device_details:
last_message_time = found_device_details[device_id]
last_message_epoch = int(last_message_time.timestamp())
else:
try:
last_message_time = int(device_table_record[14])
                except Exception:
last_message_time = 0
last_message_epoch = last_message_time
else:
last_message_time = 0
last_message_epoch = 0
#print(last_message_epoch)
#print(type(last_message_epoch))
device_id = device_table_record[0]
mac = device_table_record[1]
well_id = device_table_record[2]
description = device_table_record[3]
if description == None:
description = ""
if device_table_record[5] != None:
if device_table_record[5] != "":
description = description + " Close to " + device_table_record[5]
location_id = device_table_record[4]
if location_id == None:
location_id = 0
        row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
cnt += 1
all_details.append(row_data)
return all_details
def GetDeviceDetailsComplete(cur, deployment_ids, location_id):
#ID, Well id, MAC, Last_Message, Location, Description, Deployment
macs = [mac for _, mac in deployment_ids]
#macs = list(deployment_ids.keys())
macs_string_nq = ",".join(macs)
macs_string = "'" + "','".join(macs) + "'"
if location_id == -1:
sql = f"""
WITH ordered_macs AS (
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
)
SELECT d.*
FROM public.devices d
JOIN ordered_macs om ON d.device_mac = om.mac::text
WHERE device_mac IN ({macs_string})
ORDER BY om.position;
"""
else:
sql = f"""
WITH ordered_macs AS (
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
)
SELECT d.*
FROM public.devices d
JOIN ordered_macs om ON d.device_mac = om.mac::text
WHERE device_mac IN ({macs_string}) AND location = {location_id}
ORDER BY om.position;
"""
cur.execute(sql)
print(sql)
devices_ids_records = cur.fetchall()
all_details = []
devices_ids_list = [x[0] for x in devices_ids_records]
device_ids_string = ",".join(map(str, devices_ids_list))
#sql = f"SELECT device_id, MAX(time) as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) GROUP BY device_id" #to slow
sql = f"SELECT DISTINCT ON (device_id) device_id, time as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) AND time > now() - INTERVAL '1 day' ORDER BY device_id, time DESC"
cur.execute(sql)
print(sql)
devices_times = cur.fetchall()#cur.fetchone()
found_device_details = {}
for device_record in devices_times:
device_id, last_message_time = device_record
found_device_details[device_id] = last_message_time
cnt = 0
for device_table_record in devices_ids_records:
        if len(devices_times) > 0:
            device_id = device_table_record[0]
            if device_id in found_device_details:
last_message_time = found_device_details[device_id]
last_message_epoch = int(last_message_time.timestamp())
else:
try:
last_message_time = int(device_table_record[14])
                except Exception:
last_message_time = 0
last_message_epoch = last_message_time
else:
last_message_time = 0
last_message_epoch = 0
#print(last_message_epoch)
#print(type(last_message_epoch))
device_id = device_table_record[0]
mac = device_table_record[1]
well_id = device_table_record[2]
description = device_table_record[3]
alarm_details = device_table_record[16]
if description == None:
description = ""
if device_table_record[5] != None:
if device_table_record[5] != "":
description = description + " Close to " + device_table_record[5]
location_id = device_table_record[4]
if location_id == None:
location_id = 0
#try:
# row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0], alarm_details]
#except:
row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0], alarm_details]
cnt += 1
all_details.append(row_data)
return all_details
def GetVisibleDevices(deployments):
devices_details = []
stt = time.time()
with get_db_connection() as conn:
with conn.cursor() as cur:
#list all devices that user has access to
if deployments == "-1":
sql = "SELECT device_mac FROM public.devices ORDER BY device_id ASC"# SELECT deployment_id, devices FROM public.deployment_details"
macs_group = []
deployment_ids = []
print(sql)
cur.execute(sql)
macs_records = cur.fetchall()#cur.fetchone()
for record in macs_records:
deployment_ids.append((0, record[0]))
devices_details = GetDeviceDetails(cur, deployment_ids, -1)
else:
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
print(sql)
cur.execute(sql)
devices_groups = cur.fetchall()#cur.fetchone()
deployment_ids = []
for deployment_id, dev_group in devices_groups:
if dev_group != None and dev_group != "":
if len(dev_group) > 10:
if "[" not in dev_group:
if "," not in dev_group:
dev_group = '["' + dev_group + '"]'
else:
dev_group = dev_group.replace(" ", "")
dev_group = dev_group.replace(",", '","')
dev_group = '["' + dev_group + '"]'
macs_group = literal_eval(dev_group)
for mac in macs_group:
deployment_ids.append((deployment_id, mac))
else:
print(f"Deployment {deployment_id} has dev_group empty")
devices_details = []
if deployment_ids != []:
devices_details = GetDeviceDetails(cur, deployment_ids, -1)
#devices_details.append(devices_detail)
return devices_details
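# Example usage (illustrative): pass "-1" for access to every deployment, otherwise a
# comma-separated deployment id list such as "3,7,12":
# GetVisibleDevices("3,7,12")
#   -> [[device_id, well_id, mac, last_message_epoch, location, description, deployment_id], ...]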
def GetVisibleDevicesPerLocation(deployments, location):
devices_details = []
with get_db_connection() as conn:
#list all devices that user has access to
if deployments == "-1" or deployments == "0":
sql = "SELECT deployment_id, devices FROM public.deployment_details"
else:
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
with conn.cursor() as cur:
cur.execute(sql)
devices_groups = cur.fetchall()#cur.fetchone()
deployment_ids = []
for deployment_id, dev_group in devices_groups:
if dev_group != None:
if len(dev_group) > 10:
if dev_group[0] == "[":
macs_group = literal_eval(dev_group)
else:
macs_group = dev_group.split(',')
for mac in macs_group:
deployment_ids.append((deployment_id, mac))
devices_details = GetDeviceDetails(cur, deployment_ids, location_indexes[location])
#devices_details.append(devices_detail)
return devices_details
def GetVisibleDevicesPerLocationComplete(deployments, location):
devices_details = []
with get_db_connection() as conn:
#list all devices that user has access to
if deployments == "-1" or deployments == "0":
sql = "SELECT deployment_id, devices FROM public.deployment_details"
else:
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
with conn.cursor() as cur:
cur.execute(sql)
devices_groups = cur.fetchall()#cur.fetchone()
deployment_ids = []
for deployment_id, dev_group in devices_groups:
if dev_group != None:
if len(dev_group) > 10:
if dev_group[0] == "[":
macs_group = literal_eval(dev_group)
else:
macs_group = dev_group.split(',')
for mac in macs_group:
deployment_ids.append((deployment_id, mac))
devices_details = GetDeviceDetailsComplete(cur, deployment_ids, location_indexes[location])
#devices_details.append(devices_detail)
return devices_details
def GetUsersFromDeployments(deployments):
#list all devices that user has access to
deployments_dets = []
with get_db_connection() as conn:
try:
if deployments == "-1":
sql = f"""
SELECT dd.deployment_id, pd.first_name, pd.last_name
FROM deployment_details dd
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
ORDER BY dd.deployment_id;
"""
else:
sql = f"""
SELECT dd.deployment_id, pd.first_name, pd.last_name
FROM deployment_details dd
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
WHERE dd.deployment_id IN ({deployments})
ORDER BY dd.deployment_id;
"""
with conn.cursor() as cur:
cur.execute(sql)
print(sql)
deployments_dets = cur.fetchall()#cur.fetchone()
except Exception as err:
logger.error("GetUsersFromDeployments "+str(err) +" "+sql)
return deployments_dets
def GetPreviousDate(current_date):
date_obj = datetime.datetime.strptime(current_date, "%Y-%m-%d")
# Subtract one day
previous_date = date_obj - timedelta(days=1)
# Convert back to string format
previous_date_str = previous_date.strftime("%Y-%m-%d")
return(previous_date_str)
def CovertToIsoTime(date_s, n_minute):
hours = n_minute // 60 # Integer division
minutes = n_minute % 60
base_date = datetime.datetime.strptime(date_s, "%Y-%m-%d")
final_datetime = base_date + timedelta(hours=hours, minutes=minutes)
iso_timestamp = final_datetime.isoformat()
return iso_timestamp
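# Example usage (illustrative values):
# GetPreviousDate("2025-03-01")      # -> "2025-02-28"
# CovertToIsoTime("2025-03-01", 90)  # -> "2025-03-01T01:30:00"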
def sleep_length(presence_list, short_absence_threshold=15):
"""
Calculate the total sleep duration and wake time based on presence data.
This function correctly interprets the presence_list to determine sleep duration by:
1. Properly aggregating the total sleep time from all significant in-bed periods
2. Considering short absences as part of the same sleep session
3. Determining the wake time when the main sleep session ended
Args:
presence_list (list): List of tuples indicating bed presence/absence
short_absence_threshold (int, optional): Maximum duration in decas to consider
an absence "short" and still count as sleep.
Default is 15 (2.5 minutes)
Returns:
tuple: (sleep_duration_minutes, wake_time_minutes)
sleep_duration_minutes: Total sleep duration in minutes
wake_time_minutes: Minute in the day when person was determined to be
done sleeping (minutes since midnight)
"""
# Extract in-bed periods and out-of-bed periods
in_bed_periods = []
out_bed_periods = []
# First process the raw data into periods
for i in range(len(presence_list)):
deca_index, deca_count = presence_list[i]
# Skip separator tuples where deca_count is 0
if deca_count == 0:
continue
if deca_count > 0: # In bed
# Special case for the midnight (first) tuple
if i == 0 and deca_index == 0:
# This is time in bed before midnight
start_deca = -deca_count # Negative because it's before midnight
end_deca = 0 # Midnight
else:
start_deca = deca_index
end_deca = deca_index + deca_count
in_bed_periods.append({
'start': start_deca,
'end': end_deca,
'duration': deca_count
})
else: # Out of bed
out_bed_periods.append({
'start': deca_index,
'end': deca_index + abs(deca_count),
'duration': abs(deca_count)
})
# Sort periods to ensure chronological order
in_bed_periods.sort(key=lambda p: p['start'])
out_bed_periods.sort(key=lambda p: p['start'])
# Merge in-bed periods that are separated by short absences
merged_periods = []
current_period = None
for period in in_bed_periods:
# If we're at the start or after a long break, begin a new period
if current_period is None:
current_period = period.copy()
else:
# Check if this period starts shortly after the previous one ends
gap = period['start'] - current_period['end']
# If the gap is negative, the periods overlap (data error), treat as continuous
if gap < 0:
gap = 0
# If the gap is short enough, merge the periods
if gap <= short_absence_threshold:
# Extend the current period
current_period['end'] = period['end']
current_period['duration'] += period['duration'] + gap # Include gap
else:
# Gap too long, add the completed period and start a new one
merged_periods.append(current_period)
current_period = period.copy()
# Add the last period if there is one
if current_period is not None:
merged_periods.append(current_period)
# Find significant sleep periods (at least 30 minutes) - REMOVED night-time restriction
significant_sleep_threshold = 180 # 30 minutes (180 decas)
significant_periods = [p for p in merged_periods if p['duration'] >= significant_sleep_threshold]
if significant_periods:
# Find the LATEST significant sleep period (for multi-day data)
main_sleep_period = max(significant_periods, key=lambda p: p['start'])
# Calculate total sleep duration
sleep_duration_minutes = round(main_sleep_period['duration'] / 6) # Convert to minutes
# Wake time is when this period ended
# Handle multi-day scenarios by taking modulo 24 hours
wake_time_deca = main_sleep_period['end']
wake_time_minutes = round(wake_time_deca / 6)
# If wake time is beyond 24 hours, wrap it to the current day
if wake_time_minutes >= 1440: # 1440 = 24 hours in minutes
wake_time_minutes = wake_time_minutes % 1440
return (sleep_duration_minutes, wake_time_minutes)
# No significant sleep periods found
return (0, 0)
# Example usage:
# sleep_minutes = sleep_length(presence_list) # Use default threshold
# sleep_minutes = sleep_length(presence_list, short_absence_threshold=30) # Allow longer absences (5 minutes)
# Example usage:
# presence_list = [
# [0, 554], [3303, 3857], [3303, 0], [3387, -84], [3387, 0], [3388, 1], [3388, 0],
# [3668, -280], [3668, 0], [3669, 1], [3669, 0], [3699, -30], [3699, 0], [3700, 1],
# [3700, 0], [3863, -163], [3863, 0], [3864, 1], [3864, 0], [4418, -554], [4418, 0],
# [4419, 1], [4419, 0], [4547, -128], [4547, 0], [4548, 1], [4548, 0], [4603, -55],
# [4603, 0], [4604, 1], [4604, 0], [4965, -361], [4965, 0], [4966, 1], [4966, 0],
# [4984, -18], [4984, 0], [4985, 1], [4985, 0], [8639, -3654]
# ]
# print(f"Sleep duration: {sleep_length(presence_list)} minutes")
def filter_short_groups_c_wc_old(presence_list, filter_size, device_id_str, from_date, to_date, time_zone_s, refresh = False):
#days = presence_list
#for from_date, to_date
tz = pytz.timezone(time_zone_s)
# Get current time in that timezone
current_time = datetime.datetime.now(tz)
# Return just the date part as string
now_date_str = current_time.strftime("%Y-%m-%d")
start_date = datetime.datetime.strptime(from_date, "%Y-%m-%d")
end_date = datetime.datetime.strptime(to_date, "%Y-%m-%d")
last_offset = 0
#if to_date == now_date_str:
# last_offset = 1
# Loop through each date (including end_date)
current_date = start_date
dates_list = []
days_difference = 1 + (end_date - start_date).days
whole_result = [0] * 6 * 1440 * (days_difference)
is_long = False
if len(presence_list)/(6 * 1440) > (days_difference): #long version
is_long = True
while current_date <= end_date:
current_date_str = current_date.strftime("%Y-%m-%d")
print(current_date_str)
dates_list.append(current_date_str)
current_date += timedelta(days=1)
for day in range(1, days_difference-last_offset+1):
print(day)
end_index = (1 + day) * 6 * 1440
if end_index > len(presence_list):
end_index = len(presence_list)
if is_long:
start_index = end_index - 2 * 6 * 1440
else:
start_index = end_index - 6 * 1440
current_date_str = dates_list[day-1]
filename_day_presence = f"/{device_id_str}/{device_id_str}_{current_date_str}_{filter_size}_presence.bin"
filtered_day_str = None
if refresh == False:
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
if filtered_day_str == None:
filtered_day = filter_short_groups_c(presence_list[start_index:end_index], filter_size, device_id_str, from_date)
SaveGenericObjectInBlob("filtered-presence", filename_day_presence, filtered_day)
else:
filtered_day = json.loads(filtered_day_str)
whole_result[start_index:end_index] = filtered_day
if current_date_str != to_date:
end_index = len(presence_list)
start_index = end_index - 2 * 6 * 1440
filtered_day = filter_short_groups_c(presence_list[start_index:end_index], filter_size, device_id_str, from_date)
whole_result[start_index:end_index] = filtered_day
return whole_result
def filter_short_groups_c_wc(presence_list, filter_size, device_id_str, from_date, to_date, time_zone_s, refresh=False):
"""
Filter out short groups across multiple days.
For each target day, processes [previous_day + target_day] and extracts only the target_day result.
"""
# Setup timezone and current time
tz = pytz.timezone(time_zone_s)
current_time = datetime.datetime.now(tz)
now_date_str = current_time.strftime("%Y-%m-%d")
start_date = datetime.datetime.strptime(from_date, "%Y-%m-%d")
end_date = datetime.datetime.strptime(to_date, "%Y-%m-%d")
# Build dates list
current_date = start_date
dates_list = []
while current_date <= end_date:
current_date_str = current_date.strftime("%Y-%m-%d")
print(current_date_str)
dates_list.append(current_date_str)
current_date += timedelta(days=1)
days_difference = len(dates_list)
# Handle current day limitation
samples_per_day = 6 * 1440
total_samples = samples_per_day * days_difference
# If today is the last day, limit the data
effective_total_samples = total_samples
if to_date == now_date_str:
current_minute_of_day = current_time.hour * 60 + current_time.minute
current_sample_of_day = min(current_minute_of_day * 6, samples_per_day)
effective_total_samples = (days_difference - 1) * samples_per_day + current_sample_of_day
print(f"Today detected: limiting to {current_sample_of_day} samples for last day")
# Initialize result - use effective total samples
whole_result = [0] * effective_total_samples
# Process each day (0-indexed to avoid confusion)
for day_idx in range(days_difference):
current_date_str = dates_list[day_idx]
print(f"Processing day {day_idx + 1}: {current_date_str}")
# Calculate result array indices for this day
result_start_idx = day_idx * samples_per_day
result_end_idx = (day_idx + 1) * samples_per_day
# For the last day, if it's today, limit the end index
if day_idx == days_difference - 1 and to_date == now_date_str:
result_end_idx = result_start_idx + current_sample_of_day
# Skip if this day's range is beyond our result array
if result_start_idx >= len(whole_result):
break
# Ensure we don't exceed result array bounds
result_end_idx = min(result_end_idx, len(whole_result))
# Calculate input data range - FIXED LOGIC
if day_idx == 0:
# First day: no previous day available in our data, process only current day
input_start_idx = 0
input_end_idx = min(len(presence_list), samples_per_day)
has_previous_day = False
else:
# Other days: use previous day + current day
input_start_idx = (day_idx - 1) * samples_per_day
input_end_idx = min(len(presence_list), (day_idx + 1) * samples_per_day)
has_previous_day = True
# Skip if no input data available
if input_start_idx >= input_end_idx or input_start_idx >= len(presence_list):
print(f"No input data available for {current_date_str}")
continue
# Try to load cached data
filename_day_presence = f"/{device_id_str}/{device_id_str}_{current_date_str}_{filter_size}_presence.bin"
filtered_day_str = None
if not refresh:
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, current_date_str)
if filtered_day_str is not None and filtered_day_str != "":
has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
if has_larger:
filtered_day_str = None
if filtered_day_str is None or filtered_day_str == "":
# Filter the input data
input_data = presence_list[input_start_idx:input_end_idx]
print(f"Input range: {input_start_idx}:{input_end_idx}, length: {len(input_data)}, has_previous_day: {has_previous_day}")
filtered_data = filter_short_groups_c(input_data, filter_size, device_id_str, from_date)
# Calculate how much data we need for this day
needed_samples = result_end_idx - result_start_idx
# Determine extraction offset - FIXED LOGIC
if has_previous_day and len(filtered_data) >= 2 * samples_per_day:
# We processed [previous_day + current_day], take the second day (current_day)
day_data_start = samples_per_day # Skip the first day (previous day context)
print(f"Extracting day 1 from 2-day filtered result: [{day_data_start}:{day_data_start + needed_samples}]")
elif has_previous_day and len(filtered_data) >= samples_per_day:
# We have previous day context but less than 2 full days
# Take from the portion that corresponds to current day
available_current_day_samples = len(filtered_data) - samples_per_day
day_data_start = samples_per_day
needed_samples = min(needed_samples, available_current_day_samples)
print(f"Extracting partial day 1: [{day_data_start}:{day_data_start + needed_samples}]")
else:
# First day or single day processing, take from beginning
day_data_start = 0
print(f"Extracting day 0 (first/single day): [{day_data_start}:{day_data_start + needed_samples}]")
day_data_end = day_data_start + needed_samples
# Extract the day's portion, ensuring we don't exceed bounds
if day_data_start < len(filtered_data):
filtered_day = filtered_data[day_data_start:min(day_data_end, len(filtered_data))]
else:
filtered_day = []
print(f"Filtered data length: {len(filtered_data)}")
print(f"Extracted day data: start={day_data_start}, end={day_data_end}, length={len(filtered_day)}")
# Cache the result
SaveGenericObjectInBlob("filtered-presence", filename_day_presence, filtered_day)
else:
filtered_day = json.loads(filtered_day_str)
# Copy to result array
copy_length = min(len(filtered_day), result_end_idx - result_start_idx)
if copy_length > 0:
whole_result[result_start_idx:result_start_idx + copy_length] = filtered_day[:copy_length]
print(f"Completed {current_date_str}: copied {copy_length} samples")
return whole_result
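# Example usage (illustrative; the device id, dates and time zone are placeholders,
# and per-day results are cached in the "filtered-presence" MinIO bucket):
# filtered = filter_short_groups_c_wc(raw_presence, 30, "559",
#                                     "2025-01-01", "2025-01-03", "America/Los_Angeles")
# len(filtered)   # -> 6 * 1440 * 3 samples (fewer when the last day is today)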
def GetLastDurationMinutes(deployment_id, selected_devices, filter, ddate):
global device_lookup_cache
max_sleep = 0
max_device_id = 0
max_woke_up = 0
presence_list = []
to_date = ddate
date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
# Subtract one day
previous_day = date_obj - timedelta(days=1)
# Convert back to string
prev_date = previous_day.strftime("%Y-%m-%d")
data_type = "z-graph"
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
#Lets filter bedrooms only
just_selected_devices = []
for device_details in devices_list:
if device_details[1] in selected_devices:
just_selected_devices.append(device_details)
devices_list = just_selected_devices
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
#time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
#epoch_time = calendar.timegm(time_from_z.utctimetuple())
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
epoch_time = calendar.timegm(time_from.utctimetuple())
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
presence_map = {}
presence_map["time_start"] = epoch_time
presence_map["time_zone"] = time_zone_s
# Calculate the difference in days
days_difference = (time_to - time_from).days
days_difference_long = days_difference + 1
if data_type == "all" or data_type == "z-graph" or data_type == "multiple":
# Convert string to datetime object
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
# Subtract one day
previous_day = date_obj - timedelta(days=1)
# Format back to string in the same format
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
for details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3_max",12]
print(well_id, radar_threshold_group)
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
ids_list = []
well_ids = []
id2well_id = {}
radar_fields_of_interest = []
device_field_indexes = {}
for details in devices_list:
threshold_str = details[5]
if "," in threshold_str:
try:
threshold_lst = json.loads(threshold_str)
except:
threshold_lst = ["s3",12]
else:
if is_number(threshold_str):
threshold_lst = ["s3", float(threshold_str)]
else:
threshold_lst = ["s3", 12]  #fall back to the default when the stored value is not numeric
print(threshold_lst)
radar_field = threshold_lst[0]
#since we are getting 10-second data, there is no longer a need for the min/max variants
radar_field = radar_field.split("_")[0]
if radar_field not in radar_fields_of_interest:
device_field_indexes[radar_field] = len(radar_fields_of_interest)
radar_fields_of_interest.append(radar_field)
ids_list.append(details[1])
id2well_id[details[1]] = details[0]
well_ids.append(details[0])
presence_map["well_ids"] = well_ids
if len(devices_list) > 0:
devices_list_str = ','.join(str(device[1]) for device in devices_list)
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
#sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
#print(sql)
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
#zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
print(zsql)
with get_db_connection() as conn:
with conn.cursor() as cur:
#cur.execute(sql)
#my_data = None
myz_data = None
#my_data = cur.fetchall()
cur.execute(zsql)
myz_data = cur.fetchall()
#if my_data != None:
#device_id_2_threshold = {}
#device_id_2_location = {0: "Outside"}
#row_nr_2_device_id = {}
#cnt = 0
#row_nr_2_device_id[0] = 0
##presence_map['longpresence'] and temporary_map_day_plus are similar, except one is used for Z-graph, and another for multiple persons detection
#if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
#presence_map['presence'] = {}
#presence_map['longpresence'] = {}
#if data_type == "raw" or data_type == "all":
#presence_map['raw'] = {}
#for details in devices_list:
##(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
#well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
#if data_type == "raw" or data_type == "all":
#presence_map['raw'][well_id] = [0] * 6 * 1440 * days_difference
#if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
#presence_map['presence'][well_id] = [0] * 6 * 1440 * days_difference
##presence_map[][well_id] = zeros_list
#cnt += 1
#row_nr_2_device_id[cnt] = well_id
#if radar_threshold_group_st == None:
#radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
#if len(radar_threshold_group_st) > 8:
#radar_threshold_group = json.loads(radar_threshold_group_st)
#else:
#radar_threshold_group = ["s3",12]
#device_id_2_location[well_id] = location_name
#device_id_2_threshold[well_id] = radar_threshold_group
#if len(my_data) > 1:
#start_time_ = my_data[0][0]
#parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = datetime.datetime(
#parsed_time.year,
#parsed_time.month,
#parsed_time.day,
#parsed_time.hour, # Adjust for UTC-7
#parsed_time.minute,
#parsed_time.second,
#tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
#)
#presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
if myz_data != None:
temporary_map_day_plus = {}
presence_map['z_graph'] = {}
presence_map['longpresence'] = {}
for details in devices_list:
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
presence_map['z_graph'][well_id] = [] #just place holder
temporary_map_day_plus[well_id] = [0] * 6 * 1440 * days_difference_long
presence_map['longpresence'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
print(deployment_id)
print(time_from_z_str)
print(devices_list)
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
start_time = datetime.datetime(
parsed_time.year,
parsed_time.month,
parsed_time.day,
parsed_time.hour, # Adjust for UTC-7
parsed_time.minute,
parsed_time.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
)
#start_time_ = myz_data[0][0]
st = time.time()
device_lookup_cache = {}
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
#copy the processed data for every device, not only the last well_id from the loop above
for wid in temporary_map_day_plus:
presence_map['longpresence'][wid] = temporary_map_day_plus[wid]
for device_id in ids_list:
device_id_str = str(device_id)
if data_type == "presence" or data_type == "all" or data_type == "z-graph":
if filter > 1:
#presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
#presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
#presence_map["presence"][id2well_id[device_id]] = presence_list
#longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s, True)
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
else: #straight decas
#presence_list = presence_map["presence"][id2well_id[device_id]]
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
max_sleep = 0
max_device_id = 0
max_woke_up = 0
for device_id in ids_list:
#print(device_id_2_threshold[id2well_id[device_id]])
z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
sleep_minutes, woke_up = sleep_length(z_graph)
if sleep_minutes > max_sleep:
max_sleep = sleep_minutes
max_device_id = device_id
max_woke_up = woke_up
presence_map = {}
return max_sleep, max_device_id, max_woke_up, z_graph
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def GetTempOffset(device_id):
result = 0
sql = f"""
SELECT temperature_calib
FROM public.devices
WHERE device_id = {device_id};
"""
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone()
try:
if isinstance(result, tuple):
if "," in result[0]:
string_nr = result[0].split(',')[-1].strip()
if is_number(string_nr):
return float(string_nr)
else:
return temperature_offset
else:
if is_number(result[0]):
return float(result[0])
else:
return temperature_offset
else:
return temperature_offset
except:
return temperature_offset
def GetTemperature(bedroom_device_id):
result = 0
sql = f"""
SELECT *
FROM public.sensor_readings
WHERE device_id = {bedroom_device_id} and temperature > 0
ORDER BY "time" DESC
LIMIT 1;
"""
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone()
if result == None:
return 0
else:
return result[2]
def GetSensorsDetailsFromDeployment(deployment_id, ddate, filter_minutes, fast=False):
#fetch the beneficiary's details (user_id, first_name, last_name, address_street, picture) for this deployment
deployments_dets = []
with get_db_connection() as conn:
try:
sql = f"""
SELECT pd.user_id, pd.first_name, pd.last_name, pd.address_street, pd.picture
FROM deployment_details dd
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
WHERE dd.deployment_id ={deployment_id};
"""
with conn.cursor() as cur:
cur.execute(sql)
deployments_dets = cur.fetchone()
except Exception as err:
logger.error("GetSensorsDetailsFromDeployment "+str(err) +" "+sql)
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
#Which sensor is in: Bathroom, Kitchen, Bedroom ?
bathrooms = []
kitchens = []
bedrooms = []
dev_id_to_location = {0: "Outside/?"}
for device in devices_list:
dev_id_to_location[device[1]] = device[2]
if Consolidataed_locations[device[2]] == "Bathroom":
bathrooms.append(device[1])
elif Consolidataed_locations[device[2]] == "Kitchen":
kitchens.append(device[1])
elif Consolidataed_locations[device[2]] == "Bedroom":
bedrooms.append(device[1])
#we need to determine where the user was last seen, and use the sensor data from there...
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
logger.debug(f"locations_file1 ={locations_file}")
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
force_recreate = False
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
if file_exists1:
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
file_modified_date_local = file_modified_local.date() #local date
file_modified_date_utc = file_modified_utc1.date()
file_date_utc = MapFileToDate(locations_file) #locations_file is UTC
#if file_modified_date_local < file_date_utc:
if file_modified_utc1.date() < file_date_utc:
force_recreate = True
else: #same date
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
time_passed = current_time - file_modified_local
if time_passed.total_seconds() > 30: #recreate if the cached file is older than 30 seconds
force_recreate = True
else:
force_recreate = True
logger.debug(f"force_recreate={str(force_recreate)}")
if force_recreate:
CreateLocationsStripe(locations_file, time_zone_s)
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
last_present_device = 0
last_present = 0
last_bathroom = 0
last_kitchen = 0
last_bedroom = 0
last_bathroom_date = ddate
last_kitchen_date = ddate
last_bedroom_date = ddate
before_last_present_device = 0
last_present_duration = 0
if locations_list_s is not None:
locations_list = json.loads(locations_list_s)
if len(locations_list) > 1:
if locations_list[-1][0] > 0:
last_present_device = locations_list[-1][0]
last_present = locations_list[-1][1] + locations_list[-1][2]
before_last_present_device, last_present_duration = FindPreviousLocation(locations_list, last_present_device, -1)
#before_last_present_device = locations_list[-3][0]
#last_present_duration = locations_list[-1][2]
else:
last_present_device = locations_list[-2][0]
last_present = locations_list[-2][1] + locations_list[-2][2]
before_last_present_device, last_present_duration = FindPreviousLocation(locations_list, last_present_device, -2)
#before_last_present_device = locations_list[-4][0]
#last_present_duration = locations_list[-2][2]
elif len(locations_list) == 1:
last_present_device = locations_list[0][0]
#Lets find last bathroom presence time
if len(locations_list) > 0 and len(bathrooms) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in bathrooms:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_bathroom:
last_bathroom = loc_time[1] + loc_time[2]
last_bathroom_date = ddate
#Lets find last kitchen presence time
if len(locations_list) > 0 and len(kitchens) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in kitchens:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_kitchen:
last_kitchen = loc_time[1] + loc_time[2]
last_kitchen_date = ddate
#Lets find last bedroom presence time
if len(locations_list) > 0 and len(bedrooms) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in bedrooms:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_bedroom:
last_bedroom = loc_time[1] + loc_time[2]
last_bedroom_date = ddate
if last_bathroom == 0 or last_kitchen == 0 or last_bedroom == 0:
ddate = GetPreviousDate(ddate)
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
logger.debug(f"locations_file2 ={locations_file}")
force_recreate = False
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
logger.debug(f"file_exists1={str(file_exists1)}")
logger.debug(f"file_modified_utc1={str(file_modified_utc1)}")
#file_exists1, file_modified_utc1
if file_exists1:
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
file_modified_date_local = file_modified_local.date()
file_date_utc = MapFileToDate(locations_file)
logger.debug(f"file_modified_utc1={str(file_modified_utc1.date())} file_date_utc={str(file_date_utc)}")
if file_modified_utc1.date() < file_date_utc:
force_recreate = True
else: #same date
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
time_passed = current_time - file_modified_local
logger.debug(f"current_time={current_time} file_modified_local={file_modified_local} time_passed={time_passed}")
if time_passed.total_seconds() > 30: #recreate if the cached file is older than 30 seconds
force_recreate = True
else:
force_recreate = True
if force_recreate:
CreateLocationsStripe(locations_file, time_zone_s)
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
logger.debug(f"locations_list_s={locations_list_s}")
if (locations_list_s is not None):
locations_list = json.loads(locations_list_s)
if last_present_device == 0:
if len(locations_list) > 1:
if locations_list[-1][0] > 0:
last_present_device = locations_list[-1][0]
else:
last_present_device = locations_list[-2][0]
elif len(locations_list) == 1:
last_present_device = locations_list[0][0]
if last_bathroom == 0:
if len(locations_list) > 0 and len(bathrooms) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in bathrooms:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_bathroom:
last_bathroom = loc_time[1] + loc_time[2]
last_bathroom_date = ddate
if last_kitchen == 0:
if len(locations_list) > 0 and len(kitchens) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in kitchens:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_kitchen:
last_kitchen = loc_time[1] + loc_time[2]
last_kitchen_date = ddate
if last_bedroom == 0:
if len(locations_list) > 0 and len(bedrooms) > 0:
for loc_time in reversed(locations_list):
for device_id_temp in bedrooms:
if device_id_temp == loc_time[0]:
if (loc_time[1] + loc_time[2]) > last_bedroom:
last_bedroom = loc_time[1] + loc_time[2]
last_bedroom_date = ddate
last_bathroom_time = "2023-01-01T00:00:00"
if last_bathroom > 0:
last_bathroom_time = CovertToIsoTime(last_bathroom_date, last_bathroom)
last_kitchen_time = "2023-01-01T00:00:00"
if last_kitchen > 0:
last_kitchen_time = CovertToIsoTime(last_kitchen_date, last_kitchen)
last_bedroom_time = "2023-01-01T00:00:00"
if last_bedroom > 0:
last_bedroom_time = CovertToIsoTime(last_bedroom_date, last_bedroom)
last_present_time = "2023-01-01T00:00:00"
if last_present > 0:
last_present_time = CovertToIsoTime(ddate, last_present)
# debug for 48h bug
if last_bathroom_time == "2023-01-01T00:00:00" or last_kitchen_time == "2023-01-01T00:00:00" or last_bedroom_time == "2023-01-01T00:00:00":
#last_bathroom_time = "48h" if last_bathroom_time == "2023-01-01T00:00:00" else f"{last_bathroom-last_bathroom_time}"
#last_kitchen_time = "48h" if last_kitchen_time == "2023-01-01T00:00:00" else f"{last_kitchen-last_kitchen_time}"
#last_bedroom_time = "48h" if last_bedroom_time == "2023-01-01T00:00:00" else f"{last_bedroom-last_bedroom_time}"
logger.debug(f"48h-> deployment_id={str(deployment_id)}, ddate={str(ddate)}")
logger.debug(f"48h-> force_recreate={force_recreate}")
logger.debug(f"48h-> last_bathroom_time={last_bathroom_time}|last_kitchen_time={last_kitchen_time}|last_bedroom_time={last_bedroom_time}")
logger.debug(f"48h-> devices_list={str(devices_list)}")
logger.debug(f"48h-> bathrooms={str(bathrooms)}")
logger.debug(f"48h-> kitchens={str(kitchens)}")
logger.debug(f"48h-> bedrooms={str(bedrooms)}")
logger.debug(f"48h-> locations_list_s={str(locations_list_s)}")
# wellness_score_percent
wellness_score_percent = 90
sleep_filter_minutes = 5
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
sleep_bathroom_visit_count = 0
# bedroom_co2
bedroom_co2 = 500
device_detail = None
bedroom_temperature = 0
sleep_hours = 0
if fast == False:
if len(bedrooms) > 0:
sleep_minutes, bedroom_device_id, woke_up, presence_list = GetLastDurationMinutes(deployment_id, bedrooms, sleep_filter_minutes, ddate)
sleep_hours = sleep_minutes/ 60
# bedroom_temperature
temp_offset = GetTempOffset(bedroom_device_id)
bedroom_temperature = GetTemperature(bedroom_device_id) + temp_offset
# sleep_bathroom_visit_count
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
if sleep_minutes < woke_up: # went to sleep after midnight
date_sleep = ddate
to_sleep = woke_up - sleep_minutes
else:# went to sleep before midnight
to_sleep = 1440 + woke_up - sleep_minutes
# Convert string to datetime object
previous_day = date_obj - timedelta(days=1)
date_sleep = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
# shower_detected_time
shower_detected_time = last_bathroom_time
# breakfast_detected_time
breakfast_detected_time = 0
# living_room_detected_time
living_room_time_spent = 0
# outside_hours
outside_hours = 0
#deployments_dets holds (user_id, first_name, last_name, address_street, picture)
picture_url = deployments_dets[4]
temp_offset = GetTempOffset(last_present_device)
temperature = GetTemperature(last_present_device) + temp_offset
report = {}
try:
if before_last_present_device != None:
before_last_location = dev_id_to_location[before_last_present_device]
else:
before_last_location = ""
report = {"user_id":deployments_dets[0],
"name":deployments_dets[1] + " " + deployments_dets[2],
"address":deployments_dets[3],
"time_zone":time_zone_s,
"picture":picture_url,
"bathroom_at": last_bathroom_time,
"kitchen_at": last_kitchen_time,
"bedroom_at": last_bedroom_time,
"temperature": temperature,
"smell": "clean",
"bathroom_delayed": [6, 12],
"kitchen_delayed": [6, 12],
"bedroom_delayed": [13, 16],
"last_location": dev_id_to_location[last_present_device],
"last_detected_time": last_present_time,
"before_last_location": before_last_location,
"last_present_duration": last_present_duration,
"wellness_score_percent": wellness_score_percent,
"wellness_descriptor_color": "bg-green-100 text-green-700",
"bedroom_temperature": round(bedroom_temperature, 2),
"sleep_bathroom_visit_count": sleep_bathroom_visit_count,
"bedroom_co2": bedroom_co2,
"shower_detected_time": shower_detected_time,
"breakfast_detected_time": breakfast_detected_time,
"living_room_time_spent": round(living_room_time_spent, 2),
"outside_hours": round(outside_hours, 2),
"wellness_descriptor": "Great!",
"last_seen_alert": "Alert = None",
"last_seen_alert_colors": "bg-green-100 text-green-700", #https://tailwindcss.com/docs/colors
"most_time_spent_in": "Bedroom",
"sleep_hours": round(sleep_hours, 2)
}
except Exception as e:
print(traceback.format_exc())
return report
def FindPreviousLocation(locations_list, last_present_device, start_index):
"""
Find the previous device_id (not last_present_device and not 0) counting backwards from start_index.
Also sum the minutes for all entries except the one being reported.
Args:
locations_list: List of [device_id, start_time, minutes] entries
last_present_device: Device ID to exclude from search
start_index: Index to start searching backwards from
Returns:
tuple: (device_id, total_minutes) or (None, total_minutes) if no valid device found
"""
total_minutes = 0
found_device = None
end_index = -len(locations_list) - 1
print(locations_list[-4:])
# Count backwards from start_index
for i in range(start_index, end_index, -1):
device_id = locations_list[i][0]
minutes = locations_list[i][2]
# If we haven't found our target device yet, check if this one qualifies
if found_device is None and device_id != last_present_device and device_id != 0:
found_device = device_id
break
else:
# Add minutes for all entries except the one we're reporting
total_minutes += minutes
return found_device, total_minutes
def ToList(input_data):
# If input is already a list
if isinstance(input_data, list):
return [str(x).strip() for x in input_data]
# If input is string
if isinstance(input_data, str):
# Remove outer brackets if present
cleaned = input_data.strip('()')
cleaned = cleaned.strip('[]')
# Remove extra quotes
cleaned = cleaned.replace('"', '').replace("'", '')
# Split by comma and clean each element
return [x.strip() for x in cleaned.split(',')]
raise ValueError(f"Unsupported input type: {type(input_data)}")
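# Examples for ToList (MAC strings in the same formats used elsewhere in this file):
#   ToList("['64B70888FA84', '64B70888F6F0']") -> ['64B70888FA84', '64B70888F6F0']
#   ToList("64B70888FA84,64B70888F6F0")        -> ['64B70888FA84', '64B70888F6F0']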
def MACsToWellIds(cur, macs_list):
device_ids = []
device_list = []
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
if macs_string != "'None'":
sqlr = f"SELECT well_id, device_mac, device_id, location, description, radar_threshold, close_to FROM public.devices WHERE device_mac IN ({macs_string})"
print (sqlr)
macs_map = {}
cur.execute(sqlr)
proximitys_list = cur.fetchall()
for well_id, mac, device_id, location, description, radar_threshold, close_to in proximitys_list:
if location == None:
location = -1
if radar_threshold == None:
radar_threshold = "50"
if close_to == None:
close_to = ""
macs_map[mac] = (well_id, device_id, location_names[location], description, mac, radar_threshold, close_to)
for mac in macs_list:
if mac != "":
print(mac)
device_ids.append(macs_map[mac][1])
device_list.append(macs_map[mac])
return device_ids, device_list
def MACsToDeviceId(MAC):
sqlr = f"SELECT device_id FROM public.devices WHERE device_mac = '{MAC}'"
device_id = 0
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sqlr)
result = cur.fetchone()
if result != None:
device_id = result[0]
return device_id
def DeviceId2MAC(device_id):
try:
with get_db_connection() as conn:
sqlr = f"SELECT device_mac FROM public.devices WHERE device_id ='{device_id}'"
with conn.cursor() as cur:
cur.execute(sqlr)
mac = cur.fetchone()[0]
return mac
except:
return ""
def WellId2Details(well_ids):
try:
with get_db_connection() as conn:
#sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
#device_ids_string = ",".join(map(str, devices_ids_list))
sqlr = f"SELECT well_id, device_id, device_mac, location, description FROM public.devices WHERE well_id IN ({well_ids})"
print(sqlr)
with conn.cursor() as cur:
cur.execute(sqlr)
details = cur.fetchall()
return details
except:
return ""
def MACsStrToDevIds(cur, macs):
device_ids = []
#we need to repackage the string so each MAC is wrapped in single quotes
macs_list = ToList(macs)
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
if macs_string != "'None'":
sqlr = f"SELECT device_mac, device_id FROM public.devices WHERE device_mac IN ({macs_string})"
print (sqlr)
macs_map = {}
cur.execute(sqlr)
proximitys_list = cur.fetchall()
for mac, device_id in proximitys_list:
device_ids.append((mac, device_id))
return device_ids
def ReadCleanStringDB(cur, sql):
cur.execute(sql)
temp_string = cur.fetchone()
if temp_string == None:
return ""
else:
return str(temp_string[0]).strip()
# obtain (device_list, device_ids) for deployment_id at the time epoch_from_file_s (usually today)
# it first tries the overridden/newly-installed entries (FROM public.deployment_history),
# then, if none are found there, falls back to public.deployment_details
def GetProximityList(deployment_id, epoch_from_file_s):
#both are valid:
#64B70888FA84,64B70888F6F0,64B70888F860,64B70889062C,64B70888FAB0,64B708896BDC,64B708897428
#['64B70888FA84', '64B70888F6F0', '64B70888F860', '64B70889062C', '64B70888FAB0', '64B708896BDC', '64B708897428']
#result_list = []
#well_ids = []
with get_db_connection() as conn:
sqlr = f"""
SELECT * FROM (
SELECT proximity
FROM public.deployment_history
WHERE deployment_id = {deployment_id}
AND time <= {epoch_from_file_s}
ORDER BY time DESC
LIMIT 1
) AS latest_deployment
"""
print (sqlr)
with conn.cursor() as cur:
devices_string = ReadCleanStringDB(cur, sqlr)
if devices_string == "":
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
#print (sqlr)
devices_string = ReadCleanStringDB(cur, sqlr)
if devices_string == "":
return [], []
macs_list = ToList(devices_string)
device_ids, device_list = MACsToWellIds(cur, macs_list)
return device_list, device_ids
def FilterList(to_filter: str, allowed: str) -> str:
# Convert comma-separated strings to sets
filter_set = set(to_filter.split(','))
allowed_set = set(allowed.split(','))
# Find intersection and sort the result
filtered = sorted(filter_set.intersection(allowed_set), key=int)
# Join back to comma-separated string
return ','.join(filtered)
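# Example for FilterList: FilterList("5,3,9", "3,5,7") -> "3,5" (intersection, sorted numerically)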
def GetMatchingDevices(privileges, group, deployment, location):
global LocationsMap
results=[]
if privileges != "-1":
if deployment == "" or deployment == "0":
deployment = privileges
privileges_list = privileges.split(',')
if deployment != "0":
if "," in deployment:
deployment = FilterList(deployment, privileges)
else:
if deployment not in privileges_list:
return results
else:
if deployment == "0":
deployment = "-1"
devices = GetVisibleDevicesPerLocation(deployment, location)
return devices
def GetMatchingDevicesComplete(privileges, group, deployment, location):
global LocationsMap
results=[]
if privileges != "-1":
if deployment == "" or deployment == "0":
deployment = privileges
privileges_list = privileges.split(',')
if deployment != "0":
if "," in deployment:
deployment = FilterList(deployment, privileges)
else:
if deployment not in privileges_list:
return results
else:
if deployment == "0":
deployment = "-1"
devices = GetVisibleDevicesPerLocationComplete(deployment, location)
return devices
def getOldestDeploymentHistoryFromBeneficiary(deployment_id):
#this will return oldest entry as well as last proximity (devices)
st = time.time()
print(f"*0 ----{time.time() - st}")
results=[]
well_ids_last = [] #this needs to be list of tuples (well_id, Location_st, Description)
oldest_time = None
try:
print(f"*0a ----{time.time() - st}")
with get_db_connection() as conn:
sqlr = f"""
SELECT * FROM (
SELECT time, proximity
FROM public.deployment_history
WHERE deployment_id = {deployment_id}
ORDER BY time ASC
) AS latest_deployment
"""
print (sqlr)
print(f"*1 ----{time.time() - st}")
with conn.cursor() as cur:
cur.execute(sqlr)
print(f"*2 ----{time.time() - st}")
results = cur.fetchall()
print(f"*3 ----{time.time() - st}")
#lets find which of historical sets has data in DB
if results == None or results == []: #look in deployment_details
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
#print (sqlr)
print(f"*4 ----{time.time() - st}")
devices_string = ReadCleanStringDB(cur, sqlr)
print(f"*5 ----{time.time() - st}")
macs_list = ToList(devices_string)
print(f"*6 ----{time.time() - st}")
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
sql_query = """
SELECT device_id, first_seen_at
FROM device_first_seen
WHERE device_id = ANY(%s)
GROUP BY device_id;
"""
print(f"*7 ----{time.time() - st}")
try:
cur.execute(sql_query, (device_ids_last,))
results1 = cur.fetchall()
if results1 == []:
try:
# Find the oldest timestamp for each device from sensor_readings
oldest_query = """
SELECT device_id, MIN(time) as first_seen_at
FROM sensor_readings
WHERE device_id = ANY(%s)
GROUP BY device_id
"""
cur.execute(oldest_query, (device_ids_last,))
oldest_results = cur.fetchall()
if oldest_results:
# Insert the oldest records into device_first_seen
insert_query = """
INSERT INTO device_first_seen (device_id, first_seen_at)
VALUES (%s, %s)
ON CONFLICT (device_id) DO NOTHING
"""
# Insert each device's first seen timestamp
for device_id, first_seen_at in oldest_results:
cur.execute(insert_query, (device_id, first_seen_at))
# Commit the inserts
conn.commit()
# Set oldest_time to the earliest timestamp found
oldest_time = min(result[1] for result in oldest_results)
AddToLog(f"Inserted {len(oldest_results)} device first seen records")
else:
# No sensor readings found for these devices
AddToLog("No sensor readings found for the specified devices")
oldest_time = None
except Exception as e:
AddToLog(f"Error determining oldest records: {traceback.format_exc()}")
AddToLog(str(e))
oldest_time = None
# Rollback in case of error
conn.rollback()
else:
oldest_time = results1[0][1]
except Exception as e:
AddToLog(traceback.format_exc())
AddToLog(str(e))
print(f"*8 ----{time.time() - st}")
else:
history_entry = results[-1]
macs_list = ToList(history_entry[1])
print(f"*9 ----{time.time() - st}")
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
for history_entry in results:
macs_list = ToList(history_entry[1])
print(f"*10 ----{time.time() - st}")
device_ids, device_alls = MACsToWellIds(cur, macs_list)
#print(f"*11 ----{time.time() - st}")
#sql_query = """
#SELECT time as oldest_record_time
#FROM sensor_readings
#WHERE device_id = ANY(%s)
#ORDER BY time ASC
#LIMIT 1;
#"""
print(f"*12 ----{time.time() - st}")
print("Getting oldest record time for devices:", device_ids_last)
#print(sql_query, device_ids_last)
#try:
#cur.execute(sql_query, (device_ids_last,))
#results1 = cur.fetchall()
#oldest_time = results1[0][0]
#if oldest_time != None:
#break
#except Exception as e:
#print(str(e))
try:
oldest_time = get_oldest_record_time_optimized(cur, device_ids_last)
if oldest_time is not None:
break
except Exception as e:
print(str(e))
print(f"*13 ----{time.time() - st}")
except Exception as e:
print(f"*0b ----{time.time() - st}")
AddToLog(traceback.format_exc())
print(f"*14 ----{time.time() - st}")
return oldest_time, device_alls_last
def get_oldest_record_time_optimized(cur, device_ids):
"""
Get the oldest record time for devices, using device_first_seen cache
and only querying sensor_readings for devices not in cache.
"""
if not device_ids:
return None
# First, check which devices already have cached first_seen times
device_ids_tuple = tuple(device_ids)
cache_query = """
SELECT device_id, first_seen_at
FROM device_first_seen
WHERE device_id = ANY(%s)
"""
cur.execute(cache_query, (device_ids,))
cached_results = cur.fetchall()
cached_device_times = {device_id: first_seen for device_id, first_seen in cached_results}
# Find devices that are NOT in cache
uncached_device_ids = [device_id for device_id in device_ids if device_id not in cached_device_times]
# For uncached devices, query sensor_readings and populate cache
for device_id in uncached_device_ids:
try:
# Query oldest record for this specific device
oldest_query = """
SELECT time as oldest_record_time
FROM sensor_readings
WHERE device_id = %s
ORDER BY time ASC
LIMIT 1;
"""
cur.execute(oldest_query, (device_id,))
result = cur.fetchone()
if result and result[0] is not None:
oldest_time = result[0]
# Insert into cache table (use ON CONFLICT in case of race conditions)
insert_cache_query = """
INSERT INTO device_first_seen (device_id, first_seen_at)
VALUES (%s, %s)
ON CONFLICT (device_id) DO NOTHING;
"""
cur.execute(insert_cache_query, (device_id, oldest_time))
# Add to our local cache dict
cached_device_times[device_id] = oldest_time
except Exception as e:
print(f"Error processing device_id {device_id}: {str(e)}")
continue
# Return the earliest time among all devices that have data
valid_times = [time for time in cached_device_times.values() if time is not None]
return min(valid_times) if valid_times else None
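# Usage sketch for get_oldest_record_time_optimized (hypothetical device ids):
#   oldest = get_oldest_record_time_optimized(cur, [559, 560])
# Cached devices are answered from device_first_seen; uncached ones cost one indexed
# sensor_readings lookup each. The cache inserts are not committed here, so the caller's
# connection/transaction is responsible for the commit.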
def getLastEditedBeneficiary(beneficiary):
#let's generate the token here to eliminate issues with an outdated token...
token = generate_token(beneficiary)
url = 'https://well-api.azurewebsites.net/api/well_api'
params = {
"name": "beneficiary_detail",
"beneficiary": beneficiary,
"token": token
}
#{"id": "user_beneficiary_bernhard@wellnuo.com", "MAC": "BENEFICIARY", "email": "bernhard@wellnuo.com", "edit_date": "Fri Aug 16 06:45:01 2024", "c_password": "bern1", "first_name": "Bernhard", "last_name": "Knigge", "address": "776 Dubanski Dr.", "address_city": "San Jose", "address_state": "CA", "address_zip": "95123", "address_country": "United States", "phone_number": "4087055709", "persons": "2", "gender": "M", "race": "W", "born": "1972", "pets": "1", "creds": "", "devs": "[[203, 'Living Room', '', '64B708890B14'], [251, 'Bathroom', '', '64B7088909E8'], [252, 'Bedroom', '', '64B708890734'], [204, 'Bathroom', 'Guest', '64B708890288'], [201, 'Kitchen', 'toaster', '64B708890584'], [202, 'Kitchen', 'stove', '64B7088906D8'], [205, 'Office', '', '64B708897018']]", "tzone": "America/Los_Angeles", "ttl": -1, "_rid": "R60hANIG-K+qTQIAAAAAAg==", "_self": "dbs/R60hAA==/colls/R60hANIG-K8=/docs/R60hANIG-K+qTQIAAAAAAg==/", "_etag": "\"3500a0ae-0000-0800-0000-66bef56d0000\"", "_attachments": "attachments/", "_ts": 1723790701}
response = requests.get(url, params=params)
if response.status_code == 200:
text = response.text
#print(text)
if text == "Log-Out":
return text
if text[0] == "{":
data = json.loads(response.text)
date_string = data["edit_date"]
parsed_date = datetime.datetime.strptime(date_string, '%c')
# Convert the datetime object to a timestamp (epoch time)
epoch_str = str(time.mktime(parsed_date.timetuple()))
devices = data["devs"]
return(epoch_str, devices)
else:
return text,""
else:
logger.debug((f"Failed to retrieve the data, status code: {response.status_code}"))
return "",""
def GetDeploymentNameFromId(Id):
deployment_name = ""
with get_db_connection() as conn:
with conn.cursor() as cur:
sqlr = f"SELECT name FROM deployments WHERE id = {Id}"
deployment_name = ReadCleanStringDB(cur, sqlr)
return deployment_name
def GetDeploymentDetailsFromBeneficiary(beneficiary_id, editing_deployment_id):
results = []
with get_db_connection() as conn:
with conn.cursor() as cur:
if editing_deployment_id == "0":
sqlr = f"SELECT * FROM deployment_details WHERE beneficiary_id = {beneficiary_id}"
else:
sqlr = f"SELECT * FROM deployment_details WHERE deployment_id = {editing_deployment_id}"
cur.execute(sqlr)
results = cur.fetchall()
if len(results) > 0:
return results
return []
def GetTimeZoneOfDeployment(deployment_id):
time_zone_st = 'America/Los_Angeles'
with get_db_connection() as conn:
with conn.cursor() as cur:
sqlr = f"SELECT time_zone_s from public.deployments WHERE deployment_id ={deployment_id}"
time_zone_st = ReadCleanStringDB(cur, sqlr)
return time_zone_st
def GetDeploymentHistoryLast(deployment_id):
results = []
with get_db_connection() as conn:
with conn.cursor() as cur:
sqlr = f"SELECT * FROM deployment_history WHERE deployment_id = {deployment_id} ORDER BY time DESC LIMIT 1"
cur.execute(sqlr)
results = cur.fetchall()
if len(results) > 0:
return results[0]
return []
def StringToEpoch(date_string, time_zone_s):
"""
Convert a date string to epoch timestamp for start of day (midnight) in specified timezone
Args:
date_string (str): Date in 'YYYY-MM-DD' format
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
Returns:
float: Epoch timestamp in seconds
"""
# Parse the date string
date_format = '%Y-%m-%d'
naive_date = datetime.datetime.strptime(date_string, date_format)
# Get the timezone
timezone = pytz.timezone(time_zone_s)
# Localize the date to midnight in the specified timezone
local_date = timezone.localize(naive_date)
# Convert to epoch timestamp
epoch_time = local_date.timestamp()
return epoch_time
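# Example for StringToEpoch: StringToEpoch("2025-01-01", "America/Los_Angeles") -> 1735718400.0
# (midnight PST on that date is 08:00 UTC)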
def LocalDateToUTCEpoch(local_date_str, time_zone_s):
"""
Convert a date string to epoch timestamp for start of day (midnight) in UTC
Args:
local_date_str (str): Date in 'YYYY-MM-DD' format
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
Returns:
float: Epoch UTC timestamp in seconds
"""
timezone = pytz.timezone(time_zone_s)
# Parse the date string
date_format = '%Y-%m-%d'
local_datetime = datetime.datetime.strptime(local_date_str, date_format)
local_datetime = timezone.localize(local_datetime)
utc_datetime = local_datetime.astimezone(pytz.UTC)
epoch_time = int(utc_datetime.timestamp())
return epoch_time
def GetDeploymentDatesBoth(deployment_in):
#when looking at a date, the date is defined in the time zone where the device is located
#take the oldest data from the first member of the deployment
st = time.time()
date_list = []
print(f"&0 ----{time.time() - st}")
time_zone_st = GetTimeZoneOfDeployment(deployment_in)
print(f"&1 ----{time.time() - st}")
oldest_date_dt_utc, devices_all = getOldestDeploymentHistoryFromBeneficiary(deployment_in)
print(f"&2 ----{time.time() - st}")
if oldest_date_dt_utc != None:
#get date in local time zone from UTC datetime
#oldest_date_dt
# Get today's date
local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
oldest_date_dt_local = oldest_date_dt_utc.astimezone(local_timezone)
today_date = datetime.datetime.now(local_timezone)
# Generate a list of date strings from oldest_date to today in inverted order
date_list = [(today_date - timedelta(days=x)).strftime('%Y-%m-%d') for x in range((today_date - oldest_date_dt_local).days + 1)]
print(f"&3 ----{time.time() - st}")
return date_list, devices_all, time_zone_st
def check_file_exists(file_name, bucket_name="daily-maps"):
try:
# Try to get the object's stats - this will raise an exception if the object doesn't exist
stat_result = miniIO_blob_client.stat_object(bucket_name, file_name)
last_modified_utc = stat_result.last_modified
return True, last_modified_utc
except S3Error as e:
if e.code == 'NoSuchKey':
return False, 0
# Re-raise if it's a different error
raise
def get_text_dimensions(text, font, font_scale, thickness):
(width, height), baseline = cv2.getTextSize(text, font, font_scale, thickness)
return {
'width': width,
'height': height,
'baseline': baseline,
'total_height': height + baseline
}
def save_to_minio(image, filename, bucket_name="daily-maps", content_type="image/png"):
"""
Save a PIL Image directly to MinIO
Args:
image (PIL.Image): Image to save
filename (str): Filename to use in MinIO
bucket_name (str): MinIO bucket name
content_type (str): Content type of the file
Returns:
bool: True if successful, False otherwise
"""
logger = logging.getLogger(__name__)
try:
# Convert PIL image to bytes
img_byte_arr = io.BytesIO()
image.save(img_byte_arr, format='PNG')
img_byte_arr.seek(0) # Move to start of the BytesIO buffer
# Upload to MinIO
miniIO_blob_client.put_object(
DAILY_MAPS_BUCKET_NAME,
filename,
img_byte_arr,
length=len(img_byte_arr.getvalue()),
content_type=content_type
)
return True
except Exception as e:
logger.error(f"Error saving to MinIO: {traceback.format_exc()}")
return False
def SaveImageInBlob(file_name, arr_stretched, labels = [], metadata=None):
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
try:
image_with_text = arr_stretched.copy()
for label in labels:
cv2.putText(
image_with_text, # Image
label[0], # Text to write
label[1], # Position (x, y)
label[2], # Font type
label[3], # Font scale
label[4], # Color (BGR)
label[5], # Thickness
label[6] # Line type
)
# Encode the image to a memory buffer using imencode
success, encoded_image = cv2.imencode('.png', image_with_text)
AddToLog(f"success={success}")
if not success:
raise Exception("Could not encode image!")
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
image_bytes = encoded_image.tobytes()
AddToLog(f"len(image_bytes)={len(image_bytes)}")
# Prepare metadata (all values must be strings)
user_metadata = {}
if metadata:
user_metadata = {k: str(v) for k, v in metadata.items()}
miniIO_blob_client.put_object(
DAILY_MAPS_BUCKET_NAME,
file_name,
io.BytesIO(image_bytes),
len(image_bytes),
metadata=user_metadata
)
return True
except Exception as e:
AddToLog(f"{traceback.format_exc()}")
logger.error(f"{traceback.format_exc()}")
return False
def SaveImageInBlobLabelsOut(file_name, arr_stretched, labels, title_labels):
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
try:
image_with_text = arr_stretched.copy()
for label in labels:
cv2.putText(
image_with_text, # Image
label[0], # Text to write
label[1], # Position (x, y)
label[2], # Font type
label[3], # Font scale
label[4], # Color (BGR)
label[5], # Thickness
label[6] # Line type
)
for label in title_labels:
cv2.putText(
image_with_text, # Image
label[0], # Text to write
label[1], # Position (x, y)
label[2], # Font type
label[3], # Font scale
label[4], # Color (BGR)
label[5], # Thickness
label[6] # Line type
)
# Encode the image to a memory buffer using imencode
success, encoded_image = cv2.imencode('.png', image_with_text)
AddToLog(f"success={success}")
if not success:
raise Exception("Could not encode image!")
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
image_bytes = encoded_image.tobytes()
AddToLog(f"len(image_bytes)={len(image_bytes)}")
miniIO_blob_client.put_object(
DAILY_MAPS_BUCKET_NAME,
file_name,
io.BytesIO(image_bytes),
len(image_bytes))
return True
except Exception as e:
AddToLog(f"{traceback.format_exc()}")
logger.error(f"{traceback.format_exc()}")
return False
def GetLocalTimeForDate(selected_date, time_zone_s, minutes_padding = 0):
# Parse the selected date
local_tz = pytz.timezone(time_zone_s)
# Convert selected_date string to datetime object (start of day in local time)
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
local_start = local_tz.localize(local_date)
# Get the next day
local_next = local_start + timedelta(days=1)
if minutes_padding > 0:
local_start = local_start - timedelta(minutes=minutes_padding)
local_next = local_next + timedelta(minutes=minutes_padding)
# Convert to UTC
utc_start = local_start.astimezone(pytz.UTC)
utc_next = local_next.astimezone(pytz.UTC)
# Format as strings
time_from_str = utc_start.strftime("%Y-%m-%d %H:%M:%S")
time_to_str = utc_next.strftime("%Y-%m-%d %H:%M:%S")
return time_from_str + "+0000", time_to_str + "+0000"
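# Example for GetLocalTimeForDate: for "2025-01-01" in America/Los_Angeles it returns
# ("2025-01-01 08:00:00+0000", "2025-01-02 08:00:00+0000"), i.e. the UTC window covering
# that local day, widened by minutes_padding on both ends when given.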
def GetLocalTimeForDateSimple(selected_date, time_zone_s, minutes_padding = 0):
# Parse the selected date
local_tz = pytz.timezone(time_zone_s)
# Convert selected_date string to datetime object (start of day in local time)
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
local_start = local_tz.localize(local_date)
# Get the next day
local_next = local_start + timedelta(days=1)
if minutes_padding > 0:
local_start = local_start - timedelta(minutes=minutes_padding)
local_next = local_next + timedelta(minutes=minutes_padding)
# Convert to UTC
utc_start = local_start.astimezone(pytz.UTC)
utc_next = local_next.astimezone(pytz.UTC)
return utc_start, utc_next
def GetLocalTimeEpochsForDate(selected_date, time_zone_s):
"""
Get start and end of day epochs for a given date in a specific timezone.
Args:
selected_date (str): Date in "YYYY-MM-DD" format
time_zone_s (str): Timezone string (e.g., "America/New_York")
Returns:
tuple: (start_epoch, end_epoch) - Unix timestamps for start and end of day
"""
# Parse the selected date
local_tz = pytz.timezone(time_zone_s)
# Convert selected_date string to datetime object (start of day in local time)
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
local_start = local_tz.localize(local_date)
# Get the next day
local_next = local_start + timedelta(days=1)
# Convert to UTC
utc_start = local_start.astimezone(pytz.UTC)
utc_next = local_next.astimezone(pytz.UTC)
# Convert to epochs (Unix timestamps)
start_epoch = int(utc_start.timestamp())
end_epoch = int(utc_next.timestamp())
return start_epoch, end_epoch
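# Note: the two epochs normally differ by 86400 seconds, but by 82800 or 90000 on the
# 23-hour / 25-hour days around DST transitions, since the window is defined in local time.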
def UTC2Local(utc_time, time_zone_s):
# Convert a UTC datetime to the deployment's local time zone.
# Minimal sketch: assumes utc_time is a datetime; naive values are treated as UTC.
local_tz = pytz.timezone(time_zone_s)
if utc_time.tzinfo is None:
utc_time = pytz.UTC.localize(utc_time)
local_time = utc_time.astimezone(local_tz)
return local_time.strftime("%Y-%m-%d %H:%M:%S%z")
def get_timezone_aware_datetime(time_str, timezone_str="America/Los_Angeles"):
"""
Convert a naive datetime string to a timezone-aware datetime object.
Parameters:
time_str: String in format 'YYYY-MM-DD HH:MM:SS'
timezone_str: String representing the timezone (default: "America/Los_Angeles")
Returns:
datetime: A timezone-aware datetime object
"""
# Parse the naive datetime
naive_dt = datetime.datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
# Get the timezone
tz = pytz.timezone(timezone_str)
# Localize the datetime (make it timezone-aware)
# localize() is the correct way to do this, as it handles DST transitions properly
aware_dt = tz.localize(naive_dt)
return aware_dt
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
"""
Optimized version of array filling from TimeScaleDB data.
Uses vectorized operations for significant speed improvement.
Now handles mtype field with multiple records per minute.
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
# Create device index mapping
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Define column mappings (sensor type to position in record)
columns = {
'temperature': 2,
'humidity': 3,
'pressure': 4,
'light': 5,
'radar': 6
}
# Add sensor columns dynamically (s0-s9)
cols_len = len(columns)
for i in range(10):
columns[f's{i}'] = i + cols_len
# For mtype mode, we need to store all mtype records differently
# Check if we have mtype data
has_mtype_data = len(day_data) > 0 and len(day_data[0]) > 16
if has_mtype_data:
# Store mtype data separately for later processing
global mtype_data_cache
mtype_data_cache = []
for record in day_data:
if record[0] and record[1] and record[16] is not None: # time, device_id, mtype
minute = int((record[0] - start_time).total_seconds() / 60)
if 0 <= minute < arr_source.shape[1]:
mtype_data_cache.append((minute, record[1], record[16], record))
# Pre-process data into a more efficient structure
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
# Process each device's data in bulk
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] * len(columns)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each column type in bulk
for col_name, col_offset in columns.items():
row_idx = base_idx + list(columns.keys()).index(col_name)
values = records_array[:, col_offset]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk (non-mtype mode; with duplicate minute indices the later assignment wins, so one value per minute is kept)
if not has_mtype_data:
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
else:
# For mtype mode, we'll aggregate differently
# Group by minute and use mean/min/max as appropriate
for minute in np.unique(minute_deltas[valid_values]):
minute_mask = minute_deltas == minute
minute_values = values[valid_values & minute_mask]
if len(minute_values) > 0:
if col_name in ['temperature', 'humidity', 'pressure']:
arr_source[row_idx, minute] = np.mean(minute_values)
elif col_name == 'light':
arr_source[row_idx, minute] = np.max(minute_values)
elif col_name.startswith('s'):
# For sensor values, use minimum of positive values
positive_vals = minute_values[minute_values > 0]
if len(positive_vals) > 0:
arr_source[row_idx, minute] = np.min(positive_vals)
return arr_source
def fast_fill_smell_array_from_timescale(day_data, time_from_str, device_to_index, arr_source, timezone_str="Europe/Berlin"):
"""
Optimized version of array filling from TimeScaleDB data.
Uses vectorized operations for significant speed improvement.
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = start_time.replace(tzinfo=timezone.utc)
# Define column mappings (sensor type to position in record)
columns = {
's0': 2,
's1': 3,
's2': 4,
's3': 5,
's4': 6,
's5': 7,
's6': 8,
's7': 9,
's8': 10,
's9': 11
}
## Add sensor columns dynamically
#cols_len = len(columns)
#for i in range(10):
#columns[f'sensor_min_s{i}'] = i + cols_len #smell * 10 + 5
# Pre-process data into a more efficient structure
# Group by device_id to reduce lookup operations
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
# Process each device's data in bulk
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] * len(columns)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
#print(times[0], start_time, (times[0] - start_time).total_seconds())
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each column type in bulk
for col_name, col_offset in columns.items():
row_idx = base_idx + list(columns.keys()).index(col_name)
values = records_array[:, col_offset]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
return arr_source
def fast_fill_radar_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
"""
Optimized version of array filling from TimeScaleDB data.
Uses vectorized operations for significant speed improvement.
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = start_time.replace(tzinfo=timezone.utc)
# Create device index mapping
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Define column mappings (sensor type to position in record)
columns = {
's2': 2,
's3': 3,
's4': 4,
's5': 5,
's6': 6,
's7': 7,
's8': 8
}
# Pre-process data into a more efficient structure
# Group by device_id to reduce lookup operations
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
# Process each device's data in bulk
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] * len(columns)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
#print(times[0], start_time, (times[0] - start_time).total_seconds())
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each column type in bulk
for col_name, col_offset in columns.items():
row_idx = base_idx + list(columns.keys()).index(col_name)
values = records_array[:, col_offset]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
return arr_source
def ScaleTemperature(temperature_in_f):
# Define our key temperature points and their corresponding color values
temp_points = [30, 50, 70, 90, 110]
color_values = [768, 640, 384, 128, 0] # Color values in the rainbow scale
# Clamp the temperature to our range
clamped_temp = max(min(temperature_in_f, 110), 30)
# Find which segment the temperature falls into
for i in range(len(temp_points) - 1):
if temp_points[i] <= clamped_temp <= temp_points[i + 1]:
# Linear interpolation between the two nearest points
t = (clamped_temp - temp_points[i]) / (temp_points[i + 1] - temp_points[i])
color_value = int(color_values[i] + t * (color_values[i + 1] - color_values[i]))
return color_value
# Fallback (should never reach here due to clamping)
return 0
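# Example for ScaleTemperature: 30 F -> 768, 70 F -> 384, 110 F -> 0; inputs outside
# 30..110 F are clamped to that range before interpolation.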
def GetTemperatureColor(temperature_in_f):
color_value = ScaleTemperature(temperature_in_f)
return BestColor(color_value)
def BestColor(in_val):
#maps values 0..1279 to a rainbow color ramp: Red (0) through Green and Blue (~1023) to Violet (1279)
r,g,b=0,0,0
in_val = int(in_val)
if(in_val > 1279):
in_val = 1279
if (in_val < 256):
r = 255
g = in_val
elif (in_val < 512):
r = 511 - in_val
g = 255
elif (in_val < 768):
g = 255
b = in_val-512
elif (in_val < 1024):
g = 1023 - in_val
b = 255
else:
r = in_val - 1024
b = 255
#if (r > 255):
# print(in_val)
# print(int(r),int(g),int(b))
return(int(r),int(g),int(b))
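# Example for BestColor: 0 -> (255, 0, 0) red, 512 -> (0, 255, 0) green,
# 1023 -> (0, 0, 255) blue, 1279 -> (255, 0, 255) violet.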
def GrayColor(in_val):
#maps values 0..255 to a grayscale (r, g, b) tuple, clamping out-of-range input
r,g,b=0,0,0
in_val = int(in_val)
if(in_val < 0):
in_val = 0
if(in_val > 255):
in_val = 255
r = in_val
g = r
b = r
return(int(r),int(g),int(b))
def fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str):
"""
Fill numpy array with data from TimeScaleDB query results.
Parameters:
day_data: List of tuples from database query
time_from_str: Starting datetime string in format 'YYYY-MM-DD HH:MM:SS'
devices_list: List of device IDs
arr_source: Pre-initialized numpy array to fill
Returns:
numpy.ndarray: Filled array
"""
# Parse the start time
#start_time = get_timezone_aware_datetime(time_from_str, timezone_str)
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
# Create mapping of device_ids to their index positions
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Define columns and their positions in the result tuple
columns = {
'avg_temperature': 2,
'avg_humidity': 3,
'pressure_amplitude': 4,
'max_light': 5,
'radar': 6,
'sensor_min_s0': 7,
'sensor_min_s1': 8,
'sensor_min_s2': 9,
'sensor_min_s3': 10,
'sensor_min_s4': 11,
'sensor_min_s5': 12,
'sensor_min_s6': 13,
'sensor_min_s7': 14,
'sensor_min_s8': 15,
'sensor_min_s9': 16
}
# Process each record
for record in day_data:
# Get minute and device_id from record
record_time = record[0] # minute column
device_id = record[1] # device_id column
if record_time and device_id:
# Calculate minute delta
minute_delta = int((record_time - start_time).total_seconds() / 60)
if 0 <= minute_delta < arr_source.shape[1]:
# Calculate base index for this device
base_idx = device_to_index[device_id] * len(columns)
# Fill data for each sensor/measurement type
for col_name, col_offset in columns.items():
value = record[col_offset]
if value is not None: # Skip NULL values
row_idx = base_idx + list(columns.keys()).index(col_name)
arr_source[row_idx, minute_delta] = value
return arr_source
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
"""
Optimized version of array filling from TimeScaleDB data.
Uses vectorized operations for significant speed improvement.
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
# Create device index mapping
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Define column mappings (sensor type to position in record) - KEEP EXACT SAME ORDER as original
columns = {
'avg_temperature': 2,
'avg_humidity': 3,
'pressure_amplitude': 4,
'max_light': 5,
'radar': 6,
'sensor_min_s0': 7,
'sensor_min_s1': 8,
'sensor_min_s2': 9,
'sensor_min_s3': 10,
'sensor_min_s4': 11,
'sensor_min_s5': 12,
'sensor_min_s6': 13,
'sensor_min_s7': 14,
'sensor_min_s8': 15,
'sensor_min_s9': 16
}
# Pre-compute column keys list for consistent indexing
column_keys = list(columns.keys())
# Pre-process data into a more efficient structure
# Group by device_id to reduce lookup operations
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
# Process each device's data in bulk
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] * len(columns)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each column type in bulk
for col_name, col_offset in columns.items():
# Use pre-computed column_keys list for consistent indexing
row_idx = base_idx + column_keys.index(col_name)
values = records_array[:, col_offset]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
return arr_source
def fast_fill_array_from_timescale_single(day_data, time_from_str, devices_list, arr_source, sensor, timezone_str="Europe/Berlin"):
"""
Optimized version of array filling from TimeScaleDB data.
Uses vectorized operations for significant speed improvement.
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
# Create device index mapping
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Pre-process data into a more efficient structure
# Group by device_id to reduce lookup operations
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
if sensor is not None:
columns = {
'avg_temperature': 2,
'avg_humidity': 2,
'pressure_amplitude': 2,
'max_light': 2,
'radar': 2,
'sensor_min_s0': 2,
'sensor_min_s1': 2,
'sensor_min_s2': 2,
'sensor_min_s3': 2,
'sensor_min_s4': 2,
'sensor_min_s5': 2,
'sensor_min_s6': 2,
'sensor_min_s7': 2,
'sensor_min_s8': 2,
'sensor_min_s9': 2
}
else:
columns = {
'avg_temperature': 2,
'avg_humidity': 3,
'pressure_amplitude': 4,
'max_light': 5,
'radar': 6,
'sensor_min_s0': 7,
'sensor_min_s1': 8,
'sensor_min_s2': 9,
'sensor_min_s3': 10,
'sensor_min_s4': 11,
'sensor_min_s5': 12,
'sensor_min_s6': 13,
'sensor_min_s7': 14,
'sensor_min_s8': 15,
'sensor_min_s9': 16
}
column_keys = list(columns.keys())
# Process each device's data in bulk
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] #* len(columns)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each column type in bulk
for col_name, col_offset in columns.items():
row_idx = base_idx + list(columns.keys()).index(col_name)
values = records_array[:, col_offset]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
return arr_source
def CalcExtremes(arr_source, length, height):
"""
Calculate min and max values for each row within legal bounds.
Optimized version using numpy vectorized operations.
Parameters:
arr_source: numpy array of shape (height, length+4) containing data and bounds
length: number of data points to process (typically 1440 for minutes in a day)
height: number of rows in the array
Returns:
numpy array with min/max values stored in columns 1442 and 1443
"""
# Extract the data portion and bounds
data = arr_source[:, :length]
ignore_below = arr_source[:, 1440:1441] # Keep 2D shape for broadcasting
ignore_above = arr_source[:, 1441:1442] # Keep 2D shape for broadcasting
# Create masks for valid values
above_min_mask = data >= ignore_below
below_max_mask = data <= ignore_above
valid_mask = above_min_mask & below_max_mask
# Create a masked array to handle invalid values
masked_data = np.ma.array(data, mask=~valid_mask)
# Calculate min and max values for each row
row_mins = np.ma.min(masked_data, axis=1).filled(-0.001)
row_maxs = np.ma.max(masked_data, axis=1).filled(-0.001)
# Store results
arr_source[:, 1442] = row_mins
arr_source[:, 1443] = row_maxs
return arr_source
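# Illustrative check (assumed layout): columns 0-1439 hold minute data, 1440/1441 hold
# the legal bounds, and CalcExtremes writes the in-bounds min/max into 1442/1443.
def _calc_extremes_example():
    arr = np.full((1, 1444), -0.001, dtype=np.float32)
    arr[0, :1440] = 25.0
    arr[0, 100] = 999.0   # out-of-range spike, excluded by the bounds below
    arr[0, 1440] = 1.0    # ignore_below
    arr[0, 1441] = 60.0   # ignore_above
    out = CalcExtremes(arr, 1440, 1)
    return out[0, 1442], out[0, 1443]   # both 25.0: the spike is ignored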
def plot(arr, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
color='blue', style='histogram', bins=1000):
"""
Plot a 1D numpy array as a line or scatter plot
Parameters:
arr : 1D numpy array
title : str, plot title
figsize : tuple, figure size in inches
color : str, line/point color
style : str, 'line' or 'scatter'
"""
title = filename
plt.figure(figsize=figsize)
x = np.arange(len(arr))
if style == 'line':
x = np.arange(len(arr))
plt.plot(x, arr, color=color)
elif style == 'scatter':
x = np.arange(len(arr))
plt.scatter(x, arr, color=color, alpha=0.6)
elif style == 'histogram':
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
plt.yscale('log') # Using log scale for better visualization
plt.xlabel('Signal Value')
plt.ylabel('Frequency')
plt.title(title)
plt.xlabel('Index')
plt.ylabel('Value')
plt.grid(True, alpha=0.3)
plt.tight_layout()
plt.savefig(filename)
plt.close()
print(f"Plot saved to: {filename}")
#plt.show()
def ShowArray(arr, threshold, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
color='blue', style='histogram', bins=1000):
"""
Plot a 1D numpy array as a line or scatter plot
Parameters:
arr : 1D numpy array
title : str, plot title
figsize : tuple, figure size in inches
color : str, line/point color
style : str, 'line' or 'scatter'
"""
title = filename
plt.figure(figsize=figsize)
x = np.arange(len(arr))
if style == 'line':
x = np.arange(len(arr))
plt.plot(x, arr, color=color)
plt.axhline(y=threshold, color='red', linestyle='--',
label=f'Threshold: {threshold:.3f}')
plt.xlabel('Index')
plt.ylabel('Value')
elif style == 'scatter':
x = np.arange(len(arr))
plt.scatter(x, arr, color=color, alpha=0.6)
elif style == 'histogram':
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
plt.yscale('log') # Using log scale for better visualization
plt.xlabel('Signal Value')
plt.ylabel('Frequency')
plt.title(title)
plt.xlabel('Index')
plt.ylabel('Value')
plt.grid(True, alpha=0.3)
plt.tight_layout()
plt.savefig(filename)
plt.close()
print(f"Plot saved to: {filename}")
#plt.show()
def AddLimits_optimized(arr_source, devices_c, sensors_c, percentile):
"""
Vectorized version of AddLimits that processes all sensors at once.
Parameters:
arr_source: array of shape (devices_c * sensors_c, 1444)
devices_c: number of devices
sensors_c: number of sensors per device
percentile: parameter for clean_data_vectorized
"""
total_sensors = devices_c * sensors_c
# Create arrays of sensor indices for all rows
sensor_indices = np.arange(total_sensors) % sensors_c
# Convert sensor_legal_values into arrays for vectorized access
sensor_types = np.array([s_table[i] for i in range(sensors_c)])
min_vals = np.array([sensor_legal_values[t][0] for t in sensor_types])
max_vals = np.array([sensor_legal_values[t][1] for t in sensor_types])
windows = np.array([sensor_legal_values[t][2] for t in sensor_types])
# Get values for each row based on sensor type
row_windows = windows[sensor_indices]
row_mins = min_vals[sensor_indices]
row_maxs = max_vals[sensor_indices]
# Process rows that need cleaning (window > 2)
clean_mask = row_windows > 2
if np.any(clean_mask):
# Clean each row with its corresponding window size
for window in np.unique(row_windows[clean_mask]):
# Get indices of rows that need this window size
rows_to_clean = np.where(clean_mask & (row_windows == window))[0]
# Clean each row individually (since clean_data_vectorized expects 1D input)
for row_idx in rows_to_clean:
arr_source[row_idx, :1440] = clean_data_vectorized(
arr_source[row_idx, :1440],
window,
percentile
)
# Set min/max values for all rows at once
arr_source[:, 1440] = row_mins
arr_source[:, 1441] = row_maxs
return arr_source
def GenerateTestPattern(arr_source, devices_c):
#smell_min = 1
#no_smell = 102400000
#smell_max = no_smell - 1
#sensor_legal_values = {
#"radar": (0, 1000, 1),
#"co2": (smell_min, smell_max, 31),
#"humidity": (1, 99, 31),
#"light": (0, 4095, 1),
#"pressure": (0, 10000, 5),
#"temperature": (1, 60, 31),
#"voc": (smell_min, smell_max, 31),
length = 1440
measurements_per_device = 85 # T, H, P, L, R + 80 smell sensors
dev_count = len(devices_c[0])
for d in range(dev_count):
for r in range(measurements_per_device):
y = d * measurements_per_device + r
up_down = r + 1
if r == 0: #T
upper = 30
lower = 20
elif r == 1: #H
upper = 30
lower = 20
elif r == 2: #P
upper = 30
lower = 20
elif r == 3: #L
upper = 30
lower = 20
elif r == 4: #R
upper = 30
lower = 20
else:
upper = 250
lower = 150
for x in range(0, length, 2 * up_down):
for z in range(d+10):
arr_source[y, z] = 0
z = z + 1
arr_source[y, z+x:min(z+x + up_down, length)] = upper
# Lower part
arr_source[y, min(z+x + up_down, length):min(z+x + 2 * up_down, length)] = lower
return arr_source
def AddLimits_optimized_80(arr_source, devices_c, percentile):
"""
Updated AddLimits for 80-sensor unwrapped format.
arr_source shape: (devices_c * 85, 1444) - 85 = 5 other measurements + 80 sensors
"""
total_rows = arr_source.shape[0]
measurements_per_device = 85 # 5 other + 80 sensors
# Create sensor type mapping for each row using the simple s_table_80
sensor_types = []
for device_idx in range(devices_c):
sensor_types.extend(s_table_80) # temperature, humidity, pressure, light, radar, s0, s1, ..., s79
sensor_types = np.array(sensor_types[:total_rows])
# Get legal values for each sensor type
min_vals = np.array([sensor_legal_values.get(t, [-0.001, 10000, 1])[0] for t in sensor_types])
max_vals = np.array([sensor_legal_values.get(t, [-0.001, 10000, 1])[1] for t in sensor_types])
windows = np.array([sensor_legal_values.get(t, [-0.001, 10000, 1])[2] for t in sensor_types])
# Process rows that need cleaning (window > 2)
clean_mask = windows > 2
if np.any(clean_mask):
for window in np.unique(windows[clean_mask]):
rows_to_clean = np.where(clean_mask & (windows == window))[0]
for row_idx in rows_to_clean:
arr_source[row_idx, :1440] = clean_data_vectorized(
arr_source[row_idx, :1440],
window,
percentile
)
# Set min/max values
arr_source[:, 1440] = min_vals
arr_source[:, 1441] = max_vals
return arr_source
def AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile):
"""
Vectorized limit-setting for the smell sensors: readings outside [smell_min, smell_max]
are replaced with no_smell, then per-row legal min/max values are stored in columns 1440/1441.
Parameters:
arr_source: array of shape (devices_c * sensors_c, 1444)
devices_c: number of devices
sensors_c: number of sensors per device
percentile: kept for interface compatibility (unused here)
"""
total_sensors = devices_c * sensors_c
# Create arrays of sensor indices for all rows
sensor_indices = np.arange(total_sensors) % sensors_c
# Convert sensor_legal_values into arrays for vectorized access
sensor_types = np.array([smells_table[i] for i in range(sensors_c)])
min_vals = np.array([smell_legal_values[t][0] for t in sensor_types])
max_vals = np.array([smell_legal_values[t][1] for t in sensor_types])
# Get values for each row based on sensor type
row_mins = min_vals[sensor_indices]
row_maxs = max_vals[sensor_indices]
# Replace values smaller than smell_min and larger than smell_max with no_smell
# Create a mask for the data points (first 1440 columns)
data_mask_below = arr_source[:, :1440] < smell_min
data_mask_above = arr_source[:, :1440] > smell_max
data_mask_invalid = data_mask_below | data_mask_above
# Replace invalid values with no_smell
arr_source[:, :1440][data_mask_invalid] = no_smell
# Set min/max values for all rows at once
arr_source[:, 1440] = row_mins
arr_source[:, 1441] = row_maxs
return arr_source
def AddLimits(arr_source, devices_c, sensors_c, percentile):
for y in range(devices_c*sensors_c):
sensor_index = y % sensors_c
min_ok, max_ok, window = sensor_legal_values[s_table[sensor_index]]
#if EnablePlot:
#if (y == 33):
#print("stop")
#plot(arr_source[y, :1440], "before_clean_sensor.png")
if window > 2:
arr_source[y, :1440] = clean_data_vectorized(arr_source[y, :1440], window, percentile)
#if EnablePlot:
#if (y == 33):
#print("stop")
#plot(arr_source[y, :1440], "after_clean_sensor.png")
arr_source[y][1440] = min_ok
arr_source[y][1441] = max_ok
return arr_source
def clean_data_vectorized(data, window, percentile):
"""
Vectorized version of clean_data function using pure numpy
First removes zeros, then cleans outliers
Parameters:
data: numpy array of sensor readings
window: int, size of rolling window
percentile: float, percentile threshold for deviation filtering
"""
# Create a copy to avoid modifying original data
working_data = data.copy()
# Replace zeros with NaN
zero_mask = working_data == 0
working_data[zero_mask] = np.nan
# Create rolling window view of the data
def rolling_window(a, window):
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
# Pad array for edge handling
pad_width = window // 2
padded = np.pad(working_data, pad_width, mode='edge')
# Create rolling windows
windows = rolling_window(padded, window)
# Calculate rolling median (ignoring NaN values)
medians = np.nanmedian(windows, axis=1)
# Forward/backward fill any NaN in medians
# Forward fill
mask = np.isnan(medians)
idx = np.where(~mask, np.arange(mask.shape[0]), 0)
np.maximum.accumulate(idx, out=idx)
medians[mask] = medians[idx[mask]]
# Backward fill any remaining NaNs
mask = np.isnan(medians)
idx = np.where(~mask, np.arange(mask.shape[0]), mask.shape[0] - 1)
idx = np.minimum.accumulate(idx[::-1])[::-1]
medians[mask] = medians[idx[mask]]
# Calculate deviations (ignoring NaN values)
deviations = np.abs(working_data - medians)
# Calculate threshold (ignoring NaN values)
threshold = np.nanpercentile(deviations, percentile)
# Create mask and replace outliers with median values
# Points are good if they're not NaN and deviation is within threshold
good_points = (~np.isnan(working_data)) & (deviations <= threshold)
# Replace all bad points (including zeros and outliers) with median values
result = np.where(good_points, working_data, medians)
return result
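# Behaviour sketch (hypothetical input): zeros are treated as missing and replaced by
# the rolling median, and points whose deviation from that median exceeds the chosen
# percentile threshold are replaced as well.
def _clean_data_vectorized_example():
    data = np.array([10.0, 10.0, 0.0, 10.0, 500.0, 10.0, 10.0, 10.0, 10.0])
    # With window=3 and percentile=90 both the zero and the 500 spike come back as 10.0
    return clean_data_vectorized(data, 3, 90)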
def process_chunk(args):
"""
Process a chunk of rows
"""
chunk, sensors_c, sensor_legal_values, s_table, window, percentile = args
result = np.copy(chunk)
# Process all time series in the chunk at once
result[:, :1440] = np.array([
clean_data_vectorized(row[:1440], window, percentile)
for row in chunk
])
# Set limits for all rows in chunk using vectorized operations
sensor_indices = np.arange(len(chunk)) % sensors_c
min_values = np.array([sensor_legal_values[s_table[i]][0] for i in sensor_indices])
max_values = np.array([sensor_legal_values[s_table[i]][1] for i in sensor_indices])
result[:, 1440] = min_values
result[:, 1441] = max_values
return result
def FillImage_backward_compatible(scaled_day, devices_c, arr_stretched_template, bw):
"""
Backward compatible visualization that handles both mtype 0 and mtype 17 formats
"""
measurements_per_device = 85
#for device_idx in range(devices_c):
#device_offset = device_idx * measurements_per_device
#if device_idx == 0: # Debug first device only
#print(f"\nDevice {device_idx} smell sensor debug (minute 0):")
#for sensor_idx in range(10):
#row_idx = device_offset + 5 + sensor_idx
#if row_idx < scaled_day.shape[0]:
#value = scaled_day[row_idx, 0]
#min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
#max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
#print(f" s{sensor_idx}: row={row_idx}, value={value}, min={min_val},max={max_val}")
minutes = scaled_day.shape[1] - 4
vocs_scaled = {}
# Process each device
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
device_y_offset = device_idx * 150
# Fill visualization for all minutes
for minute in range(minutes):
# Environmental sensors (5 stripes * 10 pixels each)
for env_idx in range(5):
row_idx = device_offset + env_idx
value = scaled_day[row_idx, minute] if row_idx < scaled_day.shape[0] else -0.001
# Calculate color with bounds checking
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
if bw:
color_val = max(0, min(255, int(normalized * 255)))
color = [color_val, color_val, color_val]
else:
if env_idx == 0: # Temperature
temp_f = value * 9/5 + 32
color = list(GetTemperatureColor(temp_f))
else:
color_val = max(0, min(1279, int(normalized * 1279)))
color = list(BestColor(color_val))
# Ensure all color values are valid uint8
color = [max(0, min(255, int(c))) for c in color]
else:
color = [0, 0, 0]
# Fill 10-pixel stripe
for stripe_y in range(10):
pixel_y = device_y_offset + env_idx * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
# Smell sensors - handle both old format (mtype 0/17) and unwrapped format
# For old format: s0-s9 are directly at positions 5-14 (device_offset + 5 through 14)
# These were already filled by unwrapping, so we just read them directly
for group_idx in range(10):
# For mtype 0 data, after unwrapping, each s0-s9 sensor was duplicated
# across 8 positions. We can just read the first occurrence.
# Position for s0 is at device_offset + 5 + (0*10 + 0) = device_offset + 5
# Position for s1 is at device_offset + 5 + (0*10 + 1) = device_offset + 6, etc.
sensor_row = device_offset + 5 + group_idx
if sensor_row < scaled_day.shape[0]:
value = scaled_day[sensor_row, minute]
if value >= 0:
min_val = scaled_day[sensor_row, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[sensor_row, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
# Invert like original VOC sensors
normalized = 1 - normalized
if bw:
color_val = max(0, min(255, int(normalized * 255)))
color = [color_val, color_val, color_val]
else:
color_val = max(0, min(1279, int(normalized * 1279)))
color = list(BestColor(color_val))
# Ensure valid uint8 values
color = [max(0, min(255, int(c))) for c in color]
else:
color = [0, 0, 0]
# Fill 10-pixel stripe
for stripe_y in range(10):
pixel_y = device_y_offset + (5 + group_idx) * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
# Create vocs_scaled array (empty for now)
vocs_scaled_array = np.array([])
return arr_stretched_template, vocs_scaled_array
def FillImage_old_format(scaled_day, devices_c, arr_stretched_template, bw, group_by):
"""
Old format visualization with group_by support
"""
minutes = scaled_day.shape[1] - 4
measurements_per_device = 85
sensors_c = 15 # 5 environmental + 10 smell
# +++ 1. DEFINE A CONSTANT FOR THE STRIPE HEIGHT +++
STRIPE_HEIGHT = 8
vocs_scaled = {}
# Collect VOC data
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
voc_rows = []
for sensor_idx in range(10):
sensor_row_idx = device_offset + 5 + sensor_idx
if sensor_row_idx < scaled_day.shape[0]:
voc_rows.append(sensor_row_idx)
if voc_rows:
voc_data = scaled_day[voc_rows, :minutes]
vocs_scaled[device_idx] = voc_data
# Calculate pixel positions based on group_by mode
def get_pixel_y(device_idx, sensor_idx, stripe_offset):
"""Calculate Y pixel position based on grouping mode"""
if group_by == "sensortype":
# All devices' sensor 0 together, then all sensor 1, etc.
return (sensor_idx * devices_c + device_idx) * STRIPE_HEIGHT + stripe_offset
else:
# All sensors for device 0, then all for device 1, etc.
return (device_idx * sensors_c + sensor_idx) * STRIPE_HEIGHT + stripe_offset
# Fill visualization
for minute in range(minutes):
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Environmental sensors (5 stripes × STRIPE_HEIGHT pixels)
for env_idx in range(5):
row_idx = device_offset + env_idx
if row_idx < scaled_day.shape[0]:
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
if env_idx == 0:
temp_f = value * 9/5 + 32
color = list(GetTemperatureColor(temp_f))
else:
color = list(BestColor(int(normalized * 1279)))
else:
color = [0, 0, 0]
for stripe_y in range(STRIPE_HEIGHT):
pixel_y = get_pixel_y(device_idx, env_idx, stripe_y)
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
# Smell sensors (10 stripes × STRIPE_HEIGHT pixels)
for smell_idx in range(10):
row_idx = device_offset + 5 + smell_idx
if row_idx < scaled_day.shape[0]:
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
normalized = 1 - normalized
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
color = list(BestColor(int(normalized * 1279)))
else:
color = [0, 0, 0]
for stripe_y in range(STRIPE_HEIGHT):
pixel_y = get_pixel_y(device_idx, 5 + smell_idx, stripe_y)
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
if vocs_scaled:
all_voc_data = [vocs_scaled[device_idx] for device_idx in sorted(vocs_scaled.keys())]
vocs_scaled_array = np.vstack(all_voc_data)
else:
vocs_scaled_array = np.array([])
return arr_stretched_template, vocs_scaled_array
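# Layout sketch (illustrative helper that mirrors get_pixel_y above): with STRIPE_HEIGHT = 8
# and 15 sensors per device, grouping by device stacks device0:s0..s14 then device1:s0..s14,
# while grouping by sensor type interleaves the devices inside each sensor band.
def _old_format_pixel_y_example(devices_c=3, sensors_c=15, stripe_height=8):
    def by_device(device_idx, sensor_idx):
        return (device_idx * sensors_c + sensor_idx) * stripe_height
    def by_sensortype(device_idx, sensor_idx):
        return (sensor_idx * devices_c + device_idx) * stripe_height
    # Device 1, sensor 0 starts at row 120 when grouped by device, row 8 by sensor type
    return by_device(1, 0), by_sensortype(1, 0)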
def FillImage_new_format(scaled_day, devices_c, arr_stretched_template, bw, group_by):
"""
New format visualization with group_by support
group_by == "sensortype": Groups all devices by sensor type
group_by != "sensortype": Groups all sensors by device
"""
ENV_STRIPE_HEIGHT = 8
ENV_SENSOR_COUNT = 5
SMELL_SENSOR_COUNT = 80
ENV_BLOCK_HEIGHT = ENV_SENSOR_COUNT * ENV_STRIPE_HEIGHT # 5 * 8 = 40
DEVICE_BLOCK_HEIGHT = ENV_BLOCK_HEIGHT + SMELL_SENSOR_COUNT # 40 + 80 = 120
minutes = scaled_day.shape[1] - 4
measurements_per_device = 85
vocs_scaled = {}
# Collect VOC data
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
voc_rows = []
for decade in range(8):
for voc_sensor in [5, 6, 7, 8, 9]:
sensor_row_idx = device_offset + 5 + (decade * 8 + voc_sensor)
if sensor_row_idx < scaled_day.shape[0]:
voc_rows.append(sensor_row_idx)
if voc_rows:
voc_data = scaled_day[voc_rows, :minutes]
vocs_scaled[device_idx] = voc_data
# Calculate pixel positions based on group_by mode
def get_env_pixel_y(device_idx, env_idx, stripe_offset):
"""Environmental sensor Y position (8 pixels tall)"""
if group_by == "sensortype":
# This logic also needs to use the new height
return env_idx * devices_c * ENV_STRIPE_HEIGHT + device_idx * ENV_STRIPE_HEIGHT + stripe_offset
else:
# CORRECTED: Use the new DEVICE_BLOCK_HEIGHT constant (120)
return device_idx * DEVICE_BLOCK_HEIGHT + env_idx * ENV_STRIPE_HEIGHT + stripe_offset
def get_smell_pixel_y(device_idx, sensor_idx):
"""Smell sensor Y position (1 pixel tall)"""
if group_by == "sensortype":
# CORRECTED: Use the new ENV_BLOCK_HEIGHT constant
env_total_height = ENV_SENSOR_COUNT * devices_c * ENV_STRIPE_HEIGHT
return env_total_height + sensor_idx * devices_c + device_idx
else:
# CORRECTED: Use the new constants
# This is equivalent to your original correct line, but more readable
return device_idx * DEVICE_BLOCK_HEIGHT + ENV_BLOCK_HEIGHT + sensor_idx
# Fill visualization
for minute in range(minutes):
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Environmental sensors (5 stripes × 8 pixels)
for env_idx in range(5):
row_idx = device_offset + env_idx
if row_idx < scaled_day.shape[0]:
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
if env_idx == 0:
temp_f = value * 9/5 + 32
color = list(GetTemperatureColor(temp_f))
else:
color = list(BestColor(int(normalized * 1279)))
else:
color = [0, 0, 0]
for stripe_y in range(8):
pixel_y = get_env_pixel_y(device_idx, env_idx, stripe_y)
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
# Smell sensors (80 individual pixels)
for sensor_idx in range(80):
row_idx = device_offset + 5 + sensor_idx
if row_idx < scaled_day.shape[0]:
pixel_y = get_smell_pixel_y(device_idx, sensor_idx)
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
normalized = 1 - normalized
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
color = list(BestColor(int(normalized * 1279)))
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
else:
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = [0, 0, 0]
if vocs_scaled:
all_voc_data = [vocs_scaled[device_idx] for device_idx in sorted(vocs_scaled.keys())]
vocs_scaled_array = np.vstack(all_voc_data)
else:
vocs_scaled_array = np.array([])
return arr_stretched_template, vocs_scaled_array
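# Geometry sketch (follows the constants above; illustrative only): in the new format each
# device occupies a 120-pixel block -- five 8-pixel environmental stripes (40 px) followed
# by 80 one-pixel smell rows.
def _new_format_row_example(device_idx, env_idx=None, smell_idx=0):
    ENV_STRIPE_HEIGHT, ENV_SENSOR_COUNT, SMELL_SENSOR_COUNT = 8, 5, 80
    env_block = ENV_SENSOR_COUNT * ENV_STRIPE_HEIGHT      # 40
    device_block = env_block + SMELL_SENSOR_COUNT         # 120
    if env_idx is not None:
        return device_idx * device_block + env_idx * ENV_STRIPE_HEIGHT
    return device_idx * device_block + env_block + smell_idx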
def FillImage_mixed_formats(scaled_day, devices_c, arr_stretched_template, bw, device_formats):
"""
Handle mixed old/new format devices in same image
device_formats: dict mapping device_idx -> 'old' or 'new'
"""
minutes = scaled_day.shape[1] - 4
measurements_per_device = 85
vocs_scaled = {}
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
device_y_offset = device_idx * 150
device_format = device_formats.get(device_idx, 'old')
print(f"\nDevice {device_idx} (format={device_format}):")
print(f" device_offset={device_offset}, device_y_offset={device_y_offset}")
print(f" Expected Y range: {device_y_offset} to {device_y_offset + 149}")
for minute in range(minutes):
if minute == 0 and device_idx < 2: # First 2 devices only
for env_idx in range(5):
row_idx = device_offset + env_idx
value = scaled_day[row_idx, minute]
pixel_y_start = device_y_offset + env_idx * 10
print(f" Env sensor {env_idx}: row_idx={row_idx}, value={value:.2f}, pixel_y={pixel_y_start}-{pixel_y_start+9}")
# Environmental sensors (same for both formats)
for env_idx in range(5):
row_idx = device_offset + env_idx
value = scaled_day[row_idx, minute] if row_idx < scaled_day.shape[0] else -0.001
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
if bw:
color_val = max(0, min(255, int(normalized * 255)))
color = [color_val, color_val, color_val]
else:
if env_idx == 0:
temp_f = value * 9/5 + 32
color = list(GetTemperatureColor(temp_f))
else:
color_val = max(0, min(1279, int(normalized * 1279)))
color = list(BestColor(color_val))
color = [max(0, min(255, int(c))) for c in color]
else:
color = [0, 0, 0]
for stripe_y in range(10):
pixel_y = device_y_offset + env_idx * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
# Smell sensors - format-specific handling
if device_format == 'old':
# Old format: read s0-s9 directly from positions 5-14
for group_idx in range(10):
sensor_row = device_offset + 5 + group_idx
if sensor_row < scaled_day.shape[0]:
value = scaled_day[sensor_row, minute]
if value >= 0:
value = 102400000 - value if value <= 102400000 else 0
min_val = scaled_day[sensor_row, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[sensor_row, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
normalized = 1 - normalized
if bw:
color_val = max(0, min(255, int(normalized * 255)))
color = [color_val, color_val, color_val]
else:
color_val = max(0, min(1279, int(normalized * 1279)))
color = list(BestColor(color_val))
color = [max(0, min(255, int(c))) for c in color]
else:
color = [0, 0, 0]
for stripe_y in range(10):
pixel_y = device_y_offset + (5 + group_idx) * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
else: # device_format == 'new'
# New format: aggregate 8 sensors per group from 80 unwrapped sensors
for group_idx in range(10):
group_values = []
for i in range(8):
sensor_idx = group_idx * 8 + i
if sensor_idx < 80:
row_idx = device_offset + 5 + sensor_idx
if row_idx < scaled_day.shape[0]:
val = scaled_day[row_idx, minute]
if val >= 0:
group_values.append(val)
if group_values:
avg_value = sum(group_values) / len(group_values)
ref_row = device_offset + 5 + group_idx * 8
if ref_row < scaled_day.shape[0]:
min_val = scaled_day[ref_row, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[ref_row, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = max(0, min(1, (avg_value - min_val) / (max_val - min_val)))
else:
normalized = 0.5
normalized = 1 - normalized
if bw:
color_val = max(0, min(255, int(normalized * 255)))
color = [color_val, color_val, color_val]
else:
color_val = max(0, min(1279, int(normalized * 1279)))
color = list(BestColor(color_val))
color = [max(0, min(255, int(c))) for c in color]
else:
color = [128, 128, 128]
else:
color = [0, 0, 0]
for stripe_y in range(10):
pixel_y = device_y_offset + (5 + group_idx) * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
return arr_stretched_template, np.array([])
def FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw, use_mtype_mode=False, day_data=None):
"""
Optimized version of FillImage function that fills the stretched array with colored sensor data.
Now supports mtype-based positioning for new sensor layout.
Parameters:
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
devices_c: number of devices
sensors_c: number of sensors per device
arr_stretched: 3D array to fill with RGB values
group_by: grouping strategy ("sensortype" or other)
bw: boolean flag for black and white output
use_mtype_mode: boolean flag for new mtype-based positioning
day_data: raw data from database (needed for mtype values)
Returns:
arr_stretched: Filled array with RGB values
and vocs_scaled array from 0 to 1280
"""
stripes = devices_c * sensors_c
minutes = arr_stretched.shape[1]
if use_mtype_mode and day_data:
# New mtype mode: process each mtype record individually
return process_mtype_data(day_data, devices_c, sensors_c, arr_stretched, minutes, bw)
else:
# Old mode: use existing logic
return process_legacy_data(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw, minutes)
def process_mtype_data(day_data, devices_c, sensors_c, arr_stretched, minutes, bw):
"""
Process data with mtype-based positioning.
"""
# Parse start time from first record to calculate minute offsets
if not day_data:
return arr_stretched, np.array([])
start_time = day_data[0][0].replace(hour=0, minute=0, second=0, microsecond=0)
# Group data by minute and mtype
minute_mtype_data = defaultdict(dict)
for record in day_data:
if record[0] and record[1] and record[16] is not None: # time, device_id, mtype
minute = int((record[0] - start_time).total_seconds() / 60)
if 0 <= minute < minutes:
mtype = record[16]
if 100 <= mtype <= 170:
if minute not in minute_mtype_data:
minute_mtype_data[minute] = {}
minute_mtype_data[minute][mtype] = record
# Color calculation functions
def best_color_vectorized(vals):
vals = np.clip(vals, 0, 1279).astype(np.int32)
r = np.zeros_like(vals, dtype=np.int32)
g = np.zeros_like(vals, dtype=np.int32)
b = np.zeros_like(vals, dtype=np.int32)
mask1 = vals < 256
r[mask1] = 255
g[mask1] = vals[mask1]
mask2 = (vals >= 256) & (vals < 512)
r[mask2] = 511 - vals[mask2]
g[mask2] = 255
mask3 = (vals >= 512) & (vals < 768)
g[mask3] = 255
b[mask3] = vals[mask3] - 512
mask4 = (vals >= 768) & (vals < 1024)
g[mask4] = 1023 - vals[mask4]
b[mask4] = 255
mask5 = vals >= 1024
r[mask5] = vals[mask5] - 1024
b[mask5] = 255
return r, g, b
def gray_color_vectorized(vals):
vals = np.clip(vals, 0, 255).astype(np.int32)
return vals, vals, vals
color_func = gray_color_vectorized if bw else best_color_vectorized
# Collect all sensor values for min/max calculation
all_sensor_values = []
voc_values = []
for minute_data in minute_mtype_data.values():
for mtype, record in minute_data.items():
# Extract sensor values (s0-s9 are at indices 7-16)
for sensor_idx in range(10):
sensor_val = record[7 + sensor_idx] # s0-s9 at positions 7-16
if sensor_val is not None and sensor_val > 0:
all_sensor_values.append(sensor_val)
# VOC sensors are typically s5-s9
if sensor_idx >= 5:
voc_values.append(sensor_val)
# Calculate global min/max for normalization
if all_sensor_values:
global_min = np.min(all_sensor_values)
global_max = np.max(all_sensor_values)
if global_max > global_min:
scale_factor = (1279 if not bw else 255) / (global_max - global_min)
else:
scale_factor = 1
global_min = 0
else:
global_min = 0
global_max = 1
scale_factor = 1
# Process each minute
for minute, mtype_data in minute_mtype_data.items():
for mtype, record in mtype_data.items():
if 100 <= mtype <= 170:
base_y = mtype - 100
# Process each sensor (s0-s9)
for sensor_idx in range(10):
sensor_val = record[7 + sensor_idx] # s0-s9 at positions 7-16
if sensor_val is not None and sensor_val > 0:
# Calculate y position
y_pos = base_y + sensor_idx
if 0 <= y_pos < arr_stretched.shape[0]:
# Normalize the sensor value
normalized_val = scale_factor * (sensor_val - global_min)
# Invert VOC sensors (s5-s9)
if sensor_idx >= 5:
normalized_val = (1279 if not bw else 255) - normalized_val
# Convert to color
r, g, b = color_func(np.array([normalized_val]))
arr_stretched[y_pos, minute] = [r[0], g[0], b[0]]
# Return normalized VOC values
if voc_values:
voc_normalized = [(scale_factor * (v - global_min)) for v in voc_values]
voc_inverted = [(1279 if not bw else 255) - v for v in voc_normalized]
return arr_stretched, np.array(voc_inverted)
else:
return arr_stretched, np.array([])
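# Placement sketch (assumed record layout, matching the mtype handling above): mtype values
# 100-170 select a base row of (mtype - 100), and each of the ten sensors lands on
# base_y + sensor_idx.
def _mtype_row_example(mtype=130, sensor_idx=3):
    base_y = mtype - 100
    return base_y + sensor_idx   # 33 for the defaults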
def process_legacy_data(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw, minutes):
"""
Process data using the original logic for mtype=17 or non-mtype data.
"""
stripes = devices_c * sensors_c
stretch_by = arr_stretched.shape[0] // stripes
# Pre-calculate VOC rows mask
if group_by != "sensortype":
voc_rows = np.arange(stripes) >= 5 * devices_c
else:
voc_rows = (np.arange(stripes) % sensors_c) >= 5
# Pre-calculate destination row mapping for sensortype grouping
if group_by == "sensortype":
row_indices = np.arange(stripes)
sensor_indices = row_indices % sensors_c
device_indices = row_indices // sensors_c
dest_rows = sensor_indices * devices_c + device_indices
dest_rows = dest_rows[:, np.newaxis] * stretch_by + np.arange(stretch_by)
else:
row_indices = np.arange(stripes)[:, np.newaxis] * stretch_by + np.arange(stretch_by)
# Color calculation functions
def best_color_vectorized(vals):
vals = np.clip(vals, 0, 1279).astype(np.int32)
r = np.zeros_like(vals, dtype=np.int32)
g = np.zeros_like(vals, dtype=np.int32)
b = np.zeros_like(vals, dtype=np.int32)
mask1 = vals < 256
r[mask1] = 255
g[mask1] = vals[mask1]
mask2 = (vals >= 256) & (vals < 512)
r[mask2] = 511 - vals[mask2]
g[mask2] = 255
mask3 = (vals >= 512) & (vals < 768)
g[mask3] = 255
b[mask3] = vals[mask3] - 512
mask4 = (vals >= 768) & (vals < 1024)
g[mask4] = 1023 - vals[mask4]
b[mask4] = 255
mask5 = vals >= 1024
r[mask5] = vals[mask5] - 1024
b[mask5] = 255
return r, g, b
def gray_color_vectorized(vals):
vals = np.clip(vals, 0, 255).astype(np.int32)
return vals, vals, vals
color_func = gray_color_vectorized if bw else best_color_vectorized
# Process all rows at once
valid_mask = scaled_day[:, :minutes] != -0.001
big_min = scaled_day[:, 1442:1443] # Keep 2D shape for broadcasting
big_max = scaled_day[:, 1443:1444]
# Calculate k factors where max > min
valid_range_mask = big_max > big_min
k = np.zeros_like(big_min)
k[valid_range_mask] = (1280 if not bw else 255) / (big_max[valid_range_mask] - big_min[valid_range_mask])
# Calculate normalized values for all rows at once
normalized_vals = np.zeros_like(scaled_day[:, :minutes])
valid_range_indices = np.where(valid_range_mask)[0]
normalized_vals[valid_range_indices] = (
k[valid_range_indices] *
(scaled_day[valid_range_indices, :minutes] - big_min[valid_range_indices])
)
# Invert VOC rows
normalized_vals[voc_rows] = (1279 if not bw else 255) - normalized_vals[voc_rows]
# Apply valid mask
normalized_vals[~valid_mask] = 0
# Convert to RGB
r, g, b = color_func(normalized_vals)
# Create RGB array
rgb_values = np.stack([r, g, b], axis=-1)
# Handle special case where max == min
equal_range_mask = ~valid_range_mask
if np.any(equal_range_mask):
rgb_values[equal_range_mask.ravel()] = 128
# Fill the stretched array efficiently
if group_by == "sensortype":
arr_stretched[dest_rows] = rgb_values[:, None]
else:
arr_stretched[row_indices] = rgb_values[:, None]
return arr_stretched, normalized_vals[voc_rows]
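# Normalization sketch (hypothetical values): the legacy path maps a reading onto the
# 0-1279 color ramp (0-255 in black-and-white mode) using the per-row min/max, and
# inverts VOC rows so higher concentrations render as hotter colors.
def _legacy_normalization_example(value=30.0, big_min=20.0, big_max=40.0,
                                  is_voc=False, bw=False):
    k = (1280 if not bw else 255) / (big_max - big_min)
    normalized = k * (value - big_min)
    if is_voc:
        normalized = (1279 if not bw else 255) - normalized
    return normalized   # 640.0 for the defaults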
def FillSmellImage_80_sensors(scaled_day, arr_stretched, y_offset, device_to_index):
"""
Renders only the 80 smell sensor data as 1-pixel high lines for each device.
This is the new replacement for FillSmellImage_optimized.
"""
minutes = 1440
devices_c = len(device_to_index)
measurements_per_device = 85 # 5 environmental + 80 smell sensors
x_offset = 200 # Same x_offset as FillSmellImage_optimized
for minute in range(minutes):
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Loop through all 80 smell sensors
for sensor_idx in range(80):
# Calculate the source row in the scaled_day array
# The +5 is crucial to skip the 5 empty environmental sensor slots
row_idx = device_offset + 5 + sensor_idx
# Calculate the destination Y pixel on the final image
# This stacks each 80-pixel block for each device
pixel_y = y_offset + (device_idx * 80) + sensor_idx
# --- This logic is copied directly from the working 'FillImage_new_format' ---
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442]
max_val = scaled_day[row_idx, 1443]
if max_val > min_val:
normalized = (value - min_val) / (max_val - min_val)
else:
normalized = 0.5
# Invert for display (higher values = hotter colors)
normalized_inverted = 1 - normalized
final_normalized = np.clip(normalized_inverted, 0, 1)
color = BestColor(final_normalized * 1279)
# Draw the pixel onto the canvas
if pixel_y < arr_stretched.shape[0] and (x_offset + minute) < arr_stretched.shape[1]:
arr_stretched[pixel_y, x_offset + minute] = color
return arr_stretched
def FillSmellImage_optimized(scaled_day, arr_stretched, y_offset):
"""
Fill the stretched array with colored sensor data from scaled_day.
Parameters:
scaled_day: 2D array of shape (70, 1444) containing sensor readings
arr_stretched: 3D array of shape (2685, 1640, 3) to fill with RGB values
Returns:
arr_stretched: Filled array with RGB values
"""
x_offset = 200
stretch_by = 8
def best_color_vectorizedS(vals):
"""Vectorized version of BestColor that matches the original implementation exactly"""
vals = np.clip(vals, 0, 1279).astype(np.int32)
r = np.zeros_like(vals, dtype=np.int32)
g = np.zeros_like(vals, dtype=np.int32)
b = np.zeros_like(vals, dtype=np.int32)
# Region 0-255
mask1 = vals < 256
r[mask1] = 255
g[mask1] = vals[mask1]
# Region 256-511
mask2 = (vals >= 256) & (vals < 512)
r[mask2] = 511 - vals[mask2]
g[mask2] = 255
# Region 512-767
mask3 = (vals >= 512) & (vals < 768)
g[mask3] = 255
b[mask3] = vals[mask3] - 512
# Region 768-1023
mask4 = (vals >= 768) & (vals < 1024)
g[mask4] = 1023 - vals[mask4]
b[mask4] = 255
# Region 1024-1279
mask5 = vals >= 1024
r[mask5] = vals[mask5] - 1024
b[mask5] = 255
return r, g, b
# Process each row in scaled_day
for row_idx in range(scaled_day.shape[0]):
# Extract min and max for this row
row_min = scaled_day[row_idx, 1442]
row_max = scaled_day[row_idx, 1443]
# Get data for this row (first 1440 elements)
row_data = scaled_day[row_idx, :1440]
# Check if min and max are the same
if row_min == row_max:
# Create gray stripe
stripe = np.ones((stretch_by, 1440, 3), dtype=np.int32) * 128
else:
# Normalize the data between 0 and 1279
k = 1280 / (row_max - row_min)
normalized_vals = k * (row_data - row_min)
normalized_vals = np.clip(normalized_vals, 0, 1279)
# Convert to RGB
r, g, b = best_color_vectorizedS(normalized_vals)
# Create RGB stripe
stripe = np.zeros((stretch_by, 1440, 3), dtype=np.int32)
# Fill stripe with the same color pattern for all stretch_by rows
for i in range(stretch_by):
stripe[i, :, 0] = r
stripe[i, :, 1] = g
stripe[i, :, 2] = b
# Calculate the y position for this stripe
y_pos = y_offset + row_idx * stretch_by
end_y = y_pos + stretch_by
end_x = x_offset + 1440
if end_y <= arr_stretched.shape[0] and end_x <= arr_stretched.shape[1]:
arr_stretched[y_pos:end_y, x_offset:end_x, :] = stripe
return arr_stretched
def FillImage(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw):
"""
Fill the stretched array with colored sensor data.
Parameters:
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
devices_c: number of devices
sensors_c: number of sensors per device
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
Returns:
arr_stretched: Filled array with RGB values
"""
stripes = devices_c * sensors_c
stretch_by = arr_stretched.shape[0] // stripes
minutes = arr_stretched.shape[1]
# Create a boolean mask for VOC sensors
if group_by != "sensortype":
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
else:
voc_rows = np.array([i for i in range(stripes) if int(i % sensors_c) >= 5])
# Vectorize the BestColor function
if not bw:
vectorized_best_color = np.vectorize(BestColor)
else:
vectorized_best_color = np.vectorize(GrayColor)
# Process each row
for row in range(stripes):
row_data = scaled_day[row, :minutes] # Get minute data
#if row == 33:
# print("stop")
# plot(row_data, "row_data.png")
big_min = scaled_day[row, 1442] # min value
big_max = scaled_day[row, 1443] # max value
# Create mask for valid values
valid_mask = row_data != -0.001
# Initialize RGB row with zeros
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
if big_max > big_min:
# Scale factor
if not bw:
k = 1280/(big_max-big_min)
else:
k = 255/(big_max-big_min)
# Calculate normalized values
normalized_vals = k * (row_data - big_min)
# Invert if it's a VOC row
if row in voc_rows:
if not bw:
normalized_vals = 1279 - normalized_vals
else:
normalized_vals = 255 - normalized_vals
# Apply valid mask
normalized_vals = np.where(valid_mask, normalized_vals, 0)
#if row == 33:
# plot(normalized_vals, "normalized_vals.png")
# Convert to RGB colors (vectorized)
r, g, b = vectorized_best_color(normalized_vals)
# Combine into RGB array
rgb_row[valid_mask] = np.stack([r[valid_mask],
g[valid_mask],
b[valid_mask]], axis=1)
else:
# Set to gray where valid
rgb_row[valid_mask] = 128
if group_by == "sensortype":
# Fill the stretched rows
sensor_index = row % sensors_c
device_index = int(row/sensors_c)
dest_row = sensor_index * devices_c + device_index #0-0, 1-
start_idx = dest_row * stretch_by
end_idx = start_idx + stretch_by
arr_stretched[start_idx:end_idx] = rgb_row
else:
# Fill the stretched rows
start_idx = row * stretch_by
end_idx = start_idx + stretch_by
arr_stretched[start_idx:end_idx] = rgb_row
return arr_stretched
def FillRadarImage(scaled_day, devices_c, bands, arr_stretched, group_by, map_type):
"""
Fill the stretched array with colored sensor data.
Parameters:
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
devices_c: number of devices
bands: number of bands per device
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
Returns:
arr_stretched: Filled array with RGB values
"""
stripes = devices_c * bands
stretch_by = arr_stretched.shape[0] // stripes
minutes = arr_stretched.shape[1]
# Create a boolean mask for VOC sensors
if group_by != "sensortype":
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
else:
voc_rows = np.array([i for i in range(stripes) if int(i % bands) >= 5])
# Vectorize the BestColor function
if map_type == 3:
vectorized_best_color = np.vectorize(BestColor)
else:
vectorized_best_color = np.vectorize(GrayColor)
# Process each row
for row in range(stripes):
row_data = scaled_day[row, :minutes] # Get minute data
#if row == 33:
# print("stop")
# plot(row_data, "row_data.png")
big_min = 0 #scaled_day[row, 1442] # min value
big_max = 255 #scaled_day[row, 1443] # max value
# Create mask for valid values
valid_mask = row_data != -0.001
# Initialize RGB row with zeros
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
if big_max > big_min:
# Scale factor
if map_type == 3:
k = 1280/(big_max-big_min)
else:
k = 255/(big_max-big_min)
# Calculate normalized values
normalized_vals = k * (row_data - big_min)
# Invert if it's a VOC row
if row in voc_rows:
if map_type == 3:
normalized_vals = 1279 - normalized_vals
else:
normalized_vals = 255 - normalized_vals
# Apply valid mask
normalized_vals = np.where(valid_mask, normalized_vals, 0)
#if row == 33:
# plot(normalized_vals, "normalized_vals.png")
# Convert to RGB colors (vectorized)
r, g, b = vectorized_best_color(normalized_vals)
# Combine into RGB array
rgb_row[valid_mask] = np.stack([r[valid_mask],
g[valid_mask],
b[valid_mask]], axis=1)
else:
# Set to gray where valid
rgb_row[valid_mask] = 128
if group_by == "sensortype":
# Fill the stretched rows
band_index = row % bands
device_index = int(row/bands)
dest_row = band_index * devices_c + device_index #0-0, 1-
start_idx = dest_row * stretch_by
end_idx = start_idx + stretch_by
arr_stretched[start_idx:end_idx] = rgb_row
else:
# Fill the stretched rows
start_idx = row * stretch_by
end_idx = start_idx + stretch_by
arr_stretched[start_idx:end_idx] = rgb_row
return arr_stretched
def GetFullLocMapDetails(map_file):
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
local_timezone = pytz.timezone('America/Los_Angeles') # Replace with your local timezone
dest_path = os.path.dirname(map_file)
parts = map_file.split("/")
deployment = parts[-2]
parts1 = parts[-1].split("_")
date_string = parts1[1]
deployments = GetDeploymentB(deployment, -1) #All
last_locations_file = ""
last_per_minute_file = ""
today = datetime.datetime.today()
deployment_details = deployments[0]
deployment_pair = deployment_details[0]
proximity_lst = deployment_details[1]
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
date_object_midnight = local_timezone.localize(date_object.replace(hour=0, minute=0, second=0, microsecond=0))
selected_epoch = int(date_object_midnight.timestamp())
sel_date = datetime.datetime.fromtimestamp(selected_epoch)
devices_list_str = GetDevicesList(deployment_details, sel_date)#.split(',')
devices_list = ast.literal_eval(devices_list_str)
return devices_list, selected_epoch, dest_path
def median_filter(data, window_size):
filtered_data = []
print(len(data))
window = deque(maxlen=window_size)
last_value = -1
offset = 0
added_old = 0
for value in data:
if value != '':
added_old = 0
last_value = value
window.append(value)
if len(window) == window_size:
# Sort the window and get the median value
sorted_window = sorted(window)
median = sorted_window[window_size // 2]
filtered_data.append(median)
else:
if last_value != -1:
if added_old < window_size:
added_old = added_old + 1
window.append(last_value)
else:
window.append(-1)
if len(window) == window_size:
# Sort the window and get the median value
sorted_window = sorted(window)
median = sorted_window[window_size // 2]
filtered_data.append(median)
else:
offset +=1
if len(filtered_data) > 0:
offset += (window_size // 2)
#if it starts empty, just leave it that way; do not fake values backwards from midnight
first_val = -1 # filtered_data[0]
last_val = filtered_data[-1]
front_padding = [first_val] * offset
remaining = len(data) - len(filtered_data) - len(front_padding)
back_padding = [last_val] * remaining
out_data = front_padding + filtered_data + back_padding
else:
out_data = data
#add front and back padding
return out_data
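# Behaviour sketch (hypothetical input): empty strings are carried forward, the first
# window_size // 2 outputs are front-padded with -1, and the result keeps the input length.
def _median_filter_example():
    # The 50 spike is suppressed by the 3-sample median: [-1, 5, 5, 5, 5, 5, 5]
    return median_filter([5, 5, '', 5, 50, 5, 5], 3)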
def FilterGlitches(wave_in, filter_minutes):
if(filter_minutes > 0):
notfiltered_wave = [i[0] for i in wave_in]
filtered_wave = median_filter(notfiltered_wave, filter_minutes)
for i, value in enumerate(filtered_wave):
wave_in[i][0] = value
return wave_in
def setup_timezone_converter(time_zone_st):
"""
Setup timezone converter to be reused
Parameters:
time_zone_st (str): Timezone string (e.g. 'Europe/Berlin')
Returns:
pytz.timezone: Timezone object for conversion
"""
return pytz.timezone(time_zone_st)
def ReadDailyRadar(MAC, current_date):
#This will return all 1 Minute radar data for each gate in the file
#Returns a 1440-entry per-minute list; each entry holds 16 per-minute maxima: moving gates M0-M8 followed by stationary gates S2-S8
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
start_of_day = ToLocal(calendar.timegm(datetime.datetime(current_date.year, current_date.month,current_date.day, 0, 0).timetuple()))
end_of_day = start_of_day + 1440 * 60
file = os.path.join(scriptDir, "DB/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
file = file.replace("\\","/")
file1 = os.path.join(scriptDir, "DB/processed_db/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
file1 = file1.replace("\\","/")
if (not os.path.exists(file) and not os.path.exists(file1)):
print(file + " and " + file1 + " are not found")
return []
result = []
min_OK = "0"
sqlr = "SELECT * FROM radars WHERE time >= "+str(start_of_day) +" and time < "+str(end_of_day) +" ORDER BY time ASC"
#sqlr = "SELECT Date, high, low from "+sensor.lower()+"s1Min"+" WHERE low >= "+min_OK+" and Date >= "+str(start_of_day) +" and Date < "+str(end_of_day)
print(sqlr)
if os.path.exists(file):
result = QuerrySql(file, sqlr)
elif os.path.exists(file1):
result = QuerrySql(file1, sqlr)
# M0 ............M8 S2 ........S8
#day_minutes_data = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]] * (24 * 60 + 2)
day_minutes_data = [[0] * 16 for _ in range(24 * 60)]
#for each gate lets find maximum value per minute
for mgate in range(9):
max_per_min = 0
for minute_data in result:
seconde = minute_data[0]
date_time_minute = datetime.datetime.fromtimestamp(seconde)
minute_m = 60*date_time_minute.hour+date_time_minute.minute
if minute_data[mgate + 6] > day_minutes_data[minute_m][mgate]:
day_minutes_data[minute_m][mgate] = minute_data[mgate + 6]
for sgate in range(7):
for minute_data in result:
seconde = minute_data[0]
date_time_minute = datetime.datetime.fromtimestamp(seconde)
minute_m = 60*date_time_minute.hour+date_time_minute.minute
if minute_data[sgate + 17] > day_minutes_data[minute_m][sgate+9]:
day_minutes_data[minute_m][sgate+9] = minute_data[sgate + 17]
return day_minutes_data
def FromLocalMidnight(epoch_time, local_delta):
# Convert epoch time to UTC datetime object
print(type(epoch_time))
print(epoch_time)
local_datetime = datetime.datetime.utcfromtimestamp(epoch_time+local_delta).replace(tzinfo=pytz.UTC)
# Calculate minute count from midnight
minutes_from_midnight = (local_datetime - local_datetime.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60
return minutes_from_midnight
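# Usage sketch (illustrative epoch): 2025-01-01 01:30:00 UTC with a zero offset is
# 90 minutes past local midnight.
def _from_local_midnight_example():
    return FromLocalMidnight(1735695000, 0)   # -> 90.0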
def process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest):
"""
NumPy-based version of wave processing
Parameters:
my_data: List of tuples containing (time_val, device_id, other radar_fields_of_interest)
time_zone_s: Target timezone string
device_id_2_threshold: Dictionary mapping device_ids to their thresholds
Returns:
List of [device_id, max_val] pairs for each minute
"""
wave_m = None
tz = pytz.timezone(time_zone_s)
if not my_data:
return [["", -1] for _ in range(1440)]
vectorized_BestColor = np.vectorize(BestColor)
stripes = len(device_id_2_threshold)
stretch_by = 5
minutes = 1440
arr_source = np.zeros((int(stripes), minutes), dtype=np.float32)
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
index_map = {word: idx for idx, word in enumerate(radar_fields_of_interest)}
devices_map = {word: idx for idx, word in enumerate(device_id_2_threshold)}
times = []
start_time = 0
for data_set in my_data:
time_stamp = data_set[0]
if start_time == 0:
# Convert timestamp to a datetime object in UTC
local_tz = pytz.timezone(time_zone_s)
local_time = time_stamp.astimezone(local_tz)
# Set the time to the start of the day in the local time zone
start_of_day_local = local_time.replace(hour=0, minute=0, second=0, microsecond=0)
# Convert the start of the day back to UTC
start_time = start_of_day_local.astimezone(pytz.utc)
diff = time_stamp - start_time
minute = int(diff.total_seconds() / 60)
device_id = data_set[1]
field_name = device_id_2_threshold[device_id][0]
field_index = index_map[field_name]
threshold = device_id_2_threshold[device_id][1]
value = data_set[2+field_index]
if value > threshold:
arr_source[devices_map[device_id]][minute] = value
#np.savetxt('output.csv', arr_source, delimiter=',')
if False:
for yy in range(stripes):
rgb_row = vectorized_BestColor(1280*arr_source[yy]/100)
rgb_reshaped = np.array(rgb_row).reshape(3, minutes).T
for stretch_index in range(stretch_by):
y = yy * stretch_by + stretch_index
arr_stretched[y, :] = rgb_reshaped
SaveImageInBlob(image_file, arr_stretched, [])
max_values = np.max(arr_source, axis=0)
# Get indices (0-based)
wave_m = np.argmax(arr_source, axis=0)
# Add 1 to convert to 1-based indexing
wave_m = wave_m + 1
# Set to 0 where the column was all zeros
wave_m[max_values == 0] = 0
return wave_m
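# Hedged sketch (illustration only): how process_wave_data_numpy picks the per-minute
# "winning" device at its end. Rows are device stripes, columns are minutes; the
# returned index is 1-based and 0 means no device was above its threshold that minute.
def _demo_argmax_device_selection():
    demo = np.array([[0.0, 5.0, 0.0],
                     [0.0, 9.0, 0.0],
                     [2.0, 0.0, 0.0]], dtype=np.float32)
    max_values = np.max(demo, axis=0)      # strongest reading in each minute
    winners = np.argmax(demo, axis=0) + 1  # 1-based device row of that reading
    winners[max_values == 0] = 0           # minutes with no signal stay 0
    return winners                         # -> array([3, 2, 0])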
def ReadDailyCollapsedFastRadar(MAC, time_from_str, time_to_str):
#Returns the per-minute radar rows for the selected day: (date, low) by default,
#or (date, high) when the device's radar_max flag is set in the devices table
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
result = []
min_OK = "0"
sqlr = "SELECT radar_max FROM devices WHERE MAC = '"+MAC +"'"
print(sqlr)
DB_to_be_found_in_full = os.path.join(scriptDir, "main.db")
DB_to_be_found_in_full = DB_to_be_found_in_full.replace("\\","/")
result = QuerrySql(DB_to_be_found_in_full, sqlr)
sqlr = "SELECT date, low FROM radars1Min WHERE date >= "+str(start_of_day) +" and date < "+str(end_of_day) + " ORDER BY date"
if len(result)>0:
if result[0][0] == 1:
sqlr = "SELECT date, high FROM radars1Min WHERE date >= "+str(start_of_day) +" and date < "+str(end_of_day) + " ORDER BY date"
print(sqlr)
if os.path.exists(file):
result = QuerrySql(file, sqlr)
elif os.path.exists(file1):
result = QuerrySql(file1, sqlr)
return result
def vectorized_best_color_numpy(values):
"""Vectorized version of BestColor using pure NumPy"""
# Ensure values are within range
values = np.clip(values, 0, 1279)
# Initialize output arrays
r = np.zeros_like(values, dtype=np.uint8)
g = np.zeros_like(values, dtype=np.uint8)
b = np.zeros_like(values, dtype=np.uint8)
# Create masks for each range
mask_0_255 = values < 256
mask_256_511 = (values >= 256) & (values < 512)
mask_512_767 = (values >= 512) & (values < 768)
mask_768_1023 = (values >= 768) & (values < 1024)
mask_1024_plus = values >= 1024
# Set values for each range using masks
r[mask_0_255] = 255
g[mask_0_255] = values[mask_0_255]
r[mask_256_511] = 511 - values[mask_256_511]
g[mask_256_511] = 255
g[mask_512_767] = 255
b[mask_512_767] = values[mask_512_767] - 512
g[mask_768_1023] = 1023 - values[mask_768_1023]
b[mask_768_1023] = 255
r[mask_1024_plus] = values[mask_1024_plus] - 1024
b[mask_1024_plus] = 255
return np.stack([r, g, b], axis=-1)
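# Hedged usage sketch (not wired into any endpoint): sampling the 0-1279 ramp at its
# segment boundaries shows the red -> yellow -> green -> cyan -> blue -> magenta
# progression produced by vectorized_best_color_numpy above.
def _demo_best_color_ramp():
    samples = np.array([0, 255, 256, 511, 512, 767, 768, 1023, 1024, 1279])
    return vectorized_best_color_numpy(samples)  # shape (10, 3) uint8 RGB triples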
def create_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
if len(my_data) < 1:
return []
local_tz = pytz.timezone(timezone_st)
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Get unique device IDs and create mapping
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
#minute is in local time zone, and base_minute is UTC
base_minute_local = base_minute #.astimezone(local_tz)
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Extract values and convert to float
values = data_array[:, 2:].astype(np.float32)
if bw:
# Process in batches to avoid memory issues
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Calculate gray values
gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0
# Clip values to valid range
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
# Assign values to the image array
for i in range(end_idx - start_idx):
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
else: # Color mode
# Process in batches
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Calculate color values
color_values = np.zeros_like(values[batch_slice])
color_values[:, :] = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 1279.0
#color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
# Convert to RGB colors
for i in range(end_idx - start_idx):
rgb_values = vectorized_best_color_numpy(color_values[i])
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
return wave_m
def create_light_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
"""
Create an optimized heatmap for light data (range 0-4095)
Parameters:
my_data (list): Data from the database query
bw (bool): Whether to create a black and white (True) or color (False) heatmap
fields (list): List of field names
wave_m (numpy.ndarray): The image array to fill
device_to_index (dict): Mapping from device_id to index
base_minute (datetime): The base minute for time calculations
timezone_st (str): Timezone string
min_val (float): Minimum value for normalization (default: 0)
max_val (float): Maximum value for normalization (default: 4095)
Returns:
numpy.ndarray: The filled image array
"""
if len(my_data) < 1:
return wave_m
# Get the local timezone
local_tz = pytz.timezone(timezone_st)
# Number of fields (should be 1 for light data)
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Extract values and convert to float - light data is in column 2
# Reshape to match expected format (n_samples, n_fields)
values = data_array[:, 2].astype(np.float32).reshape(-1, 1)
# Process in batches to avoid memory issues
batch_size = 1000
if bw:
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Normalize light values (0-4095) to grayscale (0-255)
gray_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 255.0)
# Clip values to valid range
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
# Assign values to the image array
for i in range(end_idx - start_idx):
# Create RGB grayscale (same value for R, G, B)
gray_rgb = np.full(3, gray_values[i, 0], dtype=np.uint8)
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = gray_rgb
else:
# Color mode
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Normalize light values (0-4095) to color range (0-1279)
color_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 1279.0)
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
# For each value, calculate its RGB color and assign to the image
for i in range(end_idx - start_idx):
# Convert normalized value to RGB using vectorized_best_color_numpy
rgb_value = vectorized_best_color_numpy(np.array([color_values[i, 0]]))[0]
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = rgb_value
return wave_m
def create_temperature_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
"""
Create an optimized heatmap for temperature data with alarm levels
Parameters:
my_data (list): Data from the database query with columns for minute, device_id, temperature_avg, alarm_level
bw (bool): Whether to create a black and white (True) or color (False) heatmap
fields (list): List of field names - should be ['temperature', 'temperature_state']
wave_m (numpy.ndarray): The image array to fill
device_to_index (dict): Mapping from device_id to index
base_minute (datetime): The base minute for time calculations
timezone_st (str): Timezone string
min_val (float): Minimum value for temperature normalization
max_val (float): Maximum value for temperature normalization
Returns:
numpy.ndarray: The filled image array
"""
if len(my_data) < 1:
return wave_m
# Get the local timezone
local_tz = pytz.timezone(timezone_st)
# Number of fields (should be 2 for temperature data: temperature and alarm state)
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Process in batches to avoid memory issues
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
for i in range(end_idx - start_idx):
# Get data for this record
temperature = data_array[batch_slice][i, 2]
if temperature is None:
temperature = min_val
alarm_level = 0
# If we have an alarm_level column (index 3), use it
if data_array.shape[1] > 3:
alarm_level = data_array[batch_slice][i, 3]
# Calculate base y-coordinate for this device
base_y = device_indices[batch_slice][i] * n_fields
# Temperature row (even row - index 0, 2, 4...)
# Normalize temperature to the color range and create color
if not bw:
# For color mode
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 1279.0, 0, 1279)
temp_rgb = vectorized_best_color_numpy(np.array([normalized_temp]))[0]
else:
# For B&W mode
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 255.0, 0, 255)
gray_value = int(normalized_temp)
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
# Set the temperature color in the even row
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
# Alarm level row (odd row - index 1, 3, 5...)
# Set color based on alarm level (0=green, 1=yellow, 2=red); channel order is B,G,R to match OpenCV
if alarm_level == 0:
# Green for normal
alarm_rgb = np.array([0, 255, 0], dtype=np.uint8)
elif alarm_level == 1:
# Yellow for warning
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
else: # alarm_level == 2
# Red for critical
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
# Set the alarm color in the odd row
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
return wave_m
def create_humidity_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=100):
"""
Create a heatmap with the exact blue-cyan-green-yellow-red-violet spectrum
matching Image 2, with green at position 40
"""
if len(my_data) < 1:
return wave_m
# Number of fields
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Process in batches to avoid memory issues
batch_size = 1000
# Define the color mapping function based on the exact spectrum we want
def get_color(t):
"""Get RGB color from humidity 0-100"""
# Define color stops - exact RGB values at each step
# Format: (position, (r, g, b))
#color_stops = [
#(0, (0, 0, 255)), # Blue
#(20, (0, 255, 255)), # Cyan
#(40, (0, 255, 0)), # Green (centered at 40)
#(60, (255, 255, 0)), # Yellow
#(80, (255, 0, 0)), # Red
#(100, (255, 0, 255)) # Violet
#]
color_stops = [
(0, (0, 0, 255)), # Blue
(16, (0, 255, 255)), # Cyan
(32, (0, 255, 0)), # Green (now centered at 32)
(60, (255, 255, 0)), # Yellow
(80, (255, 0, 0)), # Red
(100, (255, 0, 255)) # Violet
]
# Ensure t is within range
t = max(0, min(100, t))
# Find the two stops to interpolate between
for i in range(len(color_stops) - 1):
pos1, color1 = color_stops[i]
pos2, color2 = color_stops[i+1]
if pos1 <= t <= pos2:
# Linear interpolation between the two color stops
ratio = (t - pos1) / (pos2 - pos1)
r = int(color1[0] + ratio * (color2[0] - color1[0]))
g = int(color1[1] + ratio * (color2[1] - color1[1]))
b = int(color1[2] + ratio * (color2[2] - color1[2]))
return r, g, b
# Should never reach here
return 0, 0, 0
humidity = min_val
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
for i in range(end_idx - start_idx):
# Get data for this record
if data_array[batch_slice][i, 2] != None:
humidity = float(data_array[batch_slice][i, 2])
# Map humidity from min_val-max_val to 0-100 for our color function
normalized_temp = 100.0 * (humidity - min_val) / (max_val - min_val) if max_val > min_val else 0
normalized_temp = max(0, min(100, normalized_temp)) # Clamp to 0-100
alarm_level = 0
# If we have an alarm_level column (index 3), use it
if data_array.shape[1] > 3:
alarm_level = data_array[batch_slice][i, 3]
# Calculate base y-coordinate for this device
base_y = device_indices[batch_slice][i] * n_fields
# Temperature row (even row)
if not bw:
# Get RGB color from our direct mapping function
r, g, b = get_color(normalized_temp)
# OpenCV uses BGR ordering, not RGB
temp_rgb = np.array([b, g, r], dtype=np.uint8)
else:
# For B&W mode
gray_value = int(normalized_temp * 2.55) # 0-100 to 0-255
gray_value = max(0, min(255, gray_value))
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
# Set the humidity color in the even row
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
# Alarm level row (odd row)
if alarm_level == 0:
# Green for normal
alarm_rgb = np.array([0, 255, 0], dtype=np.uint8) #this is B,G,R (OpenCV ordering)
elif alarm_level == 1:
# Yellow for warning
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
else: # alarm_level == 2
# Red for critical
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
# Set the alarm color in the odd row
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
return wave_m
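# Hedged standalone copy (illustration only) of the piecewise-linear color-stop
# interpolation performed by the nested get_color() above; the stops mirror the
# humidity spectrum, with green anchored at 32.
def _demo_humidity_color(t):
    color_stops = [(0, (0, 0, 255)), (16, (0, 255, 255)), (32, (0, 255, 0)),
                   (60, (255, 255, 0)), (80, (255, 0, 0)), (100, (255, 0, 255))]
    t = max(0, min(100, t))
    for (pos1, c1), (pos2, c2) in zip(color_stops, color_stops[1:]):
        if pos1 <= t <= pos2:
            ratio = (t - pos1) / (pos2 - pos1)
            return tuple(int(a + ratio * (b - a)) for a, b in zip(c1, c2))
    return (0, 0, 0)
# e.g. _demo_humidity_color(32) -> (0, 255, 0); _demo_humidity_color(8) -> (0, 127, 255)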
def create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields_s, device_to_index, base_minute, time_zone_s, smell_component_stretch_by, selected_date, y_offset, has_new_format):
"""
HYBRID version that detects the data format and calls the appropriate visualization pipeline.
- If mtype 100-170 is found, it uses the new 80-sensor, 1-pixel line method.
- Otherwise, it defaults to the original 10-sensor, stretched stripe method.
"""
if not my_data:
return
# --- SELECT THE CORRECT PIPELINE ---
if has_new_format:
#
# --- EXECUTE THE NEW 80-SENSOR PIPELINE ---
#
print("New smell format detected. Running 80-sensor visualization.")
devices_c = len(device_to_index)
devices_list = list(device_to_index.keys())
measurements_per_device = 85
minutes = 1440
stripes = devices_c * measurements_per_device
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
# a. Unwrap data using the specialized smell-only unwrapper
unwrapped_data = unwrap_smell_only_data(my_data, devices_list, time_from_str)
# b. Interpolate data correctly
interpolated_data = fast_interpolate_unwrapped_data(unwrapped_data)
# c. Fill the source array; these are real measurements: 5 base sensors + 80 smell channels per device
arr_source_template = np.full((stripes, minutes + 4), -0.001, dtype=float)
arr_source = fast_fill_array_from_unwrapped(interpolated_data, devices_list, arr_source_template, time_from_str)
# d. Add limits (using the 80-sensor version)
arr_source = AddLimits_optimized_80(arr_source, devices_c, percentile=100)
# e. Calculate min/max
scaled_day = CalcExtremes(arr_source, minutes, stripes)
# f. Render the image (using the new 80-sensor renderer with the OverflowError fix)
arr_stretched = FillSmellImage_80_sensors(scaled_day, arr_stretched, y_offset, device_to_index)
else:
#
# --- EXECUTE THE ORIGINAL 10-SENSOR PIPELINE ---
#
print("Old smell format detected. Running original 10-sensor visualization.")
minutes = 1440
devices_c = len(device_to_index)
sensors_c = len(fields_s) # Use fields_s for old format
stripes = devices_c * sensors_c
# a. Fill the source array (using the original function)
arr_source_template = np.full((stripes, minutes + 4), -0.001, dtype=float)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
arr_source = fast_fill_smell_array_from_timescale(my_data, time_from_str, device_to_index, arr_source_template, time_zone_s)
# b. Add limits (using the original function)
arr_source = AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile=100)
# c. Calculate min/max
scaled_day = CalcExtremes(arr_source, minutes, stripes)
# d. Render the image (using the original renderer)
arr_stretched = FillSmellImage_optimized(scaled_day, arr_stretched, y_offset)
return arr_stretched # Return the modified array
def create_optimized_heatmap_simple(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
if len(my_data) < 1:
return []
local_tz = pytz.timezone(timezone_st)
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Get unique device IDs and create mapping
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
#minute is in local time zone, and base_minute is UTC
base_minute_local = base_minute #.astimezone(local_tz)
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Extract values and convert to float
values = data_array[:, 2:].astype(np.float32)
if bw:
# Process in batches to avoid memory issues
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Calculate gray values
gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0
# Clip values to valid range
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
# Assign values to the image array
for i in range(end_idx - start_idx):
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
else: # Color mode
# Process each record: normalize to the 0-1279 ramp, convert to RGB, and place it on the device's rows
for i in range(len(data_array)):
    color_values = ((values[i] - min_val) / (max_val - min_val)) * 1279.0
    y_coords = device_indices[i] * n_fields + np.arange(n_fields)
    wave_m[y_coords, x_coords[i]] = vectorized_best_color_numpy(color_values)
return wave_m
def create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st):
if len(my_data) < 1:
return []
local_tz = pytz.timezone(timezone_st)
n_fields = len(fields)
# Convert my_data to numpy array for faster processing
data_array = np.array(my_data)
# Get unique device IDs and create mapping
# Convert device IDs to indices using vectorized operation
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
# Calculate x coordinates (minutes from base)
#minute is in local time zone, and base_minute is UTC
base_minute_local = base_minute #.astimezone(local_tz)
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
# Extract values and convert to float
values = data_array[:, 2:].astype(np.float32)
if bw:
# Process in batches to avoid memory issues
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Calculate gray values
gray_values = (values[batch_slice, :] / 100.0) * 255.0
# Clip values to valid range
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
# Assign values to the image array
for i in range(end_idx - start_idx):
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
else: # Color mode
# Process in batches
batch_size = 1000
for start_idx in range(0, len(data_array), batch_size):
end_idx = min(start_idx + batch_size, len(data_array))
batch_slice = slice(start_idx, end_idx)
# Calculate color values
color_values = np.zeros_like(values[batch_slice])
color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
# Create y coordinates for each record
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
# Convert to RGB colors
for i in range(end_idx - start_idx):
rgb_values = vectorized_best_color_numpy(color_values[i])
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
return wave_m
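# Hedged sketch (illustration only) of the stripe layout shared by the heatmap builders
# above: each device owns a contiguous block of n_fields image rows, so a record lands
# at row = device_index * n_fields + field_index.
def _demo_heatmap_row_layout():
    device_indices = np.array([0, 2, 1])  # device row assigned to each record
    n_fields = 4                          # fields rendered per device
    y_coords = (device_indices * n_fields).reshape(-1, 1) + np.arange(n_fields)
    return y_coords  # -> [[0, 1, 2, 3], [8, 9, 10, 11], [4, 5, 6, 7]]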
def visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png'):
"""
Visualize how GMM separates the stationary signal into components
"""
# Prepare data
X = stationary_signal.reshape(-1, 1)
# Fit GMM
gmm = GaussianMixture(n_components=2, random_state=42)
gmm.fit(X)
# Get parameters
means = gmm.means_.flatten()
stds = np.sqrt(gmm.covariances_.flatten())
weights = gmm.weights_
# Create histogram of actual data
plt.figure(figsize=(12, 6))
# Plot histogram of actual data
plt.hist(X, bins=50, density=True, alpha=0.6, color='gray',
label='Actual Signal Distribution')
# Generate points for GMM curves
x = np.linspace(X.min(), X.max(), 200)
# Plot individual components
for i in range(len(means)):
plt.plot(x, weights[i] * stats.norm.pdf(x, means[i], stds[i]),
label=f'Component {i+1}: mean={means[i]:.2f}, std={stds[i]:.2f}')
# Plot combined GMM
gmm_curve = np.zeros_like(x)
for i in range(len(means)):
gmm_curve += weights[i] * stats.norm.pdf(x, means[i], stds[i])
plt.plot(x, gmm_curve, 'r--', linewidth=2, label='Combined GMM')
# Add vertical lines for threshold
baseline = min(means)
threshold = baseline + 3 * np.sqrt(gmm.covariances_.flatten()[np.argmin(means)])
plt.axvline(x=baseline, color='g', linestyle='--', label='Baseline')
plt.axvline(x=threshold, color='r', linestyle='--', label='Threshold')
plt.title('Gaussian Mixture Model Components of Stationary Signal')
plt.xlabel('Signal Value')
plt.ylabel('Density')
plt.legend()
plt.grid(True)
# Save and close
plt.savefig(output_file, dpi=300, bbox_inches='tight')
plt.close()
def process_location_data(location_data):
"""
Convert raw location data into aligned time series.
"""
timestamps = np.array([t[0] for t in location_data])
stationary = np.array([t[1] for t in location_data])
motion = np.array([t[2] for t in location_data])
return timestamps, stationary, motion
def detect_presence_for_location(stationary_signal, motion_signal,
motion_threshold=5, gmm_components=2):
"""
Simplified presence detection for a single location.
Returns presence mask and parameters.
"""
# Fit GMM to stationary signal
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
X = stationary_signal.reshape(-1, 1)
gmm.fit(X)
visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png')
# Get baseline and threshold
baseline = min(gmm.means_)[0]
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
baseline_std = np.sqrt(components_sorted[0][1])
threshold = baseline + 3 * baseline_std
# Detect presence
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
# Smooth presence detection (3-sample window, about 30 seconds at 10-second sampling)
smooth_window = 3
presence_mask = np.convolve(presence_mask.astype(int),
np.ones(smooth_window)/smooth_window,
mode='same') > 0.5
return presence_mask, threshold
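# Hedged sketch on synthetic data (illustration only): how the baseline (quietest GMM
# component) and the baseline + 3*sigma threshold are derived, mirroring the logic above.
# The component means (~5 and ~40) are arbitrary example values.
def _demo_gmm_threshold():
    rng = np.random.default_rng(42)
    signal = np.concatenate([rng.normal(5, 1, 500),    # quiet baseline component
                             rng.normal(40, 5, 200)])  # "occupied" component
    gmm = GaussianMixture(n_components=2, random_state=42).fit(signal.reshape(-1, 1))
    comps = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
    baseline, baseline_var = comps[0]
    return baseline, baseline + 3 * np.sqrt(baseline_var)  # roughly (5, 8)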
def find_current_location(data_sets, start_time, end_time, motion_threshold=10):
"""
Analyze presence across multiple locations for each minute.
Parameters:
-----------
data_sets : dict
Dictionary of location_name: data_tuples pairs
start_time : datetime
Start time for analysis
end_time : datetime
End time for analysis
motion_threshold : float
Threshold for significant motion detection
Returns:
--------
dict
Minute by minute analysis of presence and movement
"""
# Process each location's data
location_data = {}
for location, data in data_sets.items():
timestamps, stationary, motion = process_location_data(data)
presence, threshold = detect_presence_for_location(stationary, motion, motion_threshold)
location_data[location] = {
'timestamps': timestamps,
'presence': presence,
'motion': motion,
'stationary': stationary,
'threshold': threshold
}
# Create minute-by-minute analysis
current_time = start_time
results = []
while current_time < end_time:
minute_end = current_time + timedelta(minutes=1)
# Analysis for current minute
minute_status = {
'timestamp': current_time,
'locations': [],
'moving_locations': [],
'presence_values': {},
'motion_values': {},
'status': 'nobody_present'
}
# First pass: collect all presence and motion values
for location, data in location_data.items():
# Find indices for current minute
mask = (data['timestamps'] >= current_time) & (data['timestamps'] < minute_end)
if not any(mask):
continue
presence_in_minute = data['presence'][mask]
motion_in_minute = data['motion'][mask]
stationary_in_minute = data['stationary'][mask]
if any(presence_in_minute):
minute_status['presence_values'][location] = np.max(stationary_in_minute)
minute_status['motion_values'][location] = np.max(motion_in_minute)
# If no presence detected anywhere
if not minute_status['presence_values']:
minute_status['status'] = 'nobody_present'
results.append(minute_status)
current_time += timedelta(minutes=1)
continue
# Find location with strongest presence
primary_location = max(minute_status['presence_values'].items(),
key=lambda x: x[1])[0]
# Count locations with significant motion
moving_locations = [loc for loc, motion in minute_status['motion_values'].items()
if motion > motion_threshold]
#plot(motion, filename="motion.png", title="Motion", style='line')  # debug-only plot left disabled; 'plot' and 'motion' come from outside this per-minute loop
# Update status based on motion and presence
if len(moving_locations) > 1:
# Multiple locations with significant motion indicates multiple people
minute_status['status'] = 'multiple_people_moving'
minute_status['locations'] = moving_locations
minute_status['moving_locations'] = moving_locations
else:
# Single or no motion - assign to location with strongest presence
minute_status['locations'] = [primary_location]
if moving_locations:
minute_status['status'] = f'single_person_moving_in_{primary_location}'
minute_status['moving_locations'] = moving_locations
else:
minute_status['status'] = f'single_person_stationary_in_{primary_location}'
results.append(minute_status)
current_time += timedelta(minutes=1)
return results
def get_size(obj, seen=None):
# Recursively find size of objects and their contents
if seen is None:
seen = set()
obj_id = id(obj)
if obj_id in seen:
return 0
seen.add(obj_id)
size = sys.getsizeof(obj)
if isinstance(obj, (list, tuple, set, dict)):
if isinstance(obj, (list, tuple, set)):
size += sum(get_size(i, seen) for i in obj)
else: # dict
size += sum(get_size(k, seen) + get_size(v, seen) for k, v in obj.items())
return size
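# Hedged usage sketch (illustration only): get_size recurses into containers, so a
# nested structure reports more than a flat sys.getsizeof of its outer dict alone.
def _demo_get_size():
    nested = {"a": [1, 2, 3], "b": ("x", "y")}
    return get_size(nested), sys.getsizeof(nested)  # first value is the larger one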
def CreatePresenceMap(location_image_file, devices_list, selected_date,
map_type, force_recreate, chart_type, bw, motion, scale_global,
fast, filter_minutes, time_zone_s):
#global Id2MACDict
data_sets = {}
ids_list = []
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
time_from, time_to = GetLocalTimeForDateSimple(selected_date, time_zone_s)
for details in devices_list:
sql = get_device_radar_only_query(str(details[1]), time_from_str, time_to_str, [details[1]])
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
data_sets[details[2]] = cur.fetchall()#cur.fetchone()
# Get minute-by-minute analysis
location_analysis = find_current_location(data_sets, time_from, time_to)
# Example of printing results
for minute in location_analysis:
print(f"Time: {minute['timestamp']}")
print(f"Status: {minute['status']}")
print(f"Present in: {', '.join(minute['locations'])}")
if minute['moving_locations']:
print(f"Movement in: {', '.join(minute['moving_locations'])}")
print("---")
print(f"Dictionary size: {get_size(data_sets)} bytes")
devices_list_str = ','.join(str(device[1]) for device in devices_list)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
sql = get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None:
return False
#thresholds_dict = {}
#stretch_to_min_max = True
#devices_c = len(devices_list)
#data_sets = {
#'living_room': my_data1,
#'kitchen': my_data2,
#'bedroom1': my_data3,
#'bedroom2': my_data4,
#'hallway': my_data5,
#'bathroom': my_data6,
#'office': my_data7
#}
sensors_c = 1#len(sensors_table)
image_file = location_image_file
minutes = 1440
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
#recreate .pkl files if missing
today_date = datetime.datetime.fromtimestamp(time.time())
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min'] #Why both 'm8_max' and 'm08_max'? Because m08 is the sum of m0 through m8.
fields_n = len(fields)
stripes = len(devices_list) * fields_n
#device_counter = 0
stretch_by = 5
#arr_source = np.zeros((stripes, minutes), dtype=float)
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
ids_list = []
labels = []
label_font = cv2.FONT_HERSHEY_SIMPLEX
label_font_scale = 1
label_font_color = (255, 255, 255)
label_font_thickness = 2
label_font_line = cv2.LINE_AA
cnt = 0
for details in devices_list:
dev_id = details[0]
ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
cnt += 1
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None:
return False
# Get start and end times across all locations present in data_sets
start_time = min(ds[0][0] for ds in data_sets.values() if ds)
end_time = max(ds[-1][0] for ds in data_sets.values() if ds)
# Get minute-by-minute analysis
location_analysis = find_current_location(data_sets, start_time, end_time)
# Example of printing results
for minute in location_analysis:
print(f"Time: {minute['timestamp']}")
print(f"Status: {minute['status']}")
print(f"Present in: {', '.join(minute['locations'])}")
if minute['moving_locations']:
print(f"Movement in: {', '.join(minute['moving_locations'])}")
print("---")
#----------------------------------------------------------------------------------------------------
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None:
return False
#device_ids = sorted(set(record[1] for record in my_data))
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
base_minute = ConvertToBase(time_from_str, time_zone_s)
st = time.time()
if True:
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
print(time.time()-st)
if False:
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
for record in my_data:
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
minute, device_id = record[0:2]
values = record[2:] # All the max/min values
x = int((minute - base_minute).total_seconds()/60)
device_idx = device_to_index[device_id]
if bw:
for field_idx, value in enumerate(values):
# Calculate y position
y = device_idx * fields_n + field_idx
# Convert value to grayscale (0-100 to 0-255)
gray_value = int((value / 100.0) * 255.0)
# Set RGB values (all same for grayscale)
wave_m[y, x] = [gray_value, gray_value, gray_value]
else: #color
for field_idx, value in enumerate(values):
# Calculate y position
y = device_idx * 22 + field_idx
# Convert value to grayscale (0-100 to 0-255)
gray_value = int((value / 100.0) * 1279.0)
# Set RGB values (all same for grayscale)
wave_m[y, x] = BestColor(gray_value)
print(time.time()-st)
st = time.time()
for yy in range(stripes):
rgb_row = wave_m[yy]
for stretch_index in range(stretch_by):
y = yy * stretch_by + stretch_index
arr_stretched[y, :] = rgb_row
print(time.time()-st)
SaveImageInBlob(image_file, arr_stretched, labels)
#arr_source[2*gate, :] = wave_m
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
#for col in range(1440):
#sens_val = wave_m[col]
#if sens_val != 0:
#r,g,b=BestColor(km*(sens_val-m_min))
#if r > 255 or g > 255 or b > 255:
#print(r,g,b)
#rgb_row[col] = r,g,b
#for stretch_index in range(stretch_by):
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
##arr_stretched[y, :] = rgb_row
#if gate > 1:
#ks = 0
#if(s_max > s_min):
#if bw:
#ks = 255/(s_max - s_min)
#else:
#ks = 1280/(s_max - s_min)
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
#wave_s = np.array([0.0] * 1440)
#for minute_m in range(1440):
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
#arr_source[2*gate + 1, :] = wave_s
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
#for col in range(1440):
#sens_val = wave_s[col]
#if sens_val != 0:
#if bw:
#r = ks*(sens_val-s_min)
#g = r
#b = r
#else:
#r,g,b=BestColor(ks*(sens_val-s_min))
##print(r,g,b)
#rgb_row[col] = r,g,b
#for stretch_index in range(stretch_by):
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
#arr_stretched[y, :] = rgb_row
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
#arr_stretched[y, :] = rgb_row
print("stop")
def ConvertToBase(time_from_str, time_zone_s):
print(time_from_str)
dt = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
return dt
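# Hedged usage sketch (illustration only): the "%z" directive keeps the numeric UTC
# offset, so the returned base minute stays timezone-aware for the
# (minute - base_minute) arithmetic used by the heatmap builders. Example values only.
def _demo_convert_to_base():
    return ConvertToBase("2025-01-01 00:00:00-0800", "America/Los_Angeles")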
def GetTimeAndEvents(data):
"""
Calculates non-zero elements and consecutive non-zero groups using itertools.
This is often the most readable and efficient pure Python approach.
"""
# Fast way to count non-zeros since they are all 1.0
#non_zeros = int(sum(data))
non_zeros = sum(1 for x in data if x != 0)
# Count groups of non-zero elements
events = sum(1 for key, group in itertools.groupby(data) if key != 0.0)
return non_zeros, events
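# Hedged usage sketch (illustration only): itertools.groupby collapses runs of equal
# values, so each run of non-zero samples counts as one "event".
def _demo_time_and_events():
    data = [0, 1.0, 1.0, 0, 0, 1.0, 0, 1.0, 1.0, 1.0]
    return GetTimeAndEvents(data)  # -> (6, 3): six non-zero samples in three runs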
def current_date_at_tz(timezone_str):
"""
Returns the current date in the specified timezone in yyyy-mm-dd format.
Args:
timezone_str (str): Timezone string like "America/Los_Angeles"
Returns:
str: Current date in yyyy-mm-dd format
"""
# Get the timezone object
tz = pytz.timezone(timezone_str)
# Get current datetime in the specified timezone
current_dt = datetime.datetime.now(tz)
# Format as yyyy-mm-dd
return current_dt.strftime('%Y-%m-%d')
def GetActivities(device_id, well_id, date_str, filter_size, refresh, timezone_str, radar_threshold_group_st):
#filtered_day has non 0 points that exceeded threshold of radar reads
device_id_str = str(device_id)
try:
time_from_str, time_to_str = GetLocalTimeForDate(date_str, timezone_str)
filename_day_presence = f"/{device_id_str}/{device_id_str}_{date_str}_{filter_size}_presence.bin"
filtered_day_str = None
if refresh == False and date_str != current_date_at_tz(timezone_str):
has_larger = False
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, date_str)
if filtered_day_str != None and filtered_day_str != "":
has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
if has_larger:
filtered_day_str = None
if filtered_day_str == None:
radar_fields_of_interest = []
try:
threshold_lst = json.loads(radar_threshold_group_st)
except:
threshold_lst = ["s3_max",12]
radar_fields_of_interest = [threshold_lst[0]]
ids_list = [int(device_id)]
devices_list_str = device_id_str
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
sql = get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = None
my_data = cur.fetchall()
days_difference_long = 2
presence_map = {'longpresence': {}, 'raw': {}}
presence_map['longpresence'][well_id] = [0] * 6 * 1440 * days_difference_long
presence_map['raw'][well_id] = [0] * 6 * 1440 * days_difference_long
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3",12]
#device_id_2_location = {device_id: ""}
device_id_2_threshold = {device_id: radar_threshold_group}
device_field_indexes = {radar_threshold_group[0].split("_")[0]: 1} #len(radar_fields_of_interest)
id2well_id = {device_id: well_id}
if len(my_data) > 1:
start_time_ = my_data[0][0]
parsed_time_ = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = datetime.datetime(
#parsed_time.year,
#parsed_time.month,
#parsed_time.day,
#parsed_time.hour, # Adjust for UTC-7
#parsed_time.minute,
#parsed_time.second,
#tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
#)
presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, "presence")
presence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter_size, device_id_str, date_str, date_str, timezone_str)
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
filtered_day = json.loads(filtered_day_str)
else:
filtered_day = json.loads(filtered_day_str)
non_zeros, events = GetTimeAndEvents(filtered_day)
return(non_zeros / 360, events) #10-second samples ("decas") to hours: 360 samples per hour
except Exception as e:
print(filename_day_presence)
print(filtered_day_str)
print(traceback.format_exc())
return(0, 0)
def CreateFullLocationMap(location_image_file, devices_list, selected_date,
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
#global Id2MACDict
thresholds_dict = {}
stretch_to_min_max = True
devices_c = len(devices_list)
if devices_c == 0:
return
sensors_c = 1#len(sensors_table)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
image_file = location_image_file
minutes = 1440
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
#recreate .pkl files if missing
today_date = datetime.datetime.fromtimestamp(time.time())
if scale_global and chart_type != 3 and chart_type != 4: #"digital" and chart_type != "collapsed"
max_gate={}
for gate in range(9):
max_gate[str(gate)+"_m"] = 0
max_gate[str(gate)+"_s"] = 0
device_counter = 0
for details in devices_list:
MAC, threshold, location_name, description = details
if threshold == None:
threshold = '["s3_max",12]'
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
minute_radar_lists = ReadDailyRadar(MAC, current_date)
for gate in range(9):
for minute_m in range(1440):
if (minute_radar_lists[minute_m][gate] > max_gate[str(gate)+"_m"]):
max_gate[str(gate)+"_m"] = minute_radar_lists[minute_m][gate]
if gate > 1:
if (minute_radar_lists[minute_m][gate + 7] > max_gate[str(gate)+"_s"]):
max_gate[str(gate)+"_s"] = minute_radar_lists[minute_m][gate + 7]
if (chart_type == 2): #"analog"
#fields = ['absent_min', 'stationary_max', 'moving_max', 'both_max',
#'m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
#'m6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
#'s4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max']
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
fields_n = len(fields)
stripes = len(devices_list) * fields_n
device_counter = 0
stretch_by = 5
arr_source = np.zeros((stripes, minutes), dtype=float)
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
devices_list_str = ','.join(str(device[1]) for device in devices_list)
ids_list = []
labels = []
label_font = cv2.FONT_HERSHEY_SIMPLEX
label_font_scale = 1
label_font_color = (255, 255, 255)
label_font_thickness = 2
label_font_line = cv2.LINE_AA
cnt = 0
for details in devices_list:
dev_id = details[0]
ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
cnt += 1
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None or my_data == []:
return False
#device_ids = sorted(set(record[1] for record in my_data))
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
# Calculate base minute
base_minute = ConvertToBase(time_from_str, time_zone_s)
st = time.time()
if True:
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
print(time.time()-st)
if False:
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
for record in my_data:
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
minute, device_id = record[0:2]
values = record[2:] # All the max/min values
x = int((minute - base_minute).total_seconds()/60)
device_idx = device_to_index[device_id]
if bw:
for field_idx, value in enumerate(values):
# Calculate y position
y = device_idx * fields_n + field_idx
# Convert value to grayscale (0-100 to 0-255)
gray_value = int((value / 100.0) * 255.0)
# Set RGB values (all same for grayscale)
wave_m[y, x] = [gray_value, gray_value, gray_value]
else: #color
for field_idx, value in enumerate(values):
# Calculate y position
y = device_idx * 22 + field_idx
# Convert value to grayscale (0-100 to 0-255)
gray_value = int((value / 100.0) * 1279.0)
# Set RGB values (all same for grayscale)
wave_m[y, x] = BestColor(gray_value)
print(time.time()-st)
st = time.time()
for yy in range(stripes):
rgb_row = wave_m[yy]
for stretch_index in range(stretch_by):
y = yy * stretch_by + stretch_index
arr_stretched[y, :] = rgb_row
print(time.time()-st)
SaveImageInBlob(image_file, arr_stretched, labels)
#arr_source[2*gate, :] = wave_m
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
#for col in range(1440):
#sens_val = wave_m[col]
#if sens_val != 0:
#r,g,b=BestColor(km*(sens_val-m_min))
#if r > 255 or g > 255 or b > 255:
#print(r,g,b)
#rgb_row[col] = r,g,b
#for stretch_index in range(stretch_by):
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
##arr_stretched[y, :] = rgb_row
#if gate > 1:
#ks = 0
#if(s_max > s_min):
#if bw:
#ks = 255/(s_max - s_min)
#else:
#ks = 1280/(s_max - s_min)
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
#wave_s = np.array([0.0] * 1440)
#for minute_m in range(1440):
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
#arr_source[2*gate + 1, :] = wave_s
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
#for col in range(1440):
#sens_val = wave_s[col]
#if sens_val != 0:
#if bw:
#r = ks*(sens_val-s_min)
#g = r
#b = r
#else:
#r,g,b=BestColor(ks*(sens_val-s_min))
##print(r,g,b)
#rgb_row[col] = r,g,b
#for stretch_index in range(stretch_by):
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
#arr_stretched[y, :] = rgb_row
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
#arr_stretched[y, :] = rgb_row
print("stop")
elif (chart_type == 3): #"digital"
device_counter = 0
for details in devices_list:
dev_id = details[0]
MAC, threshold, location_id, description = GetMacThrFromId(dev_id)
if threshold == None:
threshold = 30
sensor = "Radar"
location_name = location_names[location_id]
pickle_file = os.path.join(scriptDir, "scratch/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_radarM.pkl")
pickle_file = pickle_file.replace("\\","/")
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
minute_radar_lists = ReadDailyRadar(MAC, current_date)
y = 0
sensor_index = 0
#location_index = 0
for gate in range(9):
threshold = 15
if (gate > 1):
threshold = thresholds_dict[dev_id][gate-2]
for minute_m in range(1440):
if (minute_radar_lists[minute_m][gate] > threshold):
minute_radar_lists[minute_m][gate] = 100
else:
minute_radar_lists[minute_m][gate] = 0
if gate > 1:
if (minute_radar_lists[minute_m][gate + 7] > threshold):
minute_radar_lists[minute_m][gate + 7] = 100
else:
minute_radar_lists[minute_m][gate + 7] = 0
m_max = 100
m_min = 0
s_max = 100
s_min = 0
km = 0
if(m_max > m_min):
km = 1280/(m_max - m_min)
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
wave_m = np.array([0.0] * 1440)
for minute_m in range(1440):
wave_m[minute_m] = minute_radar_lists[minute_m][gate]
if gate < 2:
DoDisplay(wave_m, location_name+" "+ description+" " + str(gate))
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
arr_source[2*gate, :] = wave_m
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
for col in range(1440):
sens_val = wave_m[col]
if sens_val != 0:
r,g,b=BestColor(km*(sens_val-m_min))
#print(r,g,b)
rgb_row[col] = r,g,b
for stretch_index in range(stretch_by):
y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
#print(y, row, devices_c, sensor_index, location_index, stretch_index)
#arr_stretched[y, :] = rgb_row
if gate > 1:
ks = 0
if(s_max > s_min):
if bw:
ks = 255/(s_max - s_min)
else:
ks = 1280/(s_max - s_min)
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
wave_s = np.array([0.0] * 1440)
for minute_m in range(1440):
wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
arr_source[2*gate + 1, :] = wave_s
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
for col in range(1440):
sens_val = wave_s[col]
if sens_val != 0:
if bw:
r = ks*(sens_val-s_min)
g = r
b = r
else:
r,g,b=BestColor(ks*(sens_val-s_min))
#print(r,g,b)
rgb_row[col] = r,g,b
for stretch_index in range(stretch_by):
y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
arr_stretched[y, :] = rgb_row
y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
arr_stretched[y, :] = rgb_row
device_counter += 1
print("stop")
elif (chart_type == 4): #"collapsed"
stretch_by = 50
arr_source = np.zeros((1, minutes), dtype=float)
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
device_counter = 0
wave_m = [["", -1] for _ in range(1440)]
devices_list_str = ','.join(str(device[1]) for device in devices_list)
ids_list = []
radar_fields_of_interest = []
for details in devices_list:
threshold_str = details[5]
try:
threshold_lst = json.loads(threshold_str)
except:
threshold_lst = ["s3_max",12]
if isinstance(threshold_lst, int):
threshold_lst = ["s3_max",threshold_lst]
radar_field = threshold_lst[0]
if radar_field not in radar_fields_of_interest:
radar_fields_of_interest.append(radar_field)
threshold = threshold_lst[1]
dev_id = details[0]
ids_list.append(details[1])
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None:
return False
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
row_nr_2_device_id = {}
cnt = 0
row_nr_2_device_id[0] = 0
for details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
cnt += 1
row_nr_2_device_id[cnt] = device_id
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3_max",12]
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
target_tz = pytz.timezone(time_zone_s)
st = time.time()
#each record in my_data has time, device_id and radar_fields_of_interest in it
result_np = None
try:
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
print(time.time() - st)
except Exception as err:
print(str(err))
if False:
for record in my_data:
time_val, device_id, min_val, max_val = record
radar_threshold = device_id_2_threshold[device_id]
local_time = time_val.astimezone(target_tz)
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
if (wave_m[minute_m][0] == ""):
if max_val > radar_threshold:
wave_m[minute_m][0] = device_id
wave_m[minute_m][1] = max_val
else:
if max_val > radar_threshold:
if max_val > wave_m[minute_m][1]:
wave_m[minute_m][0] = device_id
wave_m[minute_m][1] = max_val
print(time.time()-st)
if result_np is not None:
wave_m = result_np
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
#wave_m = FilterGlitches(wave_m, filter_minutes)
r = 0
g = 0
b = 0
if isinstance(wave_m[0], np.int64):
inital_device_id = row_nr_2_device_id[wave_m[0]]
else:
inital_device_id = 0
present_at = [[inital_device_id, 0, 1]] #device_id, minute, duration
for minute_m in range(1440):
try:
if isinstance(wave_m[minute_m], np.int64):
device_id = row_nr_2_device_id[wave_m[minute_m]]
else:
device_id = 0
if device_id != "" and device_id != -1:
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
rgb_row[minute_m] = b,g,r
if minute_m > 0:
if present_at[-1][0] != device_id:
present_at.append([device_id, minute_m, 1])
else:
present_at[-1][2] += 1
except Exception as err:
print(str(err))
for stretch_index in range(stretch_by):
y = stretch_index
arr_stretched[y, :] = rgb_row
#print("stop")
#print(r,g,b)
SaveObjectInBlob(image_file+".bin", present_at)
SaveImageInBlob(image_file, arr_stretched, [])
def CreateFullLocationMapLabelsOut(location_image_file, devices_list, selected_date,
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
#global Id2MACDict
thresholds_dict = {}
stretch_to_min_max = True
devices_c = len(devices_list)
if devices_c == 0:
return
sensors_c = 1#len(sensors_table)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
image_file = location_image_file
minutes = 1440
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
#recreate .pkl files if missing
today_date = datetime.datetime.fromtimestamp(time.time())
if (chart_type == 8): #"all graphs"
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
show_radar = True
show_light = True
show_temperature = True
show_humidity = True
show_smell = True
labels_width = 200
title_labels_height = 40
title_label_width = 100
#common
label_font = cv2.FONT_HERSHEY_SIMPLEX
label_font_line = cv2.LINE_AA
#different
title_label_font_scale = 1
title_label_font_color = (0, 0, 0)#(128, 255, 255)
title_label_font_thickness = 2
label_font_scale = 0.5
label_font_color = (0, 0, 0)#(0, 255, 255)
label_font_thickness = 1
fields_n = len(fields)
radar_stripes = len(devices_list) * fields_n
radar_stretch_by = 5
light_stripes = len(devices_list)
light_stretch_by = 20
smell_sensors_stripes = 10 * len(devices_list)
other_sensors_stripes = len(devices_list)
temp_stripe_width = 15
alarm_stripe_width = 5
temperature_stretch_by = temp_stripe_width + alarm_stripe_width # Total height per device
humidity_stripe_width = 15
humidity_stretch_by = humidity_stripe_width + alarm_stripe_width
smell_component_stretch_by = 8
text_dimensions = get_text_dimensions("TEST", label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
all_maps_height = 0
# radar, light, temperature, humidity, smell*10
if show_radar:
all_maps_height = title_labels_height + radar_stripes*radar_stretch_by
if show_light:
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*light_stretch_by
if show_temperature:
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*temperature_stretch_by
if show_humidity:
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*humidity_stretch_by
if show_smell:
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*smell_component_stretch_by * 10
if all_maps_height == 0:
return
vertical_offset = 0
arr_stretched = np.full((all_maps_height, minutes+labels_width, 3), [255, 174, 70], dtype=np.uint8)
#Lets add divider lines
x = 190
if show_radar:
stretch_by = radar_stretch_by
cnt = 0
for details in devices_list:
y = vertical_offset + title_labels_height + (cnt)*fields_n*stretch_by
arr_stretched[y, 190:201, :] = 0
cnt += 1
section_height = title_labels_height + radar_stripes*radar_stretch_by
vertical_offset = vertical_offset + section_height
if show_light:
stretch_by = light_stretch_by
cnt = 0
for details in devices_list:
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
arr_stretched[y, 190:201, :] = 0
cnt += 1
section_height = title_labels_height + other_sensors_stripes*stretch_by
vertical_offset = vertical_offset + section_height
if show_temperature:
stretch_by = temperature_stretch_by
cnt = 0
for details in devices_list:
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
arr_stretched[y, 190:201, :] = 0
cnt += 1
section_height = title_labels_height + other_sensors_stripes*stretch_by
vertical_offset = vertical_offset + section_height
if show_humidity:
stretch_by = humidity_stretch_by
cnt = 0
for details in devices_list:
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
arr_stretched[y, 190:201, :] = 0
cnt += 1
section_height = title_labels_height + other_sensors_stripes*humidity_stretch_by
vertical_offset = vertical_offset + section_height
if show_smell:
stretch_by = smell_component_stretch_by
cnt = 0
for details in devices_list:
y = vertical_offset + title_labels_height+ (cnt)*10*stretch_by
arr_stretched[y, 190:201, :] = 0
cnt += 1
#section_height = title_labels_height + other_sensors_stripes**stretch_by * 10
#vertical_offset = vertical_offset + section_height
#all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*stretch_by * 10
devices_list_str = ','.join(str(device[1]) for device in devices_list)
ids_list = []
labels = []
title_labels = []
vertical_offset = 0
######################################## RADAR ##################################################################
if show_radar:
title_label_text = "RADAR"
fields_s = fields
stripes = radar_stripes
stretch_by = radar_stretch_by
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
title_text_height = title_text_dimensions["height"]
title_label_width = title_text_dimensions["width"]
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2), vertical_offset + 10 + title_text_height), label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
title_labels.append(title_label)
cnt = 0
for details in devices_list:
dev_id = details[0]
ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
labels.append((descriptor, (10, vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
cnt += 1
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data != None and my_data != []:
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
# Calculate base minute
base_minute = ConvertToBase(time_from_str, time_zone_s)
st = time.time()
if True:
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
wave_m = create_radar_optimized_heatmap(my_data, bw, fields_s, wave_m, device_to_index, base_minute, time_zone_s)
print(time.time()-st)
st = time.time()
for yy in range(stripes):
rgb_row = wave_m[yy]
for stretch_index in range(radar_stretch_by):
y = yy * radar_stretch_by + stretch_index
arr_stretched[title_labels_height+y, 200:] = rgb_row
print(time.time()-st)
vertical_offset = vertical_offset + title_labels_height + stripes*radar_stretch_by
######################################## LIGHT ##################################################################
if show_light:
title_label_text = "LIGHT"
fields_s = ['light']
min_val = 0
max_val = 4095
stretch_by = light_stretch_by
stripes = len(devices_list) * len(fields_s) # Calculate number of rows needed
# Calculate the correct vertical offset for light section
# Draw the light section title at the correct position
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
title_text_height = title_text_dimensions["height"]
title_label_width = title_text_dimensions["width"]
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
vertical_offset + 10 + title_text_height),
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
title_labels.append(title_label)
# Draw device labels for light section
cnt = 0
light_ids_list = [] # Create a separate list for light section
for details in devices_list:
dev_id = details[0]
light_ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
# Position labels in the light section
labels.append((descriptor, (10, vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by),
label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
cnt += 1
# Get light data using the existing query function
sql = get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, light_ids_list)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()
if my_data != None and len(my_data) > 0:
device_to_index = {device: idx for idx, device in enumerate(light_ids_list)}
# Calculate base minute
base_minute = ConvertToBase(time_from_str, time_zone_s)
# Process light data
st = time.time()
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
# Use the light-specific function
wave_m = create_light_optimized_heatmap(my_data, bw, fields_s, wave_m,
device_to_index, base_minute, time_zone_s,
min_val, max_val)
print(f"Light heatmap creation time: {time.time()-st:.4f} seconds")
# Stretch the heatmap vertically
st = time.time()
section_start = vertical_offset + title_labels_height
for yy in range(stripes):
rgb_row = wave_m[yy]
for stretch_index in range(stretch_by):
y = yy * stretch_by + stretch_index
target_y = section_start + y
# Make sure we're within bounds of the array
if target_y < arr_stretched.shape[0]:
arr_stretched[target_y, labels_width:] = rgb_row
else:
print(f"Warning: Row {target_y} is out of bounds (max: {arr_stretched.shape[0]-1})")
vertical_offset = vertical_offset + title_labels_height + stripes*stretch_by
print(f"Light stretching time: {time.time()-st:.4f} seconds")
######################################## TEMPERATURE ##################################################################
if show_temperature:
title_label_text = "TEMPERATURE"
fields_s = ['temperature', 'temperature_state']
# Define different stripe widths for temperature and alarm
temp_offset = -10#GetTempOffset(device_id)
min_val = 20
max_val = 30
# Calculate the correct vertical offset for temperature section
vertical_offset = 0
if show_radar:
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
if show_light:
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
# Draw the temperature section title
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
title_text_height = title_text_dimensions["height"]
title_label_width = title_text_dimensions["width"]
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
vertical_offset + 10 + title_text_height),
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
title_labels.append(title_label)
# Draw device labels for temperature section
cnt = 0
temp_ids_list = [] # Create a separate list for temperature section
for details in devices_list:
dev_id = details[0]
temp_ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
# Position labels in the temperature section
y_pos = vertical_offset + title_labels_height + text_height + cnt * temperature_stretch_by
#y_pos = vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by)
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
label_font_color, label_font_thickness, label_font_line))
cnt += 1
# Get temperature data
sql = get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, temp_offset)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()
if my_data != None and len(my_data) > 0:
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
base_minute = ConvertToBase(time_from_str, time_zone_s)
# Process temperature data
st = time.time()
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
if False:
# Simulate data for testing
for i in range(min(len(my_data), 500)):
if i >= 100: # Only modify indices 100-500
t = (i - 100) / 4.0 # Temperature value
# Set correct alarm levels based on temperature
if CelsiusToFahrenheit(t) <= 50 or CelsiusToFahrenheit(t) >= 90:
alarm_level = 2 # Critical - should be red
elif CelsiusToFahrenheit(t) <= 60 or CelsiusToFahrenheit(t) >= 80:
alarm_level = 1 # Warning - should be yellow
else:
alarm_level = 0 # Normal - should be green
# Replace the tuple with new values
my_data[i] = (my_data[i][0], my_data[i][1], t, alarm_level)
# Create the heatmap data
wave_m = create_temperature_optimized_heatmap(my_data, bw, fields_s, wave_m,
device_to_index, base_minute, time_zone_s,
min_val, max_val)
print(f"Temperature heatmap creation time: {time.time()-st:.4f} seconds")
# Stretch the heatmap with different heights for temperature and alarm
st = time.time()
section_start = vertical_offset + title_labels_height
# Loop through each device
for device_idx in range(len(temp_ids_list)):
# Get the data rows for this device
temp_row = wave_m[device_idx * 2] # Temperature row (even index)
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
# Calculate the starting y-position for this device
device_y_start = section_start + device_idx * temperature_stretch_by
# Draw the temperature stripe (15 pixels)
for stretch_index in range(temp_stripe_width):
target_y = device_y_start + stretch_index
if target_y < arr_stretched.shape[0]:
arr_stretched[target_y, labels_width:] = temp_row
# Draw the alarm stripe (5 pixels)
for stretch_index in range(alarm_stripe_width):
target_y = device_y_start + temp_stripe_width + stretch_index
if target_y < arr_stretched.shape[0]:
arr_stretched[target_y, labels_width:] = alarm_row
print(f"Temperature stretching time: {time.time()-st:.4f} seconds")
######################################## HUMIDITY ##################################################################
'''
Ideal indoor humidity: 30-50%
Too dry: Below 30% - Can cause dry skin, irritated eyes, and respiratory issues
Too humid: Above 60% - Feels warmer than actual temperature, promotes mold growth
'''
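# Illustrative alarm-level mapping (an assumption inferred from the test-simulation
# thresholds further below, not necessarily what get_deployment_humidity_only_query returns):
#   alarm_level 0 (green)  : roughly 30% <= humidity <= 50%
#   alarm_level 1 (yellow) : roughly 20-30% or 50-60%
#   alarm_level 2 (red)    : below ~20% or above ~60%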
if show_humidity:
title_label_text = "HUMIDITY"
fields_s = ['humidity', 'humidity_state']
# Define different stripe widths for humidity and alarm
humidity_offset = 0
min_val = 40
max_val = 90#60
# Calculate the correct vertical offset for the humidity section
vertical_offset = 0
if show_radar:
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
if show_light:
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
if show_temperature:
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
# Draw the humidity section title
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
title_text_height = title_text_dimensions["height"]
title_label_width = title_text_dimensions["width"]
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
vertical_offset + 10 + title_text_height),
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
title_labels.append(title_label)
# Draw device labels for humidity section
cnt = 0
temp_ids_list = [] # Create a separate list for the humidity section
for details in devices_list:
dev_id = details[0]
temp_ids_list.append(details[1])
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
# Position labels in the humidity section
y_pos = vertical_offset + title_labels_height + text_height + cnt * humidity_stretch_by
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
label_font_color, label_font_thickness, label_font_line))
cnt += 1
# Get humidity data
sql = get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, humidity_offset)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()
if my_data != None and len(my_data) > 0:
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
base_minute = ConvertToBase(time_from_str, time_zone_s)
# Process humidity data
st = time.time()
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
if False:
# Simulate data for testing
for i in range(min(len(my_data), 500)):
if i >= 100: # Only modify indices 100-500
h = (i - 100) / 4.0 # Humidity value
# Set correct alarm levels based on humidity
if h <= 20 or h >= 60:
alarm_level = 2 # Critical - should be red
elif h <= 30 or h >= 50:
alarm_level = 1 # Warning - should be yellow
else:
alarm_level = 0 # Normal - should be green
# Replace the tuple with new values
my_data[i] = (my_data[i][0], my_data[i][1], h, alarm_level)
# Create the heatmap data
wave_m = create_humidity_optimized_heatmap(my_data, bw, fields_s, wave_m,
device_to_index, base_minute, time_zone_s,
min_val, max_val)
print(f"Humidity heatmap creation time: {time.time()-st:.4f} seconds")
# Stretch the heatmap with different heights for humidity and alarm
st = time.time()
section_start = vertical_offset + title_labels_height
# Loop through each device
for device_idx in range(len(temp_ids_list)):
# Get the data rows for this device
humidity_row = wave_m[device_idx * 2] # Humidity row (even index)
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
# Calculate the starting y-position for this device
device_y_start = section_start + device_idx * humidity_stretch_by
# Draw the humidity stripe (15 pixels)
for stretch_index in range(humidity_stripe_width):
target_y = device_y_start + stretch_index
if target_y < arr_stretched.shape[0]:
arr_stretched[target_y, labels_width:] = humidity_row
# Draw the alarm stripe (5 pixels)
for stretch_index in range(alarm_stripe_width):
target_y = device_y_start + humidity_stripe_width + stretch_index
if target_y < arr_stretched.shape[0]:
arr_stretched[target_y, labels_width:] = alarm_row
print(f"Temperature stretching time: {time.time()-st:.4f} seconds")
######################################## SMELL ##################################################################
if show_smell:
title_label_text = "SMELL"
fields_s = ['S0', 'S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'S7', 'S8', 'S9']
# Offset applied to the smell readings
smell_offset = 0
# Calculate the correct vertical offset for the smell section
vertical_offset = 0
if show_radar:
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
if show_light:
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
if show_temperature:
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
if show_humidity:
vertical_offset += title_labels_height + other_sensors_stripes * humidity_stretch_by
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
# Draw the smell section title
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
title_text_height = title_text_dimensions["height"]
title_label_width = title_text_dimensions["width"]
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
vertical_offset + 10 + title_text_height),
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
title_labels.append(title_label)
# Draw device labels for smell section
cnt = 0
temp_ids_list = [] # Create a separate list for the smell section
for details in devices_list:
temp_ids_list.append(details[1])
# Get smell data
sql = get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, smell_offset)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()
if my_data != None and len(my_data) > 0:
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
base_minute = ConvertToBase(time_from_str, time_zone_s)
has_new_format = any(len(rec) > 12 and rec[12] is not None and 100 <= rec[12] <= 170 for rec in my_data)
# Create the heatmap data
arr_stretched = create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields_s, device_to_index, base_minute, time_zone_s, smell_component_stretch_by, selected_date, vertical_offset + 18 + title_text_height, has_new_format)
for details in devices_list:
dev_id = details[0]
descriptor = details[2]
if details[3] != None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] != None and details[6] != "":
descriptor = descriptor + " " + details[6]
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
text_height = text_dimensions["height"]
# Position labels in the smell section
if has_new_format:
y_pos = vertical_offset + title_labels_height +20+ text_height + cnt * 80
else:
y_pos = vertical_offset + title_labels_height +20+ text_height + cnt * smell_component_stretch_by * 10
#y_pos = vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by)
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
label_font_color, label_font_thickness, label_font_line))
cnt += 1
SaveImageInBlobLabelsOut(image_file, arr_stretched, labels, title_labels)
print("stop")
def CreateDailyLocationMap(location_image_file, devices_list, selected_date, filter_minutes, time_zone_s, stretch_by):
devices_c = len(devices_list)
sensors_c = 1#len(sensors_table)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
image_file = location_image_file
minutes = 1440
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
#recreate .pkl files if missing
today_date = datetime.datetime.fromtimestamp(time.time())
arr_source = np.zeros((1, minutes), dtype=float)
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
arr_stretched_sorted = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
device_counter = 0
wave_m = [["", -1] for _ in range(1440)]
devices_list_str = ','.join(str(device[1]) for device in devices_list)
ids_list = []
radar_fields_of_interest = []
for details in devices_list:
threshold_str = details[5]
try:
threshold_lst = json.loads(threshold_str)
if len(threshold_lst) > 2:
threshold_lst = ["s3_max",12]
radar_field = threshold_lst[0]
except:
threshold_lst = ["s3_max",12]
radar_field = threshold_lst[0]
if radar_field not in radar_fields_of_interest:
radar_fields_of_interest.append(radar_field)
threshold = threshold_lst[1]
dev_id = details[0]
ids_list.append(details[1])
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data == None:
return False
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
row_nr_2_device_id = {}
cnt = 0
row_nr_2_device_id[0] = 0
for details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
cnt += 1
row_nr_2_device_id[cnt] = device_id
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3_max",12]
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
target_tz = pytz.timezone(time_zone_s)
st = time.time()
#each record in my_data has time, device_id and radar_fields_of_interest in it
try:
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
print(time.time() - st)
except Exception as err:
print(str(err))
if False:
for record in my_data:
time_val, device_id, min_val, max_val = record
radar_threshold = device_id_2_threshold[device_id]
local_time = time_val.astimezone(target_tz)
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
if (wave_m[minute_m][0] == ""):
if max_val > radar_threshold:
wave_m[minute_m][0] = device_id
wave_m[minute_m][1] = max_val
else:
if max_val > radar_threshold:
if max_val > wave_m[minute_m][1]:
wave_m[minute_m][0] = device_id
wave_m[minute_m][1] = max_val
print(time.time()-st)
wave_m = result_np
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
rgbsorted_row = np.zeros(( 1440, 3), dtype=np.uint8)
#wave_m = FilterGlitches(wave_m, filter_minutes)
r = 0
g = 0
b = 0
presence_minutes = {}
#we want to generate present_at array
if isinstance(wave_m[0], np.int64):
initial_device_id = row_nr_2_device_id[wave_m[0]]
else:
initial_device_id = 0
present_at = [[initial_device_id, 0, 1]] #device_id, minute, duration
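# present_at is a run-length encoding of the day: each entry is
# [device_id, start_minute, duration_in_minutes]. For example (illustrative values),
# [[0, 0, 30], [559, 30, 45]] would mean "no device for the first 30 minutes,
# then device 559 for the next 45 minutes".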
for minute_m in range(1440):
try:
if isinstance(wave_m[minute_m], np.int64):
device_id = row_nr_2_device_id[wave_m[minute_m]]
else:
device_id = 0
if device_id != "" and device_id != -1:
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
rgb_row[minute_m] = b,g,r
if Loc2Color[device_id_2_location[device_id]][1] in presence_minutes:
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [presence_minutes[Loc2Color[device_id_2_location[device_id]][1]][0] + 1, Loc2Color[device_id_2_location[device_id]][0]]
else:
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [1, Loc2Color[device_id_2_location[device_id]][0]]
if minute_m > 0:
if present_at[-1][0] != device_id:
present_at.append([device_id, minute_m, 1])
else:
present_at[-1][2] += 1
except Exception as err:
print(str(err))
start_minute = 0
for color_key in sorted(presence_minutes):
print(color_key, presence_minutes[color_key])
rgbsorted_row[start_minute:start_minute+presence_minutes[color_key][0]] = presence_minutes[color_key][1][::-1]
start_minute += presence_minutes[color_key][0]
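# The sorted row packs per-location minute totals into contiguous colored blocks,
# ordered by the Loc2Color index, so the companion "...S.png" image shows total
# time per location rather than the time-of-day sequence.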
#we need to save present_at list to blob
SaveObjectInBlob(image_file+".bin", present_at)
#present_at_back_s = ReadObjectMinIO("daily-maps", image_file+".bin")
#present_at_back = json.loads(present_at_back_s)
#print(present_at_back)
for stretch_index in range(stretch_by):
y = stretch_index
arr_stretched[y, :] = rgb_row
arr_stretched_sorted[y, :] = rgbsorted_row
#print("stop")
#print(r,g,b)
SaveImageInBlob(image_file, arr_stretched, [])
SaveImageInBlob(image_file[:-4]+"S.png", arr_stretched_sorted, [])
def GenerateFullLocationMap(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
st = time.time()
if CreateFullLocationMap(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
print(ddate, "Not found")
else:
print(ddate, time.time() - st)
def GenerateFullLocationMapLabelsOut(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
st = time.time()
if CreateFullLocationMapLabelsOut(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
print(ddate, "Not found")
else:
print(ddate, time.time() - st)
def fast_interpolate_unwrapped_data(unwrapped_data):
"""
Corrected version that interpolates data for each device independently to prevent data corruption.
"""
if not unwrapped_data:
return unwrapped_data
# 1. Group all records by their device_id first
data_by_device = defaultdict(list)
for record in unwrapped_data:
device_id = record[1]
data_by_device[device_id].append(record)
all_interpolated_records = []
# 2. Process each device's data in its own isolated group
for device_id in data_by_device:
device_records = data_by_device[device_id]
# Sort by time to ensure correct interpolation order (important!)
device_records.sort(key=lambda x: x[0])
# The interpolation logic below now runs on one device's records at a time
sensor_data = []
other_data = []
for record in device_records:
other_data.append(record[:7])
sensor_row = [record[i] if i < len(record) and record[i] is not None else np.nan for i in range(7, 87)]
sensor_data.append(sensor_row)
sensor_array = np.array(sensor_data, dtype=float)
for col in range(sensor_array.shape[1]):
values = sensor_array[:, col]
valid_mask = ~np.isnan(values)
if not np.any(valid_mask):
continue
valid_indices = np.where(valid_mask)[0]
if len(valid_indices) > 1:
values = np.interp(np.arange(len(values)), valid_indices, values[valid_indices])
else:
valid_value = values[valid_indices[0]]
values = np.full_like(values, valid_value)
sensor_array[:, col] = values
# Reconstruct this device's data and add it to the final list
for i, other_record in enumerate(other_data):
full_record = list(other_record) + sensor_array[i].tolist()
all_interpolated_records.append(tuple(full_record))
# 3. Return the combined list of all correctly interpolated records
return all_interpolated_records
def FillImage_hybrid(scaled_day, devices_c, arr_stretched_template, bw):
"""
Hybrid visualization:
- Environmental sensors (temp, humidity, pressure, light, radar): 10-pixel tall stripes
- Smell sensors (s0-s79): Individual pixels
Total height: 5*10 + 80 = 130 pixels per device
"""
minutes = scaled_day.shape[1] - 4
measurements_per_device = 85 # 5 environmental + 80 smell sensors
# Environmental sensor indices (first 5 measurements per device)
env_sensor_names = ["temperature", "humidity", "pressure", "light", "radar"]
vocs_scaled = {}
# Process each device
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Collect VOC data (the last five smell sensors of each decade: s5-s9, s15-s19, ...)
voc_rows = []
for decade in range(8):
for voc_sensor in [5, 6, 7, 8, 9]:
sensor_row_idx = device_offset + 5 + (decade * 10 + voc_sensor)
if sensor_row_idx < scaled_day.shape[0]:
voc_rows.append(sensor_row_idx)
if voc_rows:
voc_data = scaled_day[voc_rows, :minutes]
vocs_scaled[device_idx] = voc_data
# Fill the visualization
for minute in range(minutes):
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
device_y_offset = device_idx * 130 # 130 pixels per device (5*10 + 80)
# Fill environmental sensors (10 pixels each)
for env_idx in range(5):
row_idx = device_offset + env_idx
if row_idx < scaled_day.shape[0]:
value = scaled_day[row_idx, minute]
# Calculate color for this environmental sensor
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = (value - min_val) / (max_val - min_val)
normalized = max(0, min(1, normalized))
else:
normalized = 0.5
# Convert to color using your existing functions
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
# For temperature, use your special temperature scaling
if env_idx == 0: # Temperature is first environmental sensor
# Convert to Fahrenheit for your temperature scaling
temp_f = value * 9/5 + 32 # Assuming value is in Celsius
color = list(GetTemperatureColor(temp_f))
else:
# Other environmental sensors use standard color mapping
color_val = int(normalized * 1279)
color = list(BestColor(color_val))
# Fill 10 pixels for this environmental sensor
for stripe_y in range(10):
pixel_y = device_y_offset + env_idx * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
else:
# No data - black pixels
for stripe_y in range(10):
pixel_y = device_y_offset + env_idx * 10 + stripe_y
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = [0, 0, 0]
# Fill smell sensors (1 pixel each, 80 total)
for sensor_idx in range(80):
row_idx = device_offset + 5 + sensor_idx # +5 for environmental sensors
if row_idx < scaled_day.shape[0]:
pixel_y = device_y_offset + 50 + sensor_idx # +50 for environmental stripes
value = scaled_day[row_idx, minute]
if value >= 0:
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = (value - min_val) / (max_val - min_val)
normalized = max(0, min(1, normalized))
else:
normalized = 0.5
# Invert smell sensor colors (like original VOC behavior)
normalized = 1 - normalized
if bw:
color = list(GrayColor(int(normalized * 255)))
else:
color_val = int(normalized * 1279)
color = list(BestColor(color_val))
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = color
else:
# No data - black pixel
if pixel_y < arr_stretched_template.shape[0]:
arr_stretched_template[pixel_y, minute] = [0, 0, 0]
return arr_stretched_template, vocs_scaled
def CreateMapFast_hybrid(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by):
"""Updated CreateMapFast with hybrid visualization"""
global Id2MACDict, s_table_temp
st = time.time()
error_string = ""
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
try:
has_old_format = False  # default so early returns and the except handler have a value
lower_than200 = 0
larger_than200 = 0
ids_list = []
for details in devices_list[0]:
dev_id = details[0]
ids_list.append(details[1])
if dev_id < 200:
lower_than200 += 1
else:
larger_than200 += 1
if lower_than200 > 0 and larger_than200 > 0:
error_string = "Mixed types of devices are not allowed"
return False, [], has_old_format, error_string
if larger_than200 > 0:
#sensors_c = 80
measurements_per_device = 85
else:
error_string = "No new devices (well_id > 300) are found"
return False, [], error_string
devices_c = len(devices_list[0])
devices_list_str = ",".join(map(str, devices_list[1]))
image_file = map_file
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
temp_offset = -10
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
day_data = cur.fetchall()
if day_data == None:
error_string = "No data found"
return False, [], has_old_format, error_string
# Detect format in the data
has_old_format = any(record[17] in [0, 17] for record in day_data if len(record) > 17 and record[17] is not None)
has_new_format = any(100 <= record[17] <= 170 for record in day_data if len(record) > 17 and record[17] is not None)
#print(f"Data format detected: old={has_old_format}, new={has_new_format}")
# Don't support mixed formats
#if has_old_format and has_new_format:
# error_string = "Mixed formats in single deployment not supported"
# AddToLog(error_string)
# return False, [], has_old_format, error_string
# Unwrap and interpolate
unwrapped_data = unwrap_sensor_data(day_data, devices_list[1], time_from_str)
interpolated_data = fast_interpolate_unwrapped_data(unwrapped_data)
# Setup arrays based on format
minutes = 1440
stripes = devices_c * measurements_per_device
total_height = devices_c * 120 #15 stripes × 8 pixels
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
arr_stretched_template = np.zeros((total_height, minutes, 3), dtype=np.uint8)
arr_source = fast_fill_array_from_unwrapped(interpolated_data, devices_list[1], arr_source_template, time_from_str)
#arr_source = GenerateTestPattern(arr_source, devices_list)
arr_source = AddLimits_optimized_80(arr_source, devices_c, percentile=100)
scaled_day = CalcExtremes(arr_source, minutes, stripes)
#if has_old_format:
# arr_stretched, vocs_scaled = FillImage_old_format(scaled_day, devices_c, arr_stretched_template, bw, group_by)
#else:
arr_stretched, vocs_scaled = FillImage_new_format(scaled_day, devices_c, arr_stretched_template, bw, group_by)
metadata = {
'hasoldformat': has_old_format
}
SaveImageInBlob(image_file, arr_stretched, [], metadata)
return True, vocs_scaled, has_old_format, error_string
except Exception as e:
error_string = traceback.format_exc()
AddToLog(error_string)
return False, [], has_old_format, error_string
def CreateMapFast(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by):
global Id2MACDict, s_table_temp
st = time.time()
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
try:
lower_than200 = 0
larger_than200 = 0
ids_list = []
for details in devices_list[0]:
dev_id = details[0]
ids_list.append(details[1])
if dev_id < 200:
lower_than200 += 1
else:
larger_than200 += 1
if lower_than200 > 0 and larger_than200 > 0:
return False, []
if larger_than200 > 0:
sensors_c = 80 # 80 sensors after unwrapping
measurements_per_device = 85 # 5 other + 80 sensors
else: #old sensors not supported
return False, []
devices_c = len(devices_list[0])
devices_list_str = ",".join(map(str, devices_list[1]))
image_file = map_file
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
temp_offset = -10
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
day_data = cur.fetchall()
if day_data == None:
return False, []
# Unwrap the sensor data into consistent s0-s79 format
unwrapped_data = unwrap_sensor_data(day_data, devices_list[1], time_from_str)
unwrapped_data = fast_interpolate_unwrapped_data(unwrapped_data)
# Setup arrays for 80-sensor format
minutes = 1440
stripes = devices_c * measurements_per_device # Each device has 85 measurements
total_height = 80 # 80 pixels for 80 sensors
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
arr_stretched_template = np.zeros((total_height, minutes, 3), dtype=np.uint8)
# Fill array using the fixed function
arr_source = fast_fill_array_from_unwrapped(unwrapped_data, devices_list[1], arr_source_template, time_from_str)
# INVERT BME680 sensor values (rows 5-84 for each device = smell sensors)
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
for sensor_idx in range(80): # All 80 smell sensors
row_idx = device_offset + 5 + sensor_idx
if row_idx < arr_source.shape[0]:
# Invert all values in this row
for minute in range(arr_source.shape[1] - 4): # Don't touch the metadata columns
val = arr_source[row_idx, minute]
if val > 0:
arr_source[row_idx, minute] = 102400000 - val
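# Assumption about intent: inverting against the fixed ceiling (102400000) makes lower raw
# readings render as higher values; only positive samples are inverted, missing data stays as-is.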
# Use the updated AddLimits function for 80 sensors
arr_source = AddLimits_optimized_80(arr_source, devices_c, percentile=100)
# Calculate extremes
scaled_day = CalcExtremes(arr_source, minutes, stripes)
# Use the fixed uniform processing function
arr_stretched, vocs_scaled = FillImage_uniform(scaled_day, devices_c, arr_stretched_template, bw)
SaveImageInBlob(image_file, arr_stretched, [])
return True, vocs_scaled
except Exception as e:
AddToLog(traceback.format_exc())
return False, []
def FillImage_uniform(scaled_day, devices_c, arr_stretched_template, bw):
"""
Fill image array for uniform 80-sensor data.
scaled_day shape: (devices_c * 85, minutes+4)
"""
minutes = scaled_day.shape[1] - 4 # Subtract the 4 extra columns
measurements_per_device = 85
# Collect all VOC data into a single array
all_voc_data = []
# Process each device
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Extract VOC sensor rows (the last five sensors of each decade: 5-9, 15-19, 25-29, ...)
voc_rows = []
for decade in range(8): # 8 decades of 10 sensors each
for voc_sensor in [5, 6, 7, 8, 9]: # VOC sensors in each decade
sensor_row_idx = device_offset + 5 + (decade * 10 + voc_sensor) # +5 for other measurements
if sensor_row_idx < scaled_day.shape[0]:
voc_rows.append(sensor_row_idx)
# Collect VOC data for this device
if voc_rows:
voc_data = scaled_day[voc_rows, :minutes]
all_voc_data.append(voc_data)
# Convert to single numpy array
if all_voc_data:
vocs_scaled = np.vstack(all_voc_data)
else:
vocs_scaled = np.array([])
# Fill the image array (80 pixels tall)
for minute in range(minutes):
for device_idx in range(devices_c):
device_offset = device_idx * measurements_per_device
# Fill sensor data (80 pixels for 80 sensors)
for sensor_idx in range(80):
row_idx = device_offset + 5 + sensor_idx # +5 for other measurements
if row_idx < scaled_day.shape[0]:
pixel_y = sensor_idx # Direct mapping
value = scaled_day[row_idx, minute]
if value >= 0: # Valid data
# Normalize value using min/max from columns 1442, 1443
min_val = scaled_day[row_idx, 1442] if scaled_day.shape[1] > 1442 else 0
max_val = scaled_day[row_idx, 1443] if scaled_day.shape[1] > 1443 else 1
if max_val > min_val:
normalized = (value - min_val) / (max_val - min_val)
normalized = max(0, min(1, normalized)) # Clamp to [0,1]
else:
normalized = 0.5
if bw: # Black and white
intensity = int(normalized * 255)
arr_stretched_template[pixel_y, minute] = [intensity, intensity, intensity]
else: # Color mapping
if normalized < 0.5:
# Blue to green
r = 0
g = int(normalized * 2 * 255)
b = int((1 - normalized * 2) * 255)
else:
# Green to red
r = int((normalized - 0.5) * 2 * 255)
g = int((1 - (normalized - 0.5) * 2) * 255)
b = 0
arr_stretched_template[pixel_y, minute] = [r, g, b]
else:
# No data - black pixel
arr_stretched_template[pixel_y, minute] = [0, 0, 0]
return arr_stretched_template, vocs_scaled
def get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset):
"""
Simple query to get raw sensor data without complex interpolation.
"""
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
sr.temperature+ {temp_offset} as temperature,
sr.humidity,
sr.pressure,
sr.light,
rr.radar,
sr.s0, sr.s1, sr.s2, sr.s3, sr.s4, sr.s5, sr.s6, sr.s7, sr.s8, sr.s9,
sr.mtype
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
mtype,
AVG(temperature) AS temperature,
AVG(humidity) AS humidity,
AVG(pressure) AS pressure,
MAX(light) AS light,
MIN(CASE WHEN s0 > 0 THEN s0 END) AS s0,
MIN(CASE WHEN s1 > 0 THEN s1 END) AS s1,
MIN(CASE WHEN s2 > 0 THEN s2 END) AS s2,
MIN(CASE WHEN s3 > 0 THEN s3 END) AS s3,
MIN(CASE WHEN s4 > 0 THEN s4 END) AS s4,
MIN(CASE WHEN s5 > 0 THEN s5 END) AS s5,
MIN(CASE WHEN s6 > 0 THEN s6 END) AS s6,
MIN(CASE WHEN s7 > 0 THEN s7 END) AS s7,
MIN(CASE WHEN s8 > 0 THEN s8 END) AS s8,
MIN(CASE WHEN s9 > 0 THEN s9 END) AS s9
FROM sensor_readings
WHERE device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
AND mtype IN (0, 17, 100, 110, 120, 130, 140, 150, 160, 170)
GROUP BY minute, device_id, mtype
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS radar
FROM radar_readings
WHERE device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY minute, device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute),
sr.mtype;
"""
return sql
def get_deployment_query_80(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset):
"""
Query to get sensor data with s0-s79 columns mapped from mtype values.
Maps mtype values to column ranges:
- mtype 0, 17, 100 -> s0-s9
- mtype 110 -> s10-s19
- mtype 120 -> s20-s29
- ... up to mtype 170 -> s70-s79
"""
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# Generate column mappings for each mtype range
column_mappings = []
# S0-S9 (mtype = 0, 17, or 100)
for i in range(10):
column_mappings.append(f"MIN(CASE WHEN mtype IN (0, 17, 100) AND s{i} > 0 THEN s{i} END) AS s{i}")
# S10-S79 (mtype = 110 to 170)
for mtype_offset in range(11, 18): # 110, 120, 130, 140, 150, 160, 170
mtype_value = mtype_offset * 10
base_col = (mtype_offset - 10) * 10
for i in range(10):
source_col = i
target_col = base_col + i
column_mappings.append(f"MIN(CASE WHEN mtype = {mtype_value} AND s{source_col} > 0 THEN s{source_col} END) AS s{target_col}")
columns_sql = ",\n ".join(column_mappings)
# Generate the s0-s79 column list for the outer SELECT
outer_columns = []
for i in range(80):
outer_columns.append(f"sr.s{i}")
outer_columns_str = ", ".join(outer_columns)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
sr.temperature + {temp_offset} as temperature,
sr.humidity,
sr.pressure,
sr.light,
rr.radar,
{outer_columns_str}
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(temperature) AS temperature,
AVG(humidity) AS humidity,
AVG(pressure) AS pressure,
MAX(light) AS light,
{columns_sql}
FROM sensor_readings
WHERE device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
AND mtype IN (0, 17, 100, 110, 120, 130, 140, 150, 160, 170)
GROUP BY minute, device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS radar
FROM radar_readings
WHERE device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY minute, device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
return sql
def unwrap_smell_only_data(day_data, devices_list, time_from_str):
"""
A specialized version of unwrap_sensor_data that handles a data format
containing ONLY smell sensor data (s0-s9) and mtype.
It outputs a standardized 87-column record with placeholders for environmental data.
"""
if not day_data:
return []
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
data_by_minute = defaultdict(list)
for record in day_data:
if record[0] and record[1]:
minute = int((record[0] - start_time).total_seconds() / 60)
data_by_minute[minute].append(record)
unwrapped_data = []
for minute in range(1440):
minute_records = data_by_minute.get(minute, [])
if not minute_records:
continue
records_by_device = defaultdict(list)
for record in minute_records:
device_id = record[1]
records_by_device[device_id].append(record)
for device_id, device_records in records_by_device.items():
sensors_80 = [None] * 80
for record in device_records:
# In this data format s0-s9 are at indices 2 through 11 and mtype is at index 12
sensor_values = record[2:12]
mtype = record[12] if len(record) > 12 and record[12] is not None else 17
# The unwrapping logic for old vs. new format is still valid
if mtype == 17 or mtype == 0:
for decade in range(8):
for sensor_idx in range(10):
if sensor_idx < len(sensor_values) and sensor_values[sensor_idx] is not None:
target_pos = decade * 10 + sensor_idx
sensors_80[target_pos] = sensor_values[sensor_idx]
elif 100 <= mtype <= 170:
base_pos = mtype - 100
for sensor_idx in range(10):
if sensor_idx < len(sensor_values) and sensor_values[sensor_idx] is not None:
target_pos = base_pos + sensor_idx
if target_pos < 80:
sensors_80[target_pos] = sensor_values[sensor_idx]
minute_time = start_time + datetime.timedelta(minutes=minute)
# Build a standard record with placeholders; downstream functions expect
# [time, dev_id, temp, hum, pres, light, radar, s0-s79]
unwrapped_record = [
minute_time,
device_id,
None, # temp placeholder
None, # humidity placeholder
None, # pressure placeholder
None, # light placeholder
None, # radar placeholder
] + sensors_80
unwrapped_data.append(tuple(unwrapped_record))
return unwrapped_data
def unwrap_sensor_data(day_data, devices_list, time_from_str):
"""Debug version to see what happens during unwrapping"""
if not day_data:
return []
# Check device distribution in raw data
device_ids_in_raw = {}
for record in day_data:
device_id = record[1]
device_ids_in_raw[device_id] = device_ids_in_raw.get(device_id, 0) + 1
#print(f"Raw data device distribution: {device_ids_in_raw}")
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
data_by_minute = defaultdict(list)
for record in day_data:
if record[0] and record[1]:
minute = int((record[0] - start_time).total_seconds() / 60)
data_by_minute[minute].append(record)
unwrapped_data = []
device_ids_in_unwrapped = {}
for minute in range(1440):
minute_records = data_by_minute.get(minute, [])
if not minute_records:
continue
# Group by device_id first so that multiple devices reporting in the same minute are processed separately
records_by_device = defaultdict(list)
for record in minute_records:
device_id = record[1]
records_by_device[device_id].append(record)
# Process each device separately
for device_id, device_records in records_by_device.items():
sensors_80 = [None] * 80
temperature_vals, humidity_vals, pressure_vals = [], [], []
light_vals, radar_vals = [], []
# 1. Check which mtypes are present for this specific device and minute
mtypes_present = {rec[17] for rec in device_records if len(rec) > 17 and rec[17] is not None}
# 2. Decide if this is a "new format" minute; if any new mtype is found, treat it as such
is_new_format_minute = any(100 <= m <= 170 for m in mtypes_present)
for record in device_records:
mtype = record[17] if len(record) > 17 and record[17] is not None else 17
# 3. If this is a new-format minute, skip any old-format records to prevent data corruption
if is_new_format_minute and (mtype == 0 or mtype == 17):
continue # Ignore this old-format record
# Collect other measurements
if record[2] is not None: temperature_vals.append(record[2])
if record[3] is not None: humidity_vals.append(record[3])
if record[4] is not None: pressure_vals.append(record[4])
if record[5] is not None: light_vals.append(record[5])
if record[6] is not None: radar_vals.append(record[6])
# Process sensor values
sensor_values = record[7:17]
if mtype == 17 or mtype == 0:
# Old format: duplicate across all decades
for decade in range(8):
for sensor_idx in range(10):
target_pos = decade * 10 + sensor_idx
if sensor_values[sensor_idx] is not None:
sensors_80[target_pos] = sensor_values[sensor_idx]
elif 100 <= mtype <= 170:
# New format: map to specific positions
base_pos = mtype - 100
for sensor_idx in range(10):
target_pos = base_pos + sensor_idx
if target_pos < 80 and sensor_values[sensor_idx] is not None:
sensors_80[target_pos] = sensor_values[sensor_idx]
# Create unwrapped record for this device and minute
minute_time = start_time + datetime.timedelta(minutes=minute)
avg_temp = sum(temperature_vals) / len(temperature_vals) if temperature_vals else None
avg_humidity = sum(humidity_vals) / len(humidity_vals) if humidity_vals else None
avg_pressure = sum(pressure_vals) / len(pressure_vals) if pressure_vals else None
avg_light = max(light_vals) if light_vals else None
avg_radar = sum(radar_vals) / len(radar_vals) if radar_vals else None
unwrapped_record = [
minute_time, device_id, avg_temp, avg_humidity,
avg_pressure, avg_light, avg_radar
] + sensors_80
unwrapped_data.append(tuple(unwrapped_record))
# Track device distribution in unwrapped data
device_ids_in_unwrapped[device_id] = device_ids_in_unwrapped.get(device_id, 0) + 1
#print(f"Unwrapped data device distribution: {device_ids_in_unwrapped}")
return unwrapped_data
def fast_fill_array_from_unwrapped(unwrapped_data, devices_list, arr_source, time_from_str):
"""
Fill array from unwrapped sensor data with proper row structure:
For each device: [temperature, humidity, pressure, light, radar, s0, s1, ..., s79]
"""
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
#print(f"Device mapping: {device_to_index}")
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
measurements_per_device = 85 # 5 other + 80 sensors
#records_processed = 0
#values_written = 0
#device_data_count = {}
for record in unwrapped_data:
device_id = record[1]
if device_id not in device_to_index:
#print(f"Skipping device_id {device_id} - not in device_to_index")
continue
# Calculate minute from timestamp
record_time = record[0]
minute = int((record_time - start_time).total_seconds() / 60)
if minute < 0 or minute >= arr_source.shape[1]:
#print(f"Skipping minute {minute} - out of bounds")
continue
#records_processed += 1
#if records_processed <= 5: # Debug first 5 records
# print(f"Processing record {records_processed}: device_id={device_id}, minute={minute}")
device_idx = device_to_index[device_id]
device_offset = device_idx * measurements_per_device
# Fill the 5 other measurements first
other_measurements = record[2:7] # temperature, humidity, pressure, light, radar
for i, value in enumerate(other_measurements):
if value is not None:
row_idx = device_offset + i
if row_idx < arr_source.shape[0]:
arr_source[row_idx, minute] = value
#values_written += 1
# Fill the 80 sensor values
sensor_values = record[7:87] # s0-s79
for i, value in enumerate(sensor_values):
if value is not None:
row_idx = device_offset + 5 + i # +5 to skip other measurements
if row_idx < arr_source.shape[0]:
arr_source[row_idx, minute] = value
#values_written += 1
#print(f"Data distribution: {device_data_count}")
#print(f"devices_list: {devices_list}")
#print(f"Total records processed: {records_processed}")
#print(f"Total values written: {values_written}")
return arr_source
def get_deployment_single_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, sensor_in):
"""
Generate a TimeScaleDB query for a single sensor reading based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_part (str): Radar column expression used when querying radar readings
temp_offset (float): Temperature offset to apply
sensor_in (str): Sensor to query; if it contains '_', the part after the first '_' selects the sensor (temperature, humidity, pressure, light, radar, voc0-voc9)
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
if "_" in sensor_in:
sensor = sensor_in.split("_")[1]
else:
sensor = sensor_in
# Handle different sensor types
if sensor == "radar":
# Query only radar readings
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS {sensor_in}
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
elif sensor == "temperature":
# Query temperature with offset
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(temperature) + {temp_offset} AS {sensor_in}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
elif sensor == "humidity":
# Query humidity
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(humidity) AS {sensor_in}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
elif sensor == "pressure":
# Query pressure
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(pressure) AS {sensor_in}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
elif sensor == "light":
# Query light
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX(light) AS {sensor_in}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
elif sensor.startswith("voc"):
# Query VOC sensors (voc0-voc9) - these correspond to s0-s9 in the original query
voc_num = sensor[3:] # Extract number from "voc0", "voc1", etc.
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MIN(CASE WHEN s{voc_num} > 0 THEN s{voc_num} END) AS {sensor_in}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
else:
raise ValueError(f"Unknown sensor type: {sensor}. Must be one of: temperature, humidity, pressure, light, radar, voc0-voc9")
return sql
def get_deployment_single_query_rz(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, sensor):
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_part (str): Radar column expression used when querying radar readings
temp_offset (float): Temperature offset to apply
sensor (str): Sensor name (accepted but not used in the generated SQL)
Returns:
str: Generated SQL query
"""
#table_sens = {"temperature": (f"sr.avg_temperature+ {temp_offset} as avg_temperature", "avg_temperature"),
#"humidity": ("sr.avg_humidity", "avg_humidity"),
#"pressure": ("sr.pressure_amplitude", "pressure_amplitude"),
#"light":("sr.max_light", "max_light"),
#"radar":("rr.radar")
#"voc0":
#"voc1":
#"voc2":
#"voc3":
#"voc4":
#"voc5":
#"voc6":
#"voc7":
#"voc8":
#"voc9": ,
#}
# derived
#if sensor ==
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
sr.avg_temperature+ {temp_offset} as avg_temperature,
sr.avg_humidity,
sr.pressure_amplitude,
sr.max_light,
rr.radar,
sr.min_s0 as sensor_min_s0,
sr.min_s1 as sensor_min_s1,
sr.min_s2 as sensor_min_s2,
sr.min_s3 as sensor_min_s3,
sr.min_s4 as sensor_min_s4,
sr.min_s5 as sensor_min_s5,
sr.min_s6 as sensor_min_s6,
sr.min_s7 as sensor_min_s7,
sr.min_s8 as sensor_min_s8,
sr.min_s9 as sensor_min_s9
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(temperature) AS avg_temperature,
AVG(humidity) AS avg_humidity,
AVG(pressure) AS pressure_amplitude,
MAX(light) AS max_light,
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS radar
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
return sql
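# Illustrative call (device IDs, times and offset below are made-up placeholders, not taken
# from a real deployment); the builder only formats SQL and does not touch the database:
#   sql = get_deployment_single_query_rz(
#       devices_list_str="501,502",
#       time_from_str="2025-01-01 00:00:00+0000",
#       time_to_str="2025-01-02 00:00:00+0000",
#       ids_list=[501, 502],
#       radar_part="(s2+s3+s4+s5+s6+s7+s8)/7",
#       temp_offset=-10,
#       sensor="temperature",  # accepted but not used by this builder
#   )
#   # Execute with a psycopg2 cursor or pandas.read_sql_query to get one row per device per minute.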
def get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
#radar detailed
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
temp_offset (float): Temperature offset added to the averaged temperature
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
sr.avg_temperature+{temp_offset} as avg_temperature,
sr.avg_humidity,
sr.avg_pressure,
sr.max_light,
sr.min_s0 as smell_s0,
sr.min_s1 as smell_s1,
sr.min_s2 as smell_s2,
sr.min_s3 as smell_s3,
sr.min_s4 as smell_s4,
sr.min_s5 as smell_s5,
sr.min_s6 as smell_s6,
sr.min_s7 as smell_s7,
sr.min_s8 as smell_s8,
sr.min_s9 as smell_s9,
rr.absent as radar_absent,
rr.moving as radar_moving,
rr.stationary as radar_stationary,
rr.both as radar_both,
rr.m0 as radar_m0,
rr.m1 as radar_m1,
rr.m2 as radar_m2,
rr.m3 as radar_m3,
rr.m4 as radar_m4,
rr.m5 as radar_m5,
rr.m6 as radar_m6,
rr.m7 as radar_m7,
rr.m8 as radar_m8,
rr.s2 as radar_s2,
rr.s3 as radar_s3,
rr.s4 as radar_s4,
rr.s5 as radar_s5,
rr.s6 as radar_s6,
rr.s7 as radar_s7,
rr.s8 as radar_s8
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(temperature) AS avg_temperature,
AVG(humidity) AS avg_humidity,
AVG(pressure) AS avg_pressure,
MAX(light) AS max_light,
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX(absent) AS absent,
MAX(moving) AS moving,
MAX(stationary) AS stationary,
MAX(\"both\") AS both,
MAX(m0) AS m0,
MAX(m1) AS m1,
MAX(m2) AS m2,
MAX(m3) AS m3,
MAX(m4) AS m4,
MAX(m5) AS m5,
MAX(m6) AS m6,
MAX(m7) AS m7,
MAX(m8) AS m8,
MAX(s2) AS s2,
MAX(s3) AS s3,
MAX(s4) AS s4,
MAX(s5) AS s5,
MAX(s6) AS s6,
MAX(s7) AS s7,
MAX(s8) AS s8
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
return sql
def get_deployment_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
#radar only (per-minute s2..s8 maxima)
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
rr.s2 as radar_s2,
rr.s3 as radar_s3,
rr.s4 as radar_s4,
rr.s5 as radar_s5,
rr.s6 as radar_s6,
rr.s7 as radar_s7,
rr.s8 as radar_s8
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX(s2) AS s2,
MAX(s3) AS s3,
MAX(s4) AS s4,
MAX(s5) AS s5,
MAX(s6) AS s6,
MAX(s7) AS s7,
MAX(s8) AS s8
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
return sql
def get_device_radar_s28_only_query(time_from_str, time_to_str, device_id):
sql = f"""
SELECT
time,
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
FROM
radar_readings
WHERE
device_id = {device_id}
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
ORDER BY
time ASC
"""
return sql
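# The derived columns above collapse the per-gate radar values into two scalars:
# s28 = (s2+...+s8)/7 and m28 = (m2+...+m8)/7, i.e. the mean of gates 2-8 for the
# s- and m-series respectively (seven gates each, hence the division by 7).
# Example call with illustrative arguments:
#   sql = get_device_radar_s28_only_query("2025-01-01 00:00:00+0000",
#                                         "2025-01-01 06:00:00+0000",
#                                         device_id=501)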
def get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, device_id):
sql = f"""
SELECT
time,
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
ORDER BY
time ASC
"""
return sql
def get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
#radar collapsed: per-minute aggregation of only the requested radar fields
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_fields_of_interest (list): List of unique radar fields required across all devices
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
radar_fields_to_get = ""
q_parts = ""
for field in radar_fields_of_interest:
if field == "s28_min":
q_part = "MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min"
elif field == "s28_max":
q_part = "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max"
elif field == "m08_max":
q_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max"
elif field == "s2_max":
q_part = "MAX(s2) AS s2_max"
elif field == "s3_max":
q_part = "MAX(s3) AS s3_max"
elif field == "s4_max":
q_part = "MAX(s4) AS s4_max"
elif field == "s5_max":
q_part = "MAX(s5) AS s5_max"
elif field == "s6_max":
q_part = "MAX(s6) AS s6_max"
elif field == "s7_max":
q_part = "MAX(s7) AS s7_max"
elif field == "s8_max":
q_part = "MAX(s8) AS s8_max"
elif field == "m0_max":
q_part = "MAX(m0) AS m0_max"
elif field == "m1_max":
q_part = "MAX(m1) AS m1_max"
elif field == "m2_max":
q_part = "MAX(m2) AS m2_max"
elif field == "m3_max":
q_part = "MAX(m3) AS m3_max"
elif field == "m4_max":
q_part = "MAX(m4) AS m4_max"
elif field == "m5_max":
q_part = "MAX(m5) AS m5_max"
elif field == "m6_max":
q_part = "MAX(m6) AS m6_max"
elif field == "m7_max":
q_part = "MAX(m7) AS m7_max"
elif field == "m8_max":
q_part = "MAX(m8) AS m8_max"
else:
q_part = field
if q_parts == "":
q_parts = q_part
else:
q_parts = q_parts + ", " + q_part
if radar_fields_to_get == "":
radar_fields_to_get = field
else:
radar_fields_to_get = radar_fields_to_get + ", " + field
sql = f"""
SELECT
minute,
device_id,
{radar_fields_to_get}
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
{q_parts}
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
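# Example (illustrative values): request only the collapsed fields needed for a plot.
# Unrecognised field names fall through unchanged, so they must already be valid SQL.
#   sql = get_deployment_radar_only_colapsed_query(
#       devices_list_str="501,502",
#       time_from_str="2025-01-01 00:00:00+0000",
#       time_to_str="2025-01-02 00:00:00+0000",
#       ids_list=[501, 502],
#       radar_fields_of_interest=["s28_max", "s28_min", "m08_max"],
#   )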
def get_deployment_radar_only_colapsed_query_wid(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
#radar collapsed (body currently identical to get_deployment_radar_only_colapsed_query)
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_fields_of_interest (list): List of unique radar fields required across all devices
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
radar_fields_to_get = ""
q_parts = ""
for field in radar_fields_of_interest:
if field == "s28_min":
q_part = "MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min"
elif field == "s28_max":
q_part = "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max"
elif field == "m08_max":
q_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max"
elif field == "s2_max":
q_part = "MAX(s2) AS s2_max"
elif field == "s3_max":
q_part = "MAX(s3) AS s3_max"
elif field == "s4_max":
q_part = "MAX(s4) AS s4_max"
elif field == "s5_max":
q_part = "MAX(s5) AS s5_max"
elif field == "s6_max":
q_part = "MAX(s6) AS s6_max"
elif field == "s7_max":
q_part = "MAX(s7) AS s7_max"
elif field == "s8_max":
q_part = "MAX(s8) AS s8_max"
elif field == "m0_max":
q_part = "MAX(m0) AS m0_max"
elif field == "m1_max":
q_part = "MAX(m1) AS m1_max"
elif field == "m2_max":
q_part = "MAX(m2) AS m2_max"
elif field == "m3_max":
q_part = "MAX(m3) AS m3_max"
elif field == "m4_max":
q_part = "MAX(m4) AS m4_max"
elif field == "m5_max":
q_part = "MAX(m5) AS m5_max"
elif field == "m6_max":
q_part = "MAX(m6) AS m6_max"
elif field == "m7_max":
q_part = "MAX(m7) AS m7_max"
elif field == "m8_max":
q_part = "MAX(m8) AS m8_max"
else:
q_part = field
if q_parts == "":
q_parts = q_part
else:
q_parts = q_parts + ", " + q_part
if radar_fields_to_get == "":
radar_fields_to_get = field
else:
radar_fields_to_get = radar_fields_to_get + ", " + field
sql = f"""
SELECT
minute,
device_id,
{radar_fields_to_get}
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
{q_parts}
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
def get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list):
#radar detailed
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
Returns:
str: Generated SQL query
This query looks for presence, NOT absence; otherwise all MAXes would need to be converted to MINs.
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
minute,
device_id,
m0_max,
m1_max,
m2_max,
m3_max,
m4_max,
m5_max,
m6_max,
m7_max,
m8_max,
m08_max,
s2_max,
s3_max,
s4_max,
s5_max,
s6_max,
s7_max,
s8_max,
s28_max,
s28_min
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX(m0) AS m0_max,
MAX(m1) AS m1_max,
MAX(m2) AS m2_max,
MAX(m3) AS m3_max,
MAX(m4) AS m4_max,
MAX(m5) AS m5_max,
MAX(m6) AS m6_max,
MAX(m7) AS m7_max,
MAX(m8) AS m8_max,
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
MAX(s2) AS s2_max,
MAX(s3) AS s3_max,
MAX(s4) AS s4_max,
MAX(s5) AS s5_max,
MAX(s6) AS s6_max,
MAX(s7) AS s7_max,
MAX(s8) AS s8_max,
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max,
MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
def get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
#light detailed
"""
Generate a TimeScaleDB query for light readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
minute,
device_id,
light_max
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX(light) AS light_max
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
def get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
"""
Generate a TimeScaleDB query for temperature readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
temp_offset (float): Temperature offset to apply
Returns:
str: Generated SQL query with temperature and alarm level
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# SQL query with added alarm_level calculation
sql = f"""
SELECT
minute,
device_id,
temperature_avg,
CASE
WHEN (temperature_avg * 9/5 + 32) <= 50 OR (temperature_avg * 9/5 + 32) >= 90 THEN 2
WHEN (temperature_avg * 9/5 + 32) <= 60 OR (temperature_avg * 9/5 + 32) >= 80 THEN 1
ELSE 0
END AS alarm_level
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
AVG(temperature)+{temp_offset} AS temperature_avg
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
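# Alarm levels are computed on the Fahrenheit equivalent of temperature_avg:
#   <= 50 F or >= 90 F -> 2 (critical), <= 60 F or >= 80 F -> 1 (warning), else 0.
# Worked example: temperature_avg = 15.5 C -> 15.5 * 9/5 + 32 = 59.9 F -> alarm_level 1.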
def get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
"""
Generate a TimeScaleDB query for humidity readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
humidity_offset (float): Humidity offset (currently unused; a fixed linear calibration is applied instead)
Returns:
str: Generated SQL query with humidity and alarm level
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
col_expr = f"2.3592 * AVG(humidity) + 23.5546" #= 2.3592 * J2 + 33.5546
# SQL query with added alarm_level calculation
sql = f"""
SELECT
minute,
device_id,
humidity_avg,
CASE
WHEN humidity_avg <= 20 OR humidity_avg >= 60 THEN 2
WHEN humidity_avg <= 30 OR humidity_avg >= 50 THEN 1
ELSE 0
END AS alarm_level
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
{col_expr} AS humidity_avg
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
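# humidity_avg applies a fixed linear calibration to the raw average:
#   humidity_avg = 2.3592 * AVG(humidity) + 23.5546
# Worked example: raw average 25 -> 2.3592 * 25 + 23.5546 = 82.5 -> alarm_level 2 (>= 60).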
def get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
"""
Generate a TimeScaleDB query for smell readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
humidity_offset (float): Humidity offset (currently unused by this query)
Returns:
str: Generated SQL query with smell components
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# SQL query with added alarm_level calculation
sql = f"""
SELECT
sr.minute as minute,
sr.device_id as device_id,
sr.s0, sr.s1, sr.s2, sr.s3, sr.s4, sr.s5, sr.s6, sr.s7, sr.s8, sr.s9,
sr.mtype
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
mtype,
MIN(CASE WHEN s0 > 0 THEN s0 END) AS s0,
MIN(CASE WHEN s1 > 0 THEN s1 END) AS s1,
MIN(CASE WHEN s2 > 0 THEN s2 END) AS s2,
MIN(CASE WHEN s3 > 0 THEN s3 END) AS s3,
MIN(CASE WHEN s4 > 0 THEN s4 END) AS s4,
MIN(CASE WHEN s5 > 0 THEN s5 END) AS s5,
MIN(CASE WHEN s6 > 0 THEN s6 END) AS s6,
MIN(CASE WHEN s7 > 0 THEN s7 END) AS s7,
MIN(CASE WHEN s8 > 0 THEN s8 END) AS s8,
MIN(CASE WHEN s9 > 0 THEN s9 END) AS s9
FROM sensor_readings
WHERE device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
AND mtype IN (0, 17, 100, 110, 120, 130, 140, 150, 160, 170)
GROUP BY minute, device_id, mtype
) sr
ORDER BY
CASE sr.device_id
{case_order}
END,
sr.minute,
sr.mtype;
"""
return sql
def get_deployment_radar_only_detailed_all_query(devices_list_str, time_from_str, time_to_str, ids_list):
#radar detailed
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
Returns:
str: Generated SQL query
This query looks for presence, NOT absence; otherwise all MAXes would need to be converted to MINs.
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
minute,
device_id,
absent_min,
stationary_max,
moving_max,
both_max,
m0_max,
m1_max,
m2_max,
m3_max,
m4_max,
m5_max,
m6_max,
m7_max,
m8_max,
m08_max,
s2_max,
s3_max,
s4_max,
s5_max,
s6_max,
s7_max,
s8_max,
s28_max
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MIN(absent) AS absent_min,
MAX(stationary) AS stationary_max,
MAX(moving) AS moving_max,
MAX("both") AS both_max,
MAX(m0) AS m0_max,
MAX(m1) AS m1_max,
MAX(m2) AS m2_max,
MAX(m3) AS m3_max,
MAX(m4) AS m4_max,
MAX(m5) AS m5_max,
MAX(m6) AS m6_max,
MAX(m7) AS m7_max,
MAX(m8) AS m8_max,
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
MAX(s2) AS s2_max,
MAX(s3) AS s3_max,
MAX(s4) AS s4_max,
MAX(s5) AS s5_max,
MAX(s6) AS s6_max,
MAX(s7) AS s7_max,
MAX(s8) AS s8_max,
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ORDER BY
CASE device_id
{case_order}
END,
minute
"""
return sql
def get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
"""
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
temp_offset (float): Temperature offset added to the averaged temperature
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
sr.avg_temperature+{temp_offset} as avg_temperature,
sr.avg_humidity,
sr.avg_pressure,
sr.max_light,
sr.min_s0 as smell_s0,
sr.min_s1 as smell_s1,
sr.min_s2 as smell_s2,
sr.min_s3 as smell_s3,
sr.min_s4 as smell_s4,
sr.min_s5 as smell_s5,
sr.min_s6 as smell_s6,
sr.min_s7 as smell_s7,
sr.min_s8 as smell_s8,
sr.min_s9 as smell_s9,
rr.absent as radar_absent,
rr.moving as radar_moving,
rr.stationary as radar_stationary,
rr.both as radar_both,
rr.m0 as radar_m0,
rr.m1 as radar_m1,
rr.m2 as radar_m2,
rr.m3 as radar_m3,
rr.m4 as radar_m4,
rr.m5 as radar_m5,
rr.m6 as radar_m6,
rr.m7 as radar_m7,
rr.m8 as radar_m8,
rr.s2 as radar_s2,
rr.s3 as radar_s3,
rr.s4 as radar_s4,
rr.s5 as radar_s5,
rr.s6 as radar_s6,
rr.s7 as radar_s7,
rr.s8 as radar_s8
FROM (
SELECT
time_bucket('10 seconds', time) AS minute,
device_id,
AVG(temperature) AS avg_temperature,
AVG(humidity) AS avg_humidity,
AVG(pressure) AS avg_pressure,
MAX(light) AS max_light,
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('10 seconds', time) AS minute,
device_id,
MAX(absent) AS absent,
MAX(moving) AS moving,
MAX(stationary) AS stationary,
MAX(\"both\") AS both,
MAX(m0) AS m0,
MAX(m1) AS m1,
MAX(m2) AS m2,
MAX(m3) AS m3,
MAX(m4) AS m4,
MAX(m5) AS m5,
MAX(m6) AS m6,
MAX(m7) AS m7,
MAX(m8) AS m8,
MAX(s2) AS s2,
MAX(s3) AS s3,
MAX(s4) AS s4,
MAX(s5) AS s5,
MAX(s6) AS s6,
MAX(s7) AS s7,
MAX(s8) AS s8
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
return sql
def get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
"""
Generate a TimeScaleDB query for radar readings based on device IDs with time snapped to 10-second intervals.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_fields_of_interest (list): List of field names required across all devices
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# Handle fields processing
select_fields = []
for field in radar_fields_of_interest:
if field == "s28":
select_fields.append("(s2+s3+s4+s5+s6+s7+s8)/7 AS s28")
else:
select_fields.append(field)
fields_str = ", ".join(select_fields)
sql = f"""
SELECT
time_bucket('10 seconds', time) AS ten_seconds,
device_id,
{fields_str}
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
ORDER BY
CASE device_id
{case_order}
END,
ten_seconds
"""
return sql
def get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
"""
Generate a TimeScaleDB query for radar readings based on device IDs with time snapped to 10-second intervals.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_fields_of_interest (list): List of field names required across all devices
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering based on the provided ids_list
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# Handle fields processing
select_fields = []
for field in radar_fields_of_interest:
radar_fields = field.split("_")
field_t = radar_fields[0]
if field_t == "s28":
if radar_fields[1] == "max":
select_fields.append("MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
else:
select_fields.append("MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
else:
if radar_fields[1] == "max":
select_fields.append(f"MAX({field_t}) as {field}")
else:
select_fields.append(f"MIN({field_t}) as {field}")
fields_str = ", ".join(select_fields)
sql = f"""
SELECT
time_bucket('10 seconds', time) AS ten_seconds,
device_id,
{fields_str}
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
ten_seconds,
device_id
ORDER BY
CASE device_id
{case_order}
END,
ten_seconds
"""
return sql
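# Field names are parsed as "<column>_<agg>", e.g. "s28_max" or "m4_min"; any suffix other
# than "max" selects MIN. Illustrative call:
#   sql = get_deployment_radar_10sec_snapped_query_min_max(
#       "501,502", "2025-01-01 00:00:00+0000", "2025-01-01 01:00:00+0000",
#       [501, 502], ["s28_max", "m4_max"])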
def export_query_to_minio_chunked(connection_params, query, minio_client, bucket_name, blob_name=None, chunksize=10000):
"""
Export query results to MinIO as CSV in chunks to handle large datasets
Parameters:
connection_params (dict): Database connection parameters
query (str): SQL query to execute
minio_client: Initialized MinIO client
bucket_name (str): Name of the MinIO bucket
blob_name (str): Name for the blob in MinIO. If None, generates timestamped name
chunksize (int): Number of rows to process at a time
Returns:
str: Name of the created blob
"""
try:
# Create direct connection using psycopg2
conn = psycopg2.connect(**connection_params)
# Generate blob name if not provided
if blob_name is None:
timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
blob_name = f'query_results_{timestamp}.csv'
# Create a buffer to store CSV data
csv_buffer = io.StringIO()
# Stream the query results in chunks
first_chunk = True
for chunk_df in pd.read_sql_query(query, conn, chunksize=chunksize):
# Write header only for the first chunk
chunk_df.to_csv(
csv_buffer,
index=False,
header=first_chunk,
mode='a'
)
first_chunk = False
# Get the CSV data as bytes
csv_buffer.seek(0)
csv_bytes = csv_buffer.getvalue().encode('utf-8')
# Upload to MinIO
minio_client.put_object(
bucket_name,
blob_name,
io.BytesIO(csv_bytes),
len(csv_bytes)
)
print(f"Data exported successfully to MinIO: {bucket_name}/{blob_name}")
return blob_name
except Exception as e:
print(f"Error exporting data: {str(e)}")
print(f"Traceback: {traceback.format_exc()}")
raise
finally:
if 'conn' in locals():
conn.close()
if 'csv_buffer' in locals():
csv_buffer.close()
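# Sketch of a typical call (the blob name is a placeholder; connection parameters and the
# MinIO client are the same ones used elsewhere in this module):
#   params = {"host": DB_HOST, "database": DB_NAME, "user": DB_USER,
#             "password": DB_PASSWORD, "port": DB_PORT}
#   blob = export_query_to_minio_chunked(params, sql, miniIO_blob_client,
#                                        "data-downloads", "example_export.csv")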
def export_query_to_csv_pandas(connection_params, query, output_path=None):
"""
Export query results to CSV using pandas with psycopg2 connection
Parameters:
connection_params (dict): Database connection parameters
query (str): SQL query to execute
output_path (str): Path for output CSV file. If None, generates timestamped filename
Returns:
str: Path to the created CSV file
"""
try:
# Create direct connection using psycopg2
conn = psycopg2.connect(**connection_params)
# Generate output path if not provided
if output_path is None:
timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
output_path = f'query_results_{timestamp}.csv'
# Read query directly into DataFrame using the psycopg2 connection
df = pd.read_sql_query(query, conn)
# Export to CSV with all headers
df.to_csv(output_path, index=False)
print(f"Data exported successfully to {output_path}")
return output_path
except Exception as e:
print(f"Error exporting data: {str(e)}")
raise
finally:
if 'conn' in locals():
conn.close()
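# Local-file variant of the export above; if output_path is omitted a timestamped
# 'query_results_YYYYMMDD_HHMMSS.csv' is written in the working directory:
#   path = export_query_to_csv_pandas(params, sql)  # params as in the MinIO example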
def CreateDailyCSV(csv_file, devices_list, selected_date, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset):
global Id2MACDict
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
try:
#stretch_to_min_max = True
#current_date_p = selected_date.replace("-", "_")
#current_date_s = selected_date
lower_than200 = 0
larger_than200 = 0
ids_list = []
for details in devices_list[0]:
dev_id = details[0]
ids_list.append(details[1])
if dev_id < 200:
lower_than200 += 1
else:
larger_than200 += 1
if lower_than200 > 0 and larger_than200 > 0:
return ""
if larger_than200 > 0:
sensors_c = len(s_table)
else: #old sensors not supported
return ""
devices_c = len(devices_list[0])
devices_list_str = ",".join(map(str, devices_list[1]))
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
if consolidated_by == "by_minute_rc":
sql = get_deployment_query_80(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
elif consolidated_by == "by_deca_rd":
sql = get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
elif consolidated_by == "by_minute_rd":
sql = get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
print(sql)
connection_params = {
'host': DB_HOST,
'database': DB_NAME,
'user': DB_USER,
'password': DB_PASSWORD,
'port': DB_PORT
}
# Using pandas approach (recommended)
output_file = export_query_to_minio_chunked(
connection_params,
sql,
miniIO_blob_client,
"data-downloads",
csv_file,
chunksize=10000
)
return output_file
except Exception as e:
logging.error(str(traceback.format_exc()))
return ""
def GetBlob(file_name, bucket_name="daily-maps"):
"""
Retrieve image from blob storage
Args:
file_name (str): Name of the file to retrieve from blob storage
Returns:
tuple: (image_bytes, content_type, metadata)
Returns None, None, None if image not found or error occurs
"""
logger.debug(f"GetBlob({file_name})")
try:
# Get the object from blob storage
data = miniIO_blob_client.get_object(
bucket_name,
file_name
)
# Read the data into bytes
data_bytes = data.read()
#logger.debug(f"len(data_bytes)={len(data_bytes)}")
stat = miniIO_blob_client.stat_object(bucket_name, file_name)
metadata = stat.metadata if stat.metadata else {}
if bucket_name == "daily-maps":
return data_bytes, 'image/png', metadata
else:
return data_bytes, 'application/zip', metadata
except Exception as e:
logger.error(f"Error: {traceback.format_exc()}")
return None, None, None
def GetJPG(file_name, bucket_name="user-pictures"):
"""
Retrieve image from blob storage
Args:
file_name (str): Name of the file to retrieve from blob storage
Returns:
tuple: (image_bytes, content_type)
Returns None, None if image not found or error occurs
"""
logger.debug(f"GetJPG({file_name})")
try:
# Get the object from blob storage
data = miniIO_blob_client.get_object(
bucket_name,
file_name
)
# Read the data into bytes
data_bytes = data.read()
#logger.debug(f"len(data_bytes)={len(data_bytes)}")
if bucket_name == "user-pictures":
return data_bytes, 'image/jpg'
else:
return data_bytes, 'application/zip'
except Exception as e:
logger.error(f"Error: {traceback.format_exc()}")
return None, None
def MapFileToDate(map_file):
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
parts = map_file.split("/")
parts = parts[-1].split("_")
if "-" in parts[0]:
date_string = parts[0]
elif "-" in parts[1]:
date_string = parts[1]
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
date_only = date_object.date()
return date_only
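# Example with the path format shown above:
#   MapFileToDate('/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png')
#   -> datetime.date(2023, 11, 7)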
def CSVFileToDate(csv_file):
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
parts = csv_file.split("/")
parts = parts[-1].split("_")
if "-" in parts[0]:
date_string = parts[0]
elif "-" in parts[1]:
date_string = parts[1]
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
date_only = date_object.date()
return date_only
def GetMACsListSimple(list_of_lists):
result = []
if len(list_of_lists) > 0:
result = [sublist[3] for sublist in list_of_lists]
return(result)
def datetime_handler(obj):
"""Handle datetime serialization for JSON"""
if isinstance(obj, datetime.datetime):
if obj.tzinfo:
return obj.isoformat()
return obj.strftime('%Y-%m-%d %H:%M:%S.%f')
raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
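# Intended as the json.dumps fallback for datetime values, e.g.:
#   json.dumps({"t": datetime.datetime(2025, 1, 1, 12, 0)}, default=datetime_handler)
#   -> '{"t": "2025-01-01 12:00:00.000000"}'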
def ReadCandles(file, sensor, period, time_from, time_to):
result = []
if sensor == "voc0":
sqlr = "SELECT * from vocs_0"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc1":
sqlr = "SELECT * from vocs_1"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc2":
sqlr = "SELECT * from vocs_2"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc3":
sqlr = "SELECT * from vocs_3"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc4":
sqlr = "SELECT * from vocs_4"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc5":
sqlr = "SELECT * from vocs_5"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc6":
sqlr = "SELECT * from vocs_6"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc7":
sqlr = "SELECT * from vocs_7"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc8":
sqlr = "SELECT * from vocs_8"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
elif sensor == "voc9":
sqlr = "SELECT * from vocs_9"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
else:
sqlr = "SELECT * from "+sensor+"s"+period+ " WHERE Date >= "+str(time_from) + " AND Date <= "+str(time_to)
logger.debug(f"sqlr = {sqlr}")
with get_db_connection() as conn:
with conn.cursor() as cur:
devices_string = ReadCleanStringDB(cur, sqlr)
result = QuerrySql(file, sqlr)
return result
def ReadSensor(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, temp_offset, bucket_size="no"):
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
legal_min, legal_max, window = sensor_legal_values[sensor]
result = []
if sensor == "radar":
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
elif sensor[0] == "s":
if len(sensor) <= 2:
sqlr = f"SELECT time, {sensor} AS smell FROM sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
else:
#we need to re-map s_index to s0-9 and mtype
s_i = int(sensor[1:])
smell_index = s_i % 10
mtype = 100 + 10 * int(s_i / 10)
sqlr = f"SELECT time, s{smell_index} AS smell FROM sensor_readings WHERE device_id = {device_id} AND s{smell_index} >= '{legal_min}' AND s{smell_index} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' AND mtype={mtype} ORDER BY time ASC"
else:
if sensor == "temperature":
#sqlr = f"SELECT time, {sensor} - 16 from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
#sqlr = f"SELECT time, {sensor} + {temp_offset} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
sqlr = f"SELECT time, {sensor} + {temp_offset} FROM (SELECT time, {sensor}, ROW_NUMBER() OVER (ORDER BY time) as rn FROM sensor_readings WHERE device_id = {device_id} AND {sensor} >= '1' AND {sensor} <= '99' AND time >= '{time_from_str}' AND time <= '{time_to_str}') subquery WHERE rn % 8 = 1 ORDER BY time"
elif sensor == "humidity":
#sqlr = f"SELECT time, {sensor} - 16 from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
#sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
sqlr = f"SELECT time, humidity FROM (SELECT time, humidity, ROW_NUMBER() OVER (ORDER BY time) as rn FROM sensor_readings WHERE device_id = {device_id} AND humidity >= '1' AND humidity <= '99' AND time >= '{time_from_str}' AND time <= '{time_to_str}') subquery WHERE rn % 8 = 1 ORDER BY time"
else:
sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
logger.debug(f"sqlr = {sqlr}")
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sqlr)
result = cur.fetchall()
return result
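# Smell sensor names longer than "s9" are remapped to a base column plus an mtype filter:
#   smell_index = s_i % 10, mtype = 100 + 10 * (s_i // 10)
# Worked example: sensor "s47" -> s_i = 47 -> column s7 with mtype = 140.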
def ReadSensorDeltas(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
'for now pressure and light only'
sensor_2_mtype = {"light": 4, "pressure": 1, "temperature": 2, "humidity": 3}
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
legal_min, legal_max, window = sensor_legal_values[sensor]
result = []
if sensor == "radar":
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
elif sensor[0] == "s":
sqlr = f"SELECT time, {sensor} AS smell FROM sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' and mtype=4 ORDER BY time ASC"
else:
if sensor == "temperature":
sqlr = f"SELECT time, {sensor} - 16 from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
elif sensor == "pressure":
sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' AND mtype=1 ORDER BY time ASC"
else:
mtype = sensor_2_mtype[sensor]
sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' AND mtype={mtype} ORDER BY time ASC"
logger.debug(f"sqlr = {sqlr}")
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sqlr)
result = cur.fetchall()
return result
def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
if time_to_epoch < time_from_epoch:
time_to_epoch, time_from_epoch = time_from_epoch, time_to_epoch
# Convert epoch to datetime and format as ISO 8601 strings with timezone
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
legal_min, legal_max, window = sensor_legal_values[sensor]
# If bucket_size is provided (i.e. not "no"), then use time bucketing.
use_bucket = bucket_size != "no"
if use_bucket:
# Map the shorthand bucket sizes to PostgreSQL interval strings.
mapping = {
"10s": "10 seconds",
"1m": "1 minute",
"5m": "5 minutes",
"10m": "10 minutes",
"15m": "15 minutes",
"30m": "30 minutes",
"1h": "1 hour"
}
bucket_interval = mapping.get(bucket_size, bucket_size)
avgmax = "AVG"
# Build the SQL query based on sensor type.
if sensor == "radar":
# For radar sensors, a special aggregation is needed.
avgmax = "MAX"
if radar_part == "s28":
radar_expr = "(s2+s3+s4+s5+s6+s7+s8)/7"
else:
radar_expr = radar_part
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({radar_expr}) AS radar
FROM radar_readings
WHERE device_id = {device_id}
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {radar_expr} AS radar
FROM radar_readings
WHERE device_id = {device_id}
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
elif sensor[0] == "s":
# For sensors whose name starts with "s" (for example, smell sensors)
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({sensor}) AS smell
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {sensor} AS smell
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
elif sensor == "co2":
alias = sensor
sensor = "s4"
sqlr = f"""
WITH s4_values AS (
SELECT s4
FROM public.sensor_readings
WHERE device_id = 559
AND s4 IS NOT NULL
),
s4_percentile AS (
SELECT percentile_cont(0.25) WITHIN GROUP (ORDER BY s4 DESC) AS s4_25_percentile
FROM s4_values
)
SELECT s4_25_percentile
FROM s4_percentile;
"""
co2_max = 22536000#102400000
co2_min = 2400000#16825674 #387585
real_co2_max = 2000
real_co2_min = 430
#logger.debug(f"sqlr = {sqlr}")
#with get_db_connection() as conn:
#with conn.cursor() as cur:
#cur.execute(sqlr)
#result = cur.fetchall()
#co2_max = result[0][0]
#co2_min = result[0][1]
#=E17+E20*(102400000-A24)/B18
#col_expr = f"{real_co2_min}+({real_co2_max}-{real_co2_min})*(102400000-{sensor})/({co2_min}-{co2_max})"
col_expr = f"GREATEST({real_co2_min},{real_co2_min}+({real_co2_max}-{real_co2_min})*({co2_max}-percentile_cont(0.5) WITHIN GROUP (ORDER BY {sensor}))/({co2_max}-{co2_min}))"
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
({col_expr}) AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {col_expr} AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
elif sensor == "voc":
sensor = "s9"
alias = sensor
col_expr = f"{sensor} - 0"
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({col_expr}) AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {col_expr} AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
elif sensor == "humidity":
alias = sensor
#col_expr = f"2.3592 * {sensor} + 32.5546" #= 2.3592 * J2 + 33.5546
col_expr = f"1* {sensor} + 5.0" #= 2.3592 * J2 + 33.5546
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({col_expr}) AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {col_expr} AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
else:
# For other sensors (including temperature, which requires a subtraction)
alias = sensor
col_expr = sensor
if sensor == "temperature":
col_expr = f"{sensor}"
alias = "temperature"
elif sensor == "light":
avgmax = "MAX"
if use_bucket:
sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({col_expr}) AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
AND mtype > 4
GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC;
"""
else:
sqlr = f"""
SELECT time, {col_expr} AS {alias}
FROM sensor_readings
WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}'
AND {sensor} <= '{legal_max}'
AND time >= '{time_from_str}'
AND time <= '{time_to_str}'
ORDER BY time ASC;
"""
logger.debug(f"sqlr = {sqlr}")
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sqlr)
result = cur.fetchall()
return result
def ReadRadarDetail(device_id, sensor, time_from_epoch, time_to_epoch, alt_key_state):
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
# Format in ISO 8601 format with timezone
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
#sensor_index = int(sensor_index)
result = []
#time_period_sec can be "10" (RAW) or "60"
if alt_key_state == "1": #"RAW = 10 sec"
radar_part = sensor
if sensor == "m08_max":
radar_part = "(m0+m1+m2+m3+m4+m5+m6+m7+m8)/9"
elif sensor == "s28_max" or sensor == "s28_min":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
else:
if sensor == "m08_max":
radar_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/7) AS m08_max"
elif sensor == "s28_max":
radar_part = f"MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
elif sensor == "s28_min":
radar_part = f"MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
else:
radar_part = f"MAX({sensor}) AS {sensor}"
sqlr = f"""
SELECT
minute,
{sensor} as {sensor}
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
{radar_part}
FROM
radar_readings
WHERE
device_id = {device_id}
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute
) rr
ORDER BY
minute
"""
logger.debug(f"sqlr = {sqlr}")
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sqlr)
result = cur.fetchall()
return result
def check_and_parse(devices_str):
"""Parse devices string and determine if it's nested"""
try:
# Try parsing as Python literal first (handles single quotes)
devices = ast.literal_eval(devices_str)
except (ValueError, SyntaxError):
# Fallback to JSON if that fails
devices = json.loads(devices_str)
# Check if nested: first element is a list
is_nested = isinstance(devices, list) and len(devices) > 0 and isinstance(devices[0], list)
return is_nested, devices
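# Examples (literal strings only, no database involved):
#   check_and_parse("[1, 2, 3]")             -> (False, [1, 2, 3])
#   check_and_parse("[[1, 'a'], [2, 'b']]")  -> (True, [[1, 'a'], [2, 'b']])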
from scipy import interpolate  # the spline-based cleaners below need this; the top-of-file import is commented out
def clean_data_with_rolling_spline(line_part_t, window=5, threshold=2.0):
"""
Filter outliers using rolling median and replace with spline interpolation
Returns data in the same format as input: [(timestamp, value), ...]
"""
# Unzip the input tuples
x, y = zip(*line_part_t)
x = np.array(x)
y = np.array(y, dtype=float) # explicitly convert to float
# Calculate rolling median and MAD using a safer approach
rolling_median = []
rolling_mad = []
for i in range(len(y)):
start_idx = max(0, i - window//2)
end_idx = min(len(y), i + window//2 + 1)
window_values = y[start_idx:end_idx]
# Skip if window is empty or contains invalid values
if len(window_values) == 0 or np.any(np.isnan(window_values)):
rolling_median.append(y[i])
rolling_mad.append(0)
continue
med = np.median(window_values)
mad = np.median(np.abs(window_values - med))
rolling_median.append(med)
rolling_mad.append(mad)
rolling_median = np.array(rolling_median)
rolling_mad = np.array(rolling_mad)
# Identify outliers (protect against division by zero)
outlier_mask = np.abs(y - rolling_median) > threshold * (rolling_mad + 1e-10)
good_data_mask = ~outlier_mask
if np.sum(good_data_mask) < 4:
return line_part_t # return original data if we can't interpolate
try:
# Create and apply spline
spline = interpolate.InterpolatedUnivariateSpline(
x[good_data_mask],
y[good_data_mask],
k=3
)
y_cleaned = y.copy()
y_cleaned[outlier_mask] = spline(x[outlier_mask])
except Exception as e:
print(f"Spline interpolation failed: {e}")
return line_part_t
# Return in the same format as input
return list(zip(x, y_cleaned))
def DatesSpan(date_from: str, date_to: str) -> list:
"""
Generate a list of dates between date_from and date_to (inclusive).
Handles cases where date_from is later than date_to.
Args:
date_from (str): Start date in 'YYYY-MM-DD' format
date_to (str): End date in 'YYYY-MM-DD' format
Returns:
list: List of dates in 'YYYY-MM-DD' format
"""
# Convert string dates to datetime objects
start_date = datetime.datetime.strptime(date_from, '%Y-%m-%d')
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
# Determine direction and swap dates if necessary
if start_date > end_date:
start_date, end_date = end_date, start_date
# Generate list of dates
dates_list = []
current_date = start_date
while current_date <= end_date:
dates_list.append(current_date.strftime('%Y-%m-%d'))
current_date += timedelta(days=1)
# Reverse the list if original date_from was later than date_to
#if datetime.datetime.strptime(date_from, '%Y-%m-%d') > datetime.datetime.strptime(date_to, '%Y-%m-%d'):
# dates_list.reverse()
return dates_list
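# Example: the span is inclusive and direction-insensitive (the reversal is commented out),
# so DatesSpan('2025-01-03', '2025-01-01') -> ['2025-01-01', '2025-01-02', '2025-01-03'].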
def zip_blobs(blob_paths, zip_blob_name, bucket_name, minio_client=None):
"""
Zip multiple blobs from MinIO storage into a single zip file without saving locally.
Args:
blob_paths (list): List of blob paths to zip
zip_blob_name (str): Name/path for the output zip file in MinIO
bucket_name (str): MinIO bucket name
minio_client (Minio, optional): Existing MinIO client instance
Returns:
bool: True if successful, False otherwise
"""
try:
# Create zip file in memory
zip_buffer = BytesIO()
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
# Process each blob
for blob_path in blob_paths:
# Get file name from path for zip entry
file_name = blob_path.split('/')[-1]
# Get blob data into memory
data = minio_client.get_object(bucket_name, blob_path.lstrip('/'))
# Add file to zip
zip_file.writestr(file_name, data.read())
# Close the object to free memory
data.close()
# Seek to start of zip file
zip_buffer.seek(0)
# Upload zip file to MinIO
minio_client.put_object(
bucket_name,
zip_blob_name.lstrip('/'),
zip_buffer,
length=zip_buffer.getbuffer().nbytes
)
return True
except Exception as e:
print(f"Error creating zip file: {str(e)}")
return False
finally:
# Clean up
zip_buffer.close()
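# Illustrative call (blob paths and the zip name are placeholders):
#   zip_blobs(["exports/a.csv", "exports/b.csv"], "exports/bundle.zip",
#             "data-downloads", minio_client=miniIO_blob_client)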
def clean_data_with_spline(x, y, threshold=2.0):
"""
Filter outliers and replace with spline interpolation
Parameters:
x : array-like, timestamps or x-coordinates
y : array-like, values to be filtered
threshold : float, number of median absolute deviations for outlier detection
Returns:
array-like : cleaned data with outliers replaced by spline interpolation
"""
# Convert inputs to numpy arrays
x = np.array(x)
y = np.array(y)
# Calculate median and median absolute deviation
median = np.median(y)
mad = stats.median_abs_deviation(y)
# Identify outliers
outlier_mask = np.abs(y - median) > threshold * mad
good_data_mask = ~outlier_mask
# If we have too few good points for interpolation, adjust threshold
min_points_needed = 4 # minimum points needed for cubic spline
if np.sum(good_data_mask) < min_points_needed:
return y # return original data if we can't interpolate
# Create spline with non-outlier data
spline = interpolate.InterpolatedUnivariateSpline(
x[good_data_mask],
y[good_data_mask],
k=3 # cubic spline
)
# Replace outliers with interpolated values
y_cleaned = y.copy()
y_cleaned[outlier_mask] = spline(x[outlier_mask])
return y_cleaned
def clean_data(line_part_t, window=5, threshold=2.0):
"""
Remove obvious outliers based on window comparison
Returns cleaned data in the same format: [(timestamp, value), ...]
"""
if len(line_part_t) < window:
return line_part_t
x, y = zip(*line_part_t)
x = np.array(x)
y = np.array(y, dtype=float)
cleaned_data = []
for i in range(len(y)):
# Get window around current point
start_idx = max(0, i - window//2)
end_idx = min(len(y), i + window//2 + 1)
window_values = y[start_idx:end_idx]
# Calculate median and MAD for the window
window_median = np.median(window_values)
deviation = abs(y[i] - window_median)
# Keep point if it's not too far from window median
if deviation <= threshold * window_median:
cleaned_data.append((x[i], y[i]))
#else:
#print(window_values)
return cleaned_data
def clean_data_fast(line_part_t, window=5, threshold=2.0):
"""
Remove obvious outliers based on window comparison - vectorized version
Returns cleaned data in the same format: [(timestamp, value), ...]
"""
if len(line_part_t) < window:
return line_part_t
x, y = zip(*line_part_t)
x = np.array(x)
y = np.array(y, dtype=float)
# Calculate rolling median using numpy
half_window = window // 2
medians = np.array([
np.median(y[max(0, i-half_window):min(len(y), i+half_window+1)])
for i in range(len(y))
])
# Calculate deviations for all points at once
deviations = np.abs(y - medians)
# Create mask for good points
good_points = deviations <= threshold * medians
# Return filtered data using boolean indexing
return list(zip(x[good_points], y[good_points]))
def clean_data_pd(line_part_t, window=5, percentile=99):
"""
Remove obvious outliers based on window comparison - pandas version
Returns cleaned data in the same format: [(timestamp, value), ...]
"""
#line_part_t = line_part_t[2000:2100]
if len(line_part_t) < window:
return line_part_t
x, y = zip(*line_part_t)
# Create pandas Series and calculate rolling median
series = pd.Series(y)
medians = series.rolling(window=window, center=True, min_periods=1).median()
# Calculate deviations
deviations = np.abs(series - medians)
largest_deviations = deviations.nlargest(10)
#print(largest_deviations)
# Create mask for good points
deviation_threshold = np.percentile(deviations, percentile)
good_points = deviations <= deviation_threshold
# Convert back to numpy arrays for filtering
x = np.array(x)
y = np.array(y)
# Return filtered data
return list(zip(x[good_points], y[good_points]))
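# Illustrative usage sketch (not called anywhere in the service): with percentile=99 only the
# very largest rolling-median deviations (here the single injected spike) are removed.
def _demo_clean_data_pd():
    points = [(float(i), 50.0) for i in range(200)]
    points[100] = (100.0, 900.0)  # injected spike
    return clean_data_pd(points, window=5, percentile=99)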
def CombineStripes(result_filename, stripes_files):
try:
# Open the first image to get the width and initialize the height
first_image = Image.open(stripes_files[0])
width, height = first_image.size
# Calculate the total height of the combined image
total_height = height * len(stripes_files)
# Create a new blank image with the same width and the calculated height
result_image = Image.new('RGB', (width, total_height))
# Paste each image onto the result image vertically
y_offset = 0
for file_name in stripes_files:
image = Image.open(file_name)
result_image.paste(image, (0, y_offset))
y_offset += height
# Save the result image
result_image.save(result_filename)
# Return success flag
return True
except Exception as e:
print("Error:", e)
return False
def FindFirstLocalMinimum(counts, bins):
"""
Find the first local minimum in a histogram after the main peak and calculate its offset.
    Parameters:
    counts: array-like
        Histogram counts from np.histogram()
    bins: array-like
        Histogram bin edges from np.histogram()
    Returns:
    tuple: (peak_position, TR, THR_OFFSET)
        peak_position: float, bin-center value of the main (global maximum) peak
        TR: float, bin-center value of the first local minimum after the main peak
        THR_OFFSET: float, distance in bin-value units between the main peak and that local minimum
"""
# Find the main peak (global maximum)
main_peak_idx = np.argmax(counts)
# Look for the first local minimum after the main peak
for i in range(main_peak_idx + 1, len(counts) - 1):
# Check if current point is less than or equal to both neighbors
if counts[i] <= counts[i-1] and counts[i] <= counts[i+1]:
# Calculate the bin center value for TR
TR = (bins[i] + bins[i+1]) / 2
            # Offset from the main peak, expressed in bin-value units
            THR_OFFSET = (bins[i] + bins[i+1]) / 2 - (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
return (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2, TR, THR_OFFSET
    # If no local minimum is found, return None for all three values
    return None, None, None
def process_raw_data(data_tuples):
"""
Convert list of (timestamp, stationary, motion) tuples to separate arrays
Parameters:
-----------
data_tuples : list of tuples
Each tuple contains (datetime, stationary_value, motion_value)
Returns:
--------
timestamps : array of datetime
stationary : array of float
motion : array of float
"""
timestamps = np.array([t[0] for t in data_tuples])
stationary = np.array([t[1] for t in data_tuples])
motion = np.array([t[2] for t in data_tuples])
return timestamps, stationary, motion
def rolling_std_fast(arr, window_size):
"""
Fast calculation of rolling standard deviation using NumPy's stride tricks.
Parameters:
-----------
arr : numpy array
Input array
window_size : int
Size of rolling window
Returns:
--------
numpy array
Rolling standard deviation
"""
# Compute rolling sum of squares
r = np.array(arr, dtype=float)
r2 = np.array(arr, dtype=float) ** 2
# Calculate cumulative sums
cum = np.cumsum(np.insert(r, 0, 0))
cum2 = np.cumsum(np.insert(r2, 0, 0))
# Get rolling windows
x = (cum[window_size:] - cum[:-window_size])
x2 = (cum2[window_size:] - cum2[:-window_size])
# Calculate standard deviation
n = window_size
return np.sqrt((x2/n) - (x/n) ** 2)
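# Illustrative usage sketch (not called anywhere in the service): the returned array has
# len(arr) - window_size + 1 entries, one per full window.
def _demo_rolling_std_fast():
    arr = np.array([1, 2, 3, 10, 3, 2, 1, 1, 1, 1], dtype=float)
    return rolling_std_fast(arr, window_size=5)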
def detect_presence(timestamps, stationary_signal, motion_signal, window_size=100,
motion_threshold=5, gmm_components=2):
"""
Detect presence using both stationary and motion signals with adaptive thresholding.
Parameters:
-----------
timestamps : array-like
Array of datetime objects
stationary_signal : array-like
Time series of stationary signal (0-100)
motion_signal : array-like
Time series of motion signal (0-100)
window_size : int
Size of rolling window for statistics (used only for temporal smoothing)
motion_threshold : float
Threshold for significant motion
gmm_components : int
Number of components for Gaussian Mixture Model
Returns:
--------
presence_mask : numpy array
Boolean array indicating presence
baseline : float
Computed baseline for stationary signal
threshold : float
Computed threshold for stationary signal
"""
# Convert inputs to numpy arrays
stationary_signal = np.array(stationary_signal)
motion_signal = np.array(motion_signal)
# 1. Fit Gaussian Mixture Model to stationary signal
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
X = stationary_signal.reshape(-1, 1)
gmm.fit(X)
# Get the component with lowest mean as baseline
baseline = min(gmm.means_)[0]
# 2. Calculate adaptive threshold using GMM components
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
baseline_std = np.sqrt(components_sorted[0][1])
threshold = baseline + 3 * baseline_std # 3 sigma rule
# 3. Combine motion and stationary detection
presence_mask = np.zeros(len(stationary_signal), dtype=bool)
# Vectorized operations instead of loop
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
# 4. Apply temporal smoothing to reduce false transitions
smooth_window = min(window_size // 4, 10) # Smaller window for smoothing
presence_mask = np.convolve(presence_mask.astype(int),
np.ones(smooth_window)/smooth_window,
mode='same') > 0.5
return presence_mask, baseline, threshold
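# Illustrative usage sketch (not called anywhere in the service): synthetic stationary and motion
# signals with an occupied stretch in the middle; timestamps are one second apart.
def _demo_detect_presence():
    rng = np.random.default_rng(0)
    n = 200
    stationary = rng.normal(5, 1, n)
    stationary[70:130] += 40  # simulated presence
    motion = np.zeros(n)
    motion[90:100] = 20       # brief motion burst
    ts = np.array([datetime.datetime(2025, 1, 1) + timedelta(seconds=i) for i in range(n)])
    return detect_presence(ts, stationary, motion, window_size=40)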
def visualize_detection(timestamps, stationary_signal, motion_signal, presence_mask,
baseline, threshold, output_file='presence_detection.png'):
"""
Visualize the detection results and save to file.
Parameters:
-----------
timestamps : array-like
Array of datetime objects
stationary_signal : array-like
Time series of stationary signal
motion_signal : array-like
Time series of motion signal
presence_mask : array-like
Boolean array indicating presence
baseline : float
Computed baseline for stationary signal
threshold : float
Computed threshold for stationary signal
output_file : str
Path to save the output PNG file
"""
plt.figure(figsize=(15, 10))
# Configure time formatting
date_formatter = mdates.DateFormatter('%H:%M:%S')
# Plot signals
plt.subplot(3, 1, 1)
plt.plot(timestamps, stationary_signal, label='Stationary Signal')
plt.axhline(y=baseline, color='g', linestyle='--', label='Baseline')
plt.axhline(y=threshold, color='r', linestyle='--', label='Threshold')
plt.gca().xaxis.set_major_formatter(date_formatter)
plt.legend()
plt.title('Stationary Signal with Baseline and Threshold')
plt.grid(True)
plt.subplot(3, 1, 2)
plt.plot(timestamps, motion_signal, label='Motion Signal')
plt.gca().xaxis.set_major_formatter(date_formatter)
plt.legend()
plt.title('Motion Signal')
plt.grid(True)
plt.subplot(3, 1, 3)
plt.plot(timestamps, presence_mask, label='Presence Detection')
plt.gca().xaxis.set_major_formatter(date_formatter)
plt.ylim(-0.1, 1.1)
plt.legend()
plt.title('Presence Detection Result')
plt.grid(True)
plt.tight_layout()
# Save to file and close figure to free memory
plt.savefig(output_file, dpi=300, bbox_inches='tight')
plt.close()
def FindZeroIntersection(counts, bins, save_plot, device_id):
"""
Find the zero intersection point by fitting a parabola to the descending slope
between 50% and 10% of the maximum peak height. Also returns the peak position.
Parameters:
counts: array-like
The histogram counts
bins: array-like
The histogram bin edges
save_plot: str or None, optional
If provided, saves the visualization to the specified file path
Returns:
tuple: (zero_intersections, peak_position)
zero_intersections: list of floats, x-coordinates where parabola intersects y=0
peak_position: float, x-coordinate of the histogram maximum peak
"""
# Find the main peak
main_peak_idx = np.argmax(counts)
peak_height = counts[main_peak_idx]
peak_position = (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
    # Calculate 50% and 10% of peak height
height_50 = 0.50 * peak_height
height_10 = 0.10 * peak_height
# Find indices where counts cross these thresholds after the peak
idx_50 = main_peak_idx
idx_10 = main_peak_idx
for i in range(main_peak_idx, len(counts)):
if counts[i] <= height_50 and idx_50 == main_peak_idx:
idx_50 = i
if counts[i] <= height_10:
idx_10 = i
break
# If we couldn't find valid points, return None
if idx_50 == main_peak_idx or idx_10 == main_peak_idx:
return None, peak_position
# Get x and y coordinates for fitting
# Use bin centers for x coordinates
x_points = np.array([(bins[i] + bins[i+1])/2 for i in range(idx_50, idx_10+1)])
y_points = counts[idx_50:idx_10+1]
# Define quadratic function for fitting
def quadratic(x, a, b, c):
return a*x**2 + b*x + c
try:
popt, pcov = curve_fit(quadratic, x_points, y_points)
a, b, c = popt
# Find zeros using quadratic formula
if a != 0:
discriminant = b**2 - 4*a*c
if discriminant >= 0:
x1 = (-b + np.sqrt(discriminant)) / (2*a)
x2 = (-b - np.sqrt(discriminant)) / (2*a)
zero_intersections = sorted([x1, x2])
# Filter zeros to only include those after the peak
zero_intersections = [x for x in zero_intersections if x > peak_position]
else:
zero_intersections = []
else:
# If a ≈ 0, fallback to linear solution
if b != 0:
zero_intersections = [-c/b]
else:
zero_intersections = []
if save_plot:
plt.figure(figsize=(10, 6))
# Plot histogram
bin_centers = [(bins[i] + bins[i+1])/2 for i in range(len(counts))]
plt.bar(bin_centers, counts, width=bins[1]-bins[0], alpha=0.6,
color='skyblue', label='Histogram')
# Plot peak height lines
plt.axhline(y=height_50, color='g', linestyle='--', alpha=0.5,
label='50% Peak Height')
plt.axhline(y=height_10, color='r', linestyle='--', alpha=0.5,
label='10% Peak Height')
# Plot fitted parabola
x_fit = np.linspace(min(x_points), max(x_points), 100)
y_fit = quadratic(x_fit, a, b, c)
plt.plot(x_fit, y_fit, 'r-', label='Fitted Parabola')
# Plot points used for fitting
plt.plot(x_points, y_points, 'ro', alpha=0.5, label='Fitting Points')
# Plot zero intersections
for x_zero in zero_intersections:
plt.plot(x_zero, 0, 'ko', label='Zero Intersection')
# Plot peak position
plt.axvline(x=peak_position, color='purple', linestyle='--', alpha=0.5,
label='Peak Position')
# Add labels and legend
plt.xlabel('Bin Values')
plt.ylabel('Counts')
plt.title(f'Histogram Analysis with Parabolic Fit {device_id}')
plt.legend()
# Show zero line
plt.axhline(y=0, color='k', linestyle='-', alpha=0.2)
# Add text with intersection and peak values
text = f'Peak Position: {peak_position:.2f}\n'
if zero_intersections:
text += f'Zero Intersection(s): {", ".join([f"{x:.2f}" for x in zero_intersections])}'
plt.text(0.02, 0.98, text, transform=plt.gca().transAxes,
verticalalignment='top',
bbox=dict(boxstyle='round', facecolor='white', alpha=0.8))
# Save plot to file
plt.savefig(save_plot, dpi=300, bbox_inches='tight')
plt.close()
return zero_intersections, peak_position
except RuntimeError:
print("Warning: Failed to fit parabola")
return None, peak_position
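# Illustrative usage sketch (not called anywhere in the service): a bimodal synthetic histogram;
# "demo-device" is a placeholder id and no plot is written because save_plot is None.
def _demo_find_zero_intersection():
    rng = np.random.default_rng(1)
    values = np.concatenate([rng.normal(10, 2, 5000), rng.normal(40, 5, 500)])
    counts, bins = np.histogram(values, bins=100)
    return FindZeroIntersection(counts, bins, save_plot=None, device_id="demo-device")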
def GeneratePresenceHistory(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
minutes = 1440
stripes_files = []
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
start_date = min(date1_obj, date2_obj)
end_date = max(date1_obj, date2_obj)
# Generate list of all dates
maps_dates = [
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
for x in range((end_date - start_date).days + 1)
]
maps_dates.reverse()
days = len(maps_dates)
stretch_by = int(1000 / days)
if stretch_by > 50:
stretch_by = 50
#lets use 1000 pixels
#arr_stretched = np.zeros((int(days*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
# Paste each image onto the result image vertically
y_offset = 0
for ddate in maps_dates:
force_recreate = recreate_in
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename_day)
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
file_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename_day)
if file_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if force_recreate:
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
image_bytes, content_type, metadata = GetBlob(filename_day)
if image_bytes != None:
image_stream = io.BytesIO(image_bytes)
image = Image.open(image_stream)
#image = Image.open(file_name)
result_image.paste(image, (0, y_offset))
image.close()
image_stream.close()
y_offset += stretch_by
# Save directly to MinIO instead of local file
success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
# Clean up
result_image.close()
return success
def AddText(room_image_cv2, x, y, room_name, font_size):
pil_im = Image.fromarray(room_image_cv2)
draw = ImageDraw.Draw(pil_im)
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
#print(f"Attempting to load font from: {font_path}")
try:
        font = ImageFont.truetype(font_path, font_size)  # Poppins at the requested pixel size
    except OSError:
        logger.error(f"Poppins font not found in {font_path}. Please ensure the font file is in your working directory")
        # Fall back to PIL's default font if Poppins is not available
        font = ImageFont.load_default()
    draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # gray (150, 150, 150) text in RGB
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
return room_image_cv2
def AddTextList(room_image_cv2, strings_list, font_size):
pil_im = Image.fromarray(room_image_cv2)
draw = ImageDraw.Draw(pil_im)
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
try:
        font = ImageFont.truetype(font_path, font_size)  # Poppins at the requested pixel size
    except OSError:
        logger.error("Poppins font not found. Please ensure the font file is in your working directory")
        # Fall back to PIL's default font if Poppins is not available
        font = ImageFont.load_default()
    for x, y, room_name in strings_list:
        draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # gray (150, 150, 150) text in RGB
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
return room_image_cv2
def AddRoomData(room_image, room_name, data):
# Example usage:
radius = 10
color_t = data["color"] # BGR format for red
color = (color_t[2], color_t[1], color_t[0])
x_offset = 12
room_image = AddText(room_image, 13, 20, room_name, 50)
print(data)
for present in data["presence"]:
device_id, minute, duration = present
#duration = 10
top_left = (x_offset + minute, 140) #bottom_right = (300, 200)
bottom_right = (x_offset + minute + duration, 260)
draw_rounded_rectangle(room_image, top_left, bottom_right, radius, color)
return room_image
def AddFooterData(image):
    # Quarter-day tick labels; previously noted x positions: 12, 370, 736, 1092, 1452
    step_size = 1440 / 4
    string_width = 60
    offset = 12
    yoffset = 30
    font_size = 40
    for step, label in enumerate(["3 AM", "9 AM", "3 PM", "9 PM"]):
        image = AddText(image, offset + step * step_size + step_size/2 - string_width/2, yoffset, label, font_size)
return image
def draw_rounded_rectangle(image, top_left, bottom_right, radius, color):
"""
Draw a filled rectangle with rounded corners, using simple rectangle for small dimensions
:param image: Image to draw on
:param top_left: Top-left corner coordinates (x, y)
:param bottom_right: Bottom-right corner coordinates (x, y)
:param radius: Desired corner radius (will be adjusted if needed)
:param color: Rectangle color in BGR format
"""
x1, y1 = top_left
x2, y2 = bottom_right
# Calculate width
width = x2 - x1
# Adjust radius if width or height is too small
# Maximum radius should be half of the smaller dimension
max_radius = abs(width) // 2
radius = min(radius, max_radius)
# If width is too small, fallback to regular rectangle
if width <= 4 or radius <= 1:
cv2.rectangle(image, top_left, bottom_right, color, -1)
return
# Adjust radius if needed
radius = min(radius, width // 2)
# Create points for the main rectangle
pts = np.array([
[x1 + radius, y1],
[x2 - radius, y1],
[x2, y1 + radius],
[x2, y2 - radius],
[x2 - radius, y2],
[x1 + radius, y2],
[x1, y2 - radius],
[x1, y1 + radius]
], np.int32)
# Fill the main shape
cv2.fillPoly(image, [pts], color)
# Fill the corners
cv2.ellipse(image, (x1 + radius, y1 + radius), (radius, radius), 180, 0, 90, color, -1)
cv2.ellipse(image, (x2 - radius, y1 + radius), (radius, radius), 270, 0, 90, color, -1)
cv2.ellipse(image, (x1 + radius, y2 - radius), (radius, radius), 90, 0, 90, color, -1)
cv2.ellipse(image, (x2 - radius, y2 - radius), (radius, radius), 0, 0, 90, color, -1)
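# Illustrative usage sketch (not called anywhere in the service): draw one rounded bar on a blank
# BGR canvas, mirroring how AddRoomData draws presence intervals.
def _demo_draw_rounded_rectangle():
    canvas = np.zeros((300, 400, 3), dtype=np.uint8)
    draw_rounded_rectangle(canvas, (50, 100), (350, 200), radius=10, color=(0, 128, 255))
    return canvas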
def filter_device(locations_list, device_id):
result = []
for entry in locations_list:
if entry[0] == device_id:
result.append(entry)
return result
def GenerateLocationsMap(date_st, devices_list, devices_map, locations_list, time_zone_s):
devices_list_t = [("date",date_st)]
for mac in devices_list:
well_id, device_id, room = devices_map[mac]
#room = devices[well_id][0]
if room in Loc2Color:
color = Loc2Color[room][0]
else:
color = Loc2Color[room.split()[0]][0]
presence_data = filter_device(locations_list, device_id)
room_details = (room, {"color": color, "presence": presence_data})
devices_list_t.append(room_details)
well_id = 0
device_id = 0
room = "Outside/?"
color = (0, 0, 0)
#lets's not draw future unknown!
presence_data = filter_device(locations_list, device_id)
current_utc = datetime.datetime.now(pytz.UTC)
current_date_local = current_utc.astimezone(pytz.timezone(time_zone_s))
current_minute_of_day = current_date_local.hour * 60 + current_date_local.minute
if date_st == current_date_local.strftime('%Y-%m-%d'):
filtered_presence_data = []
for entry in presence_data:
            if entry[1] < current_minute_of_day:
                if entry[1] + entry[2] < current_minute_of_day:
                    filtered_presence_data.append(entry)
                else:
                    # Truncate an interval that is still ongoing at the current minute of the day
                    entry[2] = current_minute_of_day - entry[1]
                    if entry[2] > 0:
                        filtered_presence_data.append(entry)
#print(presence_data)
else:
filtered_presence_data = presence_data
room_details = (room, {"color": color, "presence": filtered_presence_data})
devices_list_t.append(room_details)
return devices_list_t
def CreateDailyLocationChart(filename_chart_image_day, locations):
result = False
header_image_file = "header.png"
room_image_file = "room.png"
footer_image_file = "footer.png"
#ToDo: change it so it reads files from MinIo
header_image_file = os.path.join(filesDir, header_image_file)
header_image_file = header_image_file.replace("\\","/")
header_image = cv2.imread(header_image_file)
#header_height, header_width = header_image.shape[:2]
room_image_file = os.path.join(filesDir, room_image_file)
room_image_file = room_image_file.replace("\\","/")
room_image = cv2.imread(room_image_file)
#room_height, room_width = room_image.shape[:2]
footer_image_file = os.path.join(filesDir, footer_image_file)
footer_image_file = footer_image_file.replace("\\","/")
footer_image = cv2.imread(footer_image_file)
all_images = [header_image]
for item_c in locations:
item = item_c[0]
if item == "date":
date = item_c[1]
else:
room_image = cv2.imread(room_image_file)
data = item_c[1]
room_image = AddRoomData(room_image, item, data)
all_images.append(room_image)
footer_image = AddFooterData(footer_image)
all_images.append(footer_image)
final_image = np.vstack(all_images)
#this needs to write straight to MinIo !
SaveImageInBlob(filename_chart_image_day, final_image, [])
result = True
#cv2.imwrite(filename_chart_image_day, final_image)
#print(rooms_count)
return result
def GetOptimumFontSize(target_width, text="00", min_size=1, max_size=100, tolerance=1):
"""
Find optimal font size to fit text within target width using binary search.
Args:
target_width (int): Desired width in pixels
text (str): Text to measure (default "00")
min_size (int): Minimum font size to try
max_size (int): Maximum font size to try
tolerance (int): Acceptable difference from target width
Returns:
int: Optimal font size
"""
while min_size <= max_size:
current_size = (min_size + max_size) // 2
width, _ = GetStringSize(text, current_size)
if abs(width - target_width) <= tolerance:
return current_size
elif width > target_width:
max_size = current_size - 1
else:
min_size = current_size + 1
# Return the largest size that fits within target width
width, _ = GetStringSize(text, min_size)
return min_size if width <= target_width else min_size - 1
def GetStringSize(some_string, font_size):
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
try:
        font = ImageFont.truetype(font_path, font_size)  # Poppins at the requested pixel size
    except OSError:
        logger.error("Poppins font not found. Please ensure the font file is in your working directory")
        # Fall back to PIL's default font if Poppins is not available
        font = ImageFont.load_default()
bbox = font.getbbox(some_string)
return bbox[2] - bbox[0], bbox[3] - bbox[1]
def GeneratePresenceHistoryChart(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
minutes = 1440
stripes_files = []
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
start_date = min(date1_obj, date2_obj)
end_date = max(date1_obj, date2_obj)
# Generate list of all dates
maps_dates = [
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
for x in range((end_date - start_date).days + 1)
]
#maps_dates.reverse()
days = len(maps_dates)
#stretch_by = int(1000 / days)
#if stretch_by > 50:
#stretch_by = 50
stretch_by = 30
#background_image_file = os.path.join(filesDir, "multi_day_template.png")
background_image_file = os.path.join(filesDir, "multi_day_template2.png")
background_image_file = background_image_file.replace("\\","/")
background_image = cv2.imread(background_image_file)
rgb_image = background_image #cv2.cvtColor(background_image, cv2.COLOR_BGR2RGB)
result_image = Image.fromarray(rgb_image) # Convert to PIL Image
#result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
# Paste each image onto the result image vertically
y_offset = 0
locations_list = []
font_size = 50
string_width, string_height = GetStringSize("00", font_size)
success = False
if len(maps_dates) == 1:
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
force_recreate = recreate_in
#it is faster to resize existing daily location chart (length is always 1440), than having to re-create it each time...
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
filename_chart_data_day = filename_day+".bin"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename_chart_image_day)
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
                file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()
                if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if not force_recreate:
file_exists1, time_modified_utc1 = check_file_exists(filename_chart_data_day)
if file_exists1:
                time_modified_local = time_modified_utc1.astimezone(pytz.timezone(time_zone_s))
                time_modified_date = time_modified_local.date()
                file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()
                if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if force_recreate:
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list_a, device_ids = GetProximityList(deployment_id, timee)
CreateDailyLocationMap(filename_day, devices_list_a, ddate, filter_minutes, time_zone_s, stretch_by)
locations_list_s = ReadObjectMinIO("daily-maps", filename_chart_data_day)
locations_list = json.loads(locations_list_s)
devices_map = {}
devices_list = []
for device_entry in devices_list_a:
#if T:
if device_entry[3] == None or device_entry[3].strip() == "":
devices_map[device_entry[4]] = [device_entry[0], device_entry[1], device_entry[2]]
else:
devices_map[device_entry[4]] = [device_entry[0], device_entry[1], device_entry[2] + " " + device_entry[3]]
devices_list.append(device_entry[4])
locations = GenerateLocationsMap(ddate, devices_list, devices_map, locations_list, time_zone_s)
success = CreateDailyLocationChart(filename, locations)
else:
day_counter = 0
day_step_width = int(1780 / days)
x_offset = 563
y_offset = 1615
h_labels_bottom = 1720
day_width = int(0.9 * day_step_width)
day_height = 1440
font_size = GetOptimumFontSize(day_width, "00", 10, 50, 0)
string_width, string_height = GetStringSize("00", font_size)
#logger.debug(f"font_size={font_size} string_width={string_width}")
y_offset = y_offset - day_height
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
for ddate in maps_dates:
force_recreate = recreate_in
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename_day)
file_existsS, time_modifiedS_utc = check_file_exists(filename_day[:-4]+"S.png")
if file_exists and file_existsS:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename_day)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if force_recreate:
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
#here we need to rotate and resize to:
image_bytes, content_type, metadata = GetBlob(filename_day)
image_bytes_s, content_type_s, metadata = GetBlob(filename_day[:-4]+"S.png")
if image_bytes != None:
image_stream = io.BytesIO(image_bytes)
image = Image.open(image_stream)
numpy_image = np.array(image)
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
# Convert from BGR to RGB
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
# Convert to PIL Image
pil_image = Image.fromarray(rgb_image)
#image = Image.open(file_name)
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
result_image.paste(pil_image, (x_origin, y_offset))
image_stream = io.BytesIO(image_bytes_s)
image = Image.open(image_stream)
numpy_image = np.array(image)
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
# Convert from BGR to RGB
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
# Convert to PIL Image
pil_image = Image.fromarray(rgb_image)
#image = Image.open(file_name)
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
result_image.paste(pil_image, (x_origin, 1807+y_offset))
image.close()
image_stream.close()
day_counter += 1
pil_im = result_image
#result_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
result_image_cv2 = np.array(pil_im)#cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
strings_list = []
day_counter = 0
for ddate in maps_dates:
if string_width <= day_width:
date_str = ddate[8:10]
x_origin = x_offset + int(day_step_width * (day_counter + 0.5)) - int(string_width / 2)
strings_list.append((x_origin, h_labels_bottom, date_str))
day_counter += 1
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
#Y 124 to 1636
labels_bottom = 1636 - 1.5 * string_height
x_offset = 340
step = -4 * 60 #4 hours
font_size = 50
strings_list = []
        # Hour labels every 4 hours, from midnight at the bottom up to midnight at the top
        for count, label in enumerate(["12 AM", "4 AM", "8 AM", "12 PM", "4 PM", "8 PM", "12 AM"]):
            y_offset = labels_bottom + count * step
            strings_list.append((x_offset, y_offset, label))
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
numpy_image = np.array(result_image_cv2)
success = SaveImageInBlob(filename, numpy_image, [])
#SaveImageInBlob(filename, result_image)
# Save directly to MinIO instead of local file
#if success:
# success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
# Clean up
if success:
return filename
else:
return ""
def GeneratePresenceHistoryFiles(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
start_date = min(date1_obj, date2_obj)
end_date = max(date1_obj, date2_obj)
stretch_by = 30
# Generate list of all dates
maps_dates = [
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
for x in range((end_date - start_date).days + 1)
]
day_counter = 0
for ddate in maps_dates:
force_recreate = recreate_in
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename_day+".bin")
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename_day)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if force_recreate:
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, 10)
day_counter += 1
return filename
def CalcStdevs(row, stdev_range, stdevs):
half_range = stdev_range // 2
data_len = len(row)
# Calculate standard deviations with proper window alignment
for i in range(data_len):
# Calculate window boundaries
start = max(0, i - half_range)
end = min(data_len, i + half_range + 1)
# Get data within window
window_data = row[start:end]
# Calculate standard deviation if we have data
if len(window_data) > 0:
stdevs[i] = np.std(window_data)
# Find amplitude (max - min of standard deviations)
amplitude = np.max(stdevs) - np.min(stdevs)
# Scale to range 0-1279
if amplitude > 0: # Avoid division by zero
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
return stdevs, amplitude
def CalcLife(row, stdev_range, stdevs):
    # Note: currently identical to CalcStdevs (windowed standard deviation scaled to 0-1279)
    half_range = stdev_range // 2
data_len = len(row)
# Calculate standard deviations with proper window alignment
for i in range(data_len):
# Calculate window boundaries
start = max(0, i - half_range)
end = min(data_len, i + half_range + 1)
# Get data within window
window_data = row[start:end]
# Calculate standard deviation if we have data
if len(window_data) > 0:
stdevs[i] = np.std(window_data)
# Find amplitude (max - min of standard deviations)
amplitude = np.max(stdevs) - np.min(stdevs)
# Scale to range 0-1279
if amplitude > 0: # Avoid division by zero
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
return stdevs, amplitude
def FindCalibrationDate(device_ids, ddate):
    PCD = 50   # % (Peak Contained Data %)
    PHB = 50   # % (Peak Height from Base %)
    MPW = 10   # (Max Peak Width)
    MPSD = 10  # (Minimum Presence signal Standard Deviation)
    # Intended logic (not yet implemented): find the first day where, for every device,
    # - enough radar data points were collected,
    # - a single histogram peak contains more than PCD% of the data and its width (at PHB% height) is < MPW,
    # - the standard deviation of the presence signal is > MPSD.
return ddate
def FindThreshold(data, percent_list):
"""
    Find the threshold values above which the specified percentages of points lie.
    Args:
        data: numpy array of values
        percent_list: (percent_from, percent_to) pair; each is the percentage of points (0-100)
            that should lie above the corresponding threshold
    Returns:
        tuple of (threshold_from, threshold_to)
"""
percent_from, percent_to = percent_list
# Sort data in descending order
sorted_data = np.sort(data)[::-1]
    # Calculate the indices corresponding to the desired percentages (clamped to the last element)
    index_from = min(int((percent_from / 100) * len(data)), len(data) - 1)
    index_to = min(int((percent_to / 100) * len(data)), len(data) - 1)
    # Return the threshold values
    return sorted_data[index_from], sorted_data[index_to]
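# Illustrative usage sketch (not called anywhere in the service): thresholds above which roughly
# 10% and 30% of a synthetic normal sample lie.
def _demo_find_threshold():
    rng = np.random.default_rng(2)
    data = rng.normal(0, 1, 1000)
    return FindThreshold(data, [10, 30])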
def ShowThresholdGraph(data, filename, threshold_low, threshold_high, title, AveragePercentSpendsThere, location):
"""
Create and save a threshold analysis graph with maximum curvature point.
"""
dpi=600
# Get min and max values
min_val = np.min(data)
max_val = np.max(data)
# Create 1000 threshold levels from max to min
thresholds = np.linspace(max_val, min_val, 1000)
threshold_percentages = np.linspace(0, 100, 1000)
# Calculate percentage of points above each threshold
points_above = []
total_points = len(data)
for thresh in thresholds:
above_count = np.sum(data > thresh)
percentage = (above_count / total_points) * 100
points_above.append(percentage)
points_above = np.array(points_above)
# Calculate derivatives and smooth them
first_derivative = np.gradient(points_above)
second_derivative = np.gradient(first_derivative)
#first_derivative = savgol_filter(np.gradient(points_above), window_length=51, polyorder=3)
#second_derivative = savgol_filter(np.gradient(first_derivative), window_length=51, polyorder=3)
# Find the point of maximum absolute second derivative
# Exclude edges (first and last 5% of points) to avoid edge effects
edge_margin = len(second_derivative) // 20 # 5% of points
valid_range = slice(edge_margin, -edge_margin)
max_curve_idx = edge_margin + np.argmax(np.abs(second_derivative[valid_range]))
max_curve_x = threshold_percentages[max_curve_idx]
max_curve_y = points_above[max_curve_idx]
max_curve_second_deriv = second_derivative[max_curve_idx]
# Calculate the actual threshold value for this point
threshold2 = max_val - (max_curve_x/100) * (max_val - min_val)
# Create subplot figure
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), height_ratios=[3, 2])
# Plot 1: Original curve with thresholds
ax1.plot(threshold_percentages, points_above, 'b-', linewidth=2, label='Distribution')
ax1.grid(True, linestyle='--', alpha=0.7)
# Add original threshold line if provided
if threshold_low is not None:
threshold_percent = ((max_val - threshold_low) / (max_val - min_val)) * 100
percent_above = (np.sum(data > threshold_low) / total_points) * 100
ax1.axvline(x=threshold_percent, color='r', linestyle='--', label=f'Minimum % spent in {location}: {AveragePercentSpendsThere[0]:.3f}')
ax1.axhline(y=percent_above, color='r', linestyle='--')
#ax1.annotate(f'Threshold 1: {threshold_low:.3f}\nPoints above: {percent_above:.1f}%',
#xy=(threshold_percent, percent_above),
#xytext=(10, 10), textcoords='offset points',
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
#arrowprops=dict(arrowstyle='->'))
if threshold_high is not None:
threshold_percent = ((max_val - threshold_high) / (max_val - min_val)) * 100
percent_above = (np.sum(data > threshold_high) / total_points) * 100
ax1.axvline(x=threshold_percent, color='b', linestyle='--', label=f'Maximum % spent in {location}: {AveragePercentSpendsThere[1]:.3f}')
ax1.axhline(y=percent_above, color='b', linestyle='--')
#ax1.annotate(f'Threshold 1: {threshold_high:.3f}\nPoints above: {percent_above:.1f}%',
#xy=(threshold_percent, percent_above),
#xytext=(10, 10), textcoords='offset points',
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
#arrowprops=dict(arrowstyle='->'))
# Add maximum curvature point threshold
ax1.axvline(x=max_curve_x, color='g', linestyle='--', label=f'Threshold 2: {threshold2:.3f}')
ax1.axhline(y=max_curve_y, color='g', linestyle='--')
ax1.plot(max_curve_x, max_curve_y, 'go', markersize=10)
ax1.annotate(f'Threshold 2: {threshold2:.3f}\nPoints above: {max_curve_y:.1f}%',
xy=(max_curve_x, max_curve_y),
xytext=(10, -20), textcoords='offset points',
bbox=dict(boxstyle='round,pad=0.5', fc='lightgreen', alpha=0.5),
arrowprops=dict(arrowstyle='->'))
ax1.set_xlabel('Threshold Level (%)\n0% = Maximum, 100% = Minimum')
ax1.set_ylabel('Points Above Threshold (%)')
ax1.set_title(title)
ax1.set_xlim(0, 100)
ax1.set_ylim(0, 100)
ax1.legend()
# Plot 2: Rate of change
ax2.plot(threshold_percentages, first_derivative, 'g-', label='First derivative', alpha=0.7)
ax2.plot(threshold_percentages, second_derivative, 'r-', label='Second derivative', alpha=0.7)
ax2.grid(True, linestyle='--', alpha=0.7)
# Mark maximum curvature point on derivative plot
ax2.axvline(x=max_curve_x, color='g', linestyle='--')
# Plot point exactly on the second derivative curve
ax2.plot(max_curve_x, max_curve_second_deriv, 'go', markersize=10,
label=f'Max curvature at {max_curve_x:.1f}%')
ax2.set_xlabel('Threshold Level (%)')
ax2.set_ylabel('Rate of Change')
ax2.set_title('Rate of Change Analysis')
ax2.legend()
plt.tight_layout()
plt.savefig(filename, dpi=dpi, bbox_inches='tight')
plt.close()
return threshold2, max_curve_x, max_curve_y
def add_boundary_points(line_part_t, time_zone):
"""
    Extend a time series list with an end-of-day boundary point (23:59:59 local time).
    The start-of-day (00:00:00) point is computed but currently not inserted (the insert is commented out below).
Args:
line_part_t: List of tuples (timestamp, value)
time_zone: String representing the timezone (e.g., "America/Los_Angeles")
Returns:
List of tuples with added boundary points
"""
if not line_part_t:
return line_part_t
tz = pytz.timezone(time_zone)
# Get the date from the first point
first_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
date = first_dt.date()
last_dt = datetime.datetime.fromtimestamp(line_part_t[-1][0], tz)
last_date = last_dt.date()
# Create datetime objects for start and end of the day
start_dt = tz.localize(datetime.datetime.combine(date, datetime.datetime.min.time()))
end_dt = tz.localize(datetime.datetime.combine(last_date, datetime.datetime.max.time()))
# Convert to timestamps
start_ts = start_dt.timestamp()
end_ts = end_dt.timestamp()
result = list(line_part_t)
# Handle start point (00:00:00)
first_point_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
time_diff = first_point_dt - start_dt
start_value = line_part_t[0][1]
# Add start point at the beginning
#result.insert(0, (start_ts, start_value))
# Handle end point (23:59:59)
last_point_dt = datetime.datetime.fromtimestamp(line_part_t[-1][0], tz)
end_value = line_part_t[-1][1]
# Add end point
result.append((end_ts, end_value))
return result
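# Illustrative usage sketch (not called anywhere in the service): two same-day readings get an
# extra point at 23:59:59 local time carrying the last value forward.
def _demo_add_boundary_points():
    tz = pytz.timezone("America/Los_Angeles")
    t0 = tz.localize(datetime.datetime(2025, 1, 15, 8, 0, 0)).timestamp()
    t1 = tz.localize(datetime.datetime(2025, 1, 15, 20, 0, 0)).timestamp()
    return add_boundary_points([(t0, 21.5), (t1, 22.0)], "America/Los_Angeles")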
def calculate_life_and_average(my_data1, stdev_range=5):
# Convert data to numpy array for faster operations
data_array = np.array(my_data1)
# Calculate half range
stdev_range_h = stdev_range // 2
# Pre-calculate indices for the sliding window
indices = np.arange(len(data_array) - 2 * stdev_range_h)[:, None] + np.arange(2 * stdev_range_h + 1)
# Get sliding windows of data
windows = data_array[indices]
# Calculate average (using column 3)
average = np.mean(windows[:, :, 3], axis=1)
# Calculate life (using columns 2, 3, and 4)
deltas = windows[:, :, 3] - windows[:, :, 2] + windows[:, :, 4]
life = np.mean(deltas, axis=1)
return life.tolist(), average.tolist()
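# Illustrative usage sketch (not called anywhere in the service): rows need at least five columns
# because the function reads columns 2, 3 and 4 of each sliding window.
def _demo_calculate_life_and_average():
    rows = [[0, 0, i, i + 1, i + 2, 0] for i in range(10)]
    return calculate_life_and_average(rows, stdev_range=5)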
def TryJulia(prompt):
if len(prompt) > 0:
if prompt[0] == "#":
return prompt.upper()
if prompt not in utterances:
return ""
else:
intent = utterances[prompt]
action = intents[intent]
return action[0]
else:
return ""
def AskGPT(in_prompt, language_from, language_to):
if len(in_prompt) > 4:
prompt = in_prompt.lower()
if language_to.lower() not in language_from.lower():
prompt = in_prompt + " Answer in " + language_to
print(prompt)
#lets see if question is looking for OSM query
pattern = "what is only the node line for query for * on openstreetmap api? do not answer with url to nominatim, but with query!"
if match_with_wildcard(prompt, pattern):
differing_part = extract_differing_part(prompt, pattern)
if differing_part != "":
print(differing_part)
if differing_part in searches_dict:
response = searches_dict[differing_part]
print(response)
return response, language_to
else:
                    # check whether the term is one of the known synonyms:
                    if differing_part in searches_dict["synonims"]:
                        differing_part = searches_dict["synonims"][differing_part]
                        if differing_part != "":
                            if differing_part in searches_dict:
                                response = searches_dict[differing_part]
                                print(response)
                                return response, language_to
hash_string = hashlib.sha256(str(prompt).encode('utf-8')).hexdigest()
#filename=os.path.join(cache_path, "chgpt_query_" + hash_string+".pkl")
julia_present = False
if prompt.startswith("julia"):
prompt = prompt[len("julia") + 1:]
julia_present = True
completion = ""
if julia_present == False:
completion = TryJulia(prompt)
#if completion == "":
# if os.path.exists(filename):
# #completion = pickle.load(open( filename, "rb" ))
# completion = (completion.choices[0].message.content.strip(), language_to)[0]
else:
completion = TryJulia(prompt)
if completion == "":
st = time.time()
#import wandb
#run = wandb.init(project='GPT-4 in Python')
#prediction_table = wandb.Table(columns=["prompt", "prompt tokens", "completion", "completion tokens", "model", "total tokens"])
print(time.time() - st)
openai.api_key = OPENAI_API_KEY
client = OpenAI(
# This is the default and can be omitted
api_key = OPENAI_API_KEY
)
completion = client.chat.completions.create(
messages=[
{
"role": "user",
"content": prompt,
}
],
model="gpt-3.5-turbo",
)
#with open(filename, 'wb') as handle:
#pickle.dump(completion, handle, protocol=pickle.HIGHEST_PROTOCOL)
response = (completion.choices[0].message.content.strip(), language_to)
else:
response = (completion, language_to)
else:
response = ("question is too short", language_to)
print(response)
return response
def AskGPTPure(in_prompt):
if len(in_prompt) > 4:
prompt = in_prompt.lower()
print(prompt)
st = time.time()
print(time.time() - st)
openai.api_key = OPENAI_API_KEY
client = OpenAI(
# This is the default and can be omitted
api_key = OPENAI_API_KEY
)
completion = client.chat.completions.create(
messages=[
{
"role": "user",
"content": prompt,
}
],
model="gpt-3.5-turbo",
)
response = completion.choices[0].message.content.strip()
else:
response = "question is too short"
print(response)
return response
def get_last_n_days(n=14, timezone_str='America/Los_Angeles'):
# Get current UTC time
utc_now = datetime.datetime.now(pytz.UTC)
# Convert to the specified timezone
local_now = utc_now.astimezone(pytz.timezone(timezone_str))
# Get the current date in the specified timezone
current_date = local_now.date()
# Determine the last whole day
if local_now.hour > 0 or local_now.minute > 0 or local_now.second > 0:
# Yesterday in the specified timezone
last_whole_day = current_date - timedelta(days=1)
else:
# If it's exactly midnight, the last whole day is two days ago
last_whole_day = current_date - timedelta(days=2)
# Generate list of n days, ending with the last whole day
date_list = []
for i in range(n-1, -1, -1):
day = last_whole_day - timedelta(days=i)
date_list.append(day.strftime('%Y-%m-%d'))
return date_list
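# Illustrative usage sketch (not called anywhere in the service): the last three whole days in the
# default Los Angeles timezone, oldest first.
def _demo_get_last_n_days():
    return get_last_n_days(3, "America/Los_Angeles")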
def numpy_to_json(arr, devices_list):
"""
Convert numpy array to JSON-serializable format
Args:
arr (numpy.ndarray): 2D numpy array to serialize
Returns:
str: JSON string containing array data and metadata
"""
if not isinstance(arr, np.ndarray):
raise TypeError("Input must be a numpy array")
array_dict = {
'dtype': str(arr.dtype),
'shape': arr.shape,
'devices_list': devices_list,
'data': arr.tolist() # Convert to nested Python lists
}
return json.dumps(array_dict)
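# Illustrative usage sketch (not called anywhere in the service): serialize a small 2x3 array
# together with two hypothetical device ids.
def _demo_numpy_to_json():
    arr = np.arange(6, dtype=np.int32).reshape(2, 3)
    return numpy_to_json(arr, ["dev-001", "dev-002"])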
def format_time_difference(minutes):
# Calculate days, hours, minutes
days = int(minutes // (24 * 60))
remaining_minutes = minutes % (24 * 60)
hours = int(remaining_minutes // 60)
mins = int(remaining_minutes % 60)
parts = []
# Add days if any
if days > 0:
parts.append(f"{days} day{'s' if days != 1 else ''}")
# Add hours if any
if hours > 0:
parts.append(f"{hours} hour{'s' if hours != 1 else ''}")
# Add minutes if any
if mins > 0 or (days == 0 and hours == 0):
parts.append(f"{mins} minute{'s' if mins != 1 else ''}")
# Combine the parts into a sentence
if len(parts) == 1:
return parts[0]
elif len(parts) == 2:
return f"{parts[0]} and {parts[1]}"
else:
return f"{parts[0]}, {parts[1]}, and {parts[2]}"
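# Illustrative usage sketch (not called anywhere in the service): 1565 minutes formats as
# "1 day, 2 hours, and 5 minutes".
def _demo_format_time_difference():
    return format_time_difference(26 * 60 + 5)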
def RunCommand(commmand, args_dictionary, deployment_id):
to_return = ""
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
local_tz = pytz.timezone(time_zone_s)
filter_minutes = 5
dates = get_last_n_days(28, time_zone_s)
ddate = dates[0] #2025-02-02 req.params.get("date")
to_date = dates[-1]
date_s = datetime.datetime.now(pytz.UTC).astimezone(local_tz).date().strftime("%Y-%m-%d")
if commmand == "#STATUS#":
force_recreate_orig = False #True
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
filename = GeneratePresenceHistoryFiles(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
start_date = min(date1_obj, date2_obj)
end_date = max(date1_obj, date2_obj)
stretch_by = 30
# Generate list of all dates
maps_dates = [
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
for x in range((end_date - start_date).days + 1)
]
day_counter = 0
minutes_spent_there_list = []
minutes_locations_list = []
filename_4w = f"/{deployment_id}/{deployment_id}_{maps_dates[0]}_{maps_dates[-1]}_{filter_minutes}_{stretch_by}_4w_locations.png.bin"
for ddate in maps_dates:
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
Id2Location = {}
for device in devices_list:
Id2Location[device[1]] = device[2]
Id2Location[0] = "Outside/?"
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png.bin"
locations_list_s = ReadObjectMinIO("daily-maps", filename_day)
locations_list = ast.literal_eval(locations_list_s)
minutes_locations_list.append((ddate, locations_list))
#print(locations_list_s)
minutes_spent_there = {}
for loc in Id2Location:
minutes_spent_there[Id2Location[loc]] = 0
minutes_spent_there[Id2Location[0]] = 0
for loc in locations_list:
#print(loc[0])
#if loc[0] == 559:
# print("Stop")
if loc[0] in Id2Location:
print(Id2Location[loc[0]])
minutes_spent_there[Id2Location[loc[0]]] += loc[2]
for loc in minutes_spent_there:
minutes_spent_there[loc] = int(1000 * minutes_spent_there[loc] / 1440) / 10
minutes_spent_there_list.append((ddate, minutes_spent_there))
data_part = str(minutes_spent_there_list)
minutes_locations_list_str = str(minutes_locations_list)
obj_to_save = {"Location_indexes": str(Id2Location), "Locations": minutes_locations_list_str}
print(obj_to_save)
#SaveObjectInBlob(filename_4w, obj_to_save)
#print(data_part)
#prompt = "Attached is 4 weeks of data representing % of time where person living alone is spending each day"
#prompt = prompt + " Assess his last week compared to previous 3 weeks. Comment only on significant changes."
#prompt = prompt + " Ignore days where data is all (or mostly) 0!"
#prompt = prompt + " Consider that office and living room are equivalent for this individual. Entertainment is consumed on computer (office) and in living room TV."
#prompt = prompt + " But he is also napping in living room. Comment on his sleeping pattern as well"
#prompt = prompt + " Can you summarize all in 1 sentence?"
#prompt = prompt + " " + data_part
#result = AskGPTPure(prompt)
filterr = 5
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
#current_time = datetime.datetime.now()
current_time = datetime.datetime.now(datetime.timezone.utc)
last_location = details["last_location"]
before_last_location = details["before_last_location"]
last_present_duration = details["last_present_duration"]
detected_time = datetime.datetime.fromisoformat(details["last_detected_time"])
local_time = local_tz.localize(detected_time)
        result = f"There are no significant changes in his routines. He slept {details['sleep_hours']} hours last night and woke once to use the bathroom. He has been in the {last_location} for the last {last_present_duration} minutes, and I can smell coffee in the kitchen."
to_return = result
#to_return = "Your father appears to be fine. He was walking around the house 10 minutes ago and is currently in the living room. And I can smell coffee"
elif commmand == "#STATUS_F#":
to_return = "Your mother is doing well. She slept 8hr and 23min last night. She used the restroom twice last night. She is now in the kitchen. I can smell coffee."
elif commmand == "#HELP#":
        to_return = "There are a number of things you can ask me about. For example: 'How is my dad doing?' or 'How is his environment?' or any other question you like"
elif commmand == "#SLEEP#":
to_return = "Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
elif commmand == "#SLEEP_F#":
to_return = "Your mom slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
elif commmand == "#ENVIRONMENT#":
to_return = "The temperature in the house is 23 degrees Celsius, CO2 level is 662 ppm, and I can smell coffee brewing. Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
elif commmand == "#WEEK#":
to_return = "Showing his weekly activity"
elif commmand == "#WEEK_F#":
to_return = "Showing her weekly activity"
elif commmand == "#ACTIVITY#":
        to_return = "Your dad has been less active this week than usual. He spent more time sitting in the living room and got up later than usual by 38 minutes. He also did not go outside as frequently and had fewer visitors. He only showered once this week."
    elif commmand == "#ACTIVITY_F#":
        to_return = "Your mom has been less active this week than usual. She spent more time sitting in the living room and got up later than usual by 38 minutes. She also did not go outside as frequently and had fewer visitors. She only showered once this week."
    elif commmand == "#ACTIVITY_COMPARE#":
        to_return = "Overall, your dad is less active this year compared to last year. He slept longer in the mornings and had fewer visitors. Also, his shower activity is reduced from typically twice a week to once a week."
    elif commmand == "#ACTIVITY_COMPARE_F#":
        to_return = "Overall, your mom is less active this year compared to last year. She slept longer in the mornings and had fewer visitors. Also, her shower activity is reduced from typically twice a week to once a week."
elif commmand == "#LOCATION#":
filterr = 5
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
#current_time = datetime.datetime.now()
current_time = datetime.datetime.now(datetime.timezone.utc)
last_location = details["last_location"]
before_last_location = details["before_last_location"]
last_present_duration = details["last_present_duration"]
detected_time = datetime.datetime.fromisoformat(details["last_detected_time"])
local_time = local_tz.localize(detected_time)
# Convert to UTC
detected_utc_time = local_time.astimezone(pytz.UTC)
time_diff = current_time - detected_utc_time
minutes = time_diff.total_seconds() / 60
#patch... needs investigating todo
if minutes > 1400:
minutes = 0
time_sentence = format_time_difference(minutes)
if minutes < 2:
            to_return = f"He has been in the {last_location} for {last_present_duration} minutes. Before that he was in the {before_last_location}"
else:
to_return = f"He was last detected in the {last_location} {time_sentence} ago"
elif commmand == "#SHOWER#":
to_return = "In the last 7 days, your Dad took a shower on Friday, Sunday and Tuesday"
elif commmand == "#SHOWER_F#":
        to_return = "The last time your mom took a shower was yesterday at 9:33 AM"
elif commmand == "#BATHROOM#":
        to_return = "Last night your Dad used the restroom only once, at 6:10 AM"
elif commmand == "#KITCHEN#":
to_return = "Your Dad only cooked Dinner on Wednesday and he turned off the stove afterwards"
elif commmand == "#MOLD#":
        to_return = "I cannot smell any mold. Also, the humidity is very low: it never exceeded 27% RH in any of the rooms over the last 7 days."
elif commmand == "#VISITORS#":
to_return = "Yes, on Tuesday, I could detect motion in both office and kitchen at the same time and CO2 levels in the living room exceeded 900ppm."
elif commmand == "#TEMPERATURE#":
filterr = 5
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
current_time = datetime.datetime.now(datetime.timezone.utc)
last_location = details["last_location"]
temperature = int(details["temperature"])
if "America" in time_zone_s or "US/" in time_zone_s:
            temperature_sentence = f"{int(CelsiusToFahrenheit(temperature))} degrees Fahrenheit"
        else:
            temperature_sentence = f"{temperature} degrees Celsius"
        to_return = f"The temperature in the {last_location} is {temperature_sentence}."
elif commmand == "#TEMPERATURE_B#":
        to_return = "The temperature in the main bathroom is 80 degrees Fahrenheit."
elif commmand == "#OXYGEN#":
to_return = "His last oxygen level was at 95%."
elif commmand == "#OXYGEN_F#":
to_return = "Her last oxygen level was at 95%."
elif commmand == "#HEART_RATE#":
to_return = "His last heart rate was 74 bpm."
elif commmand == "#BLOOD_PRESSURE#":
to_return = "His latest blood pressure was measured 5 hours ago and it was 137 over 83."
elif commmand == "#BLOOD_PRESSURE_F#":
to_return = "Her latest blood pressure was measured 5 hours ago and it was 137 over 83."
elif commmand == "#EKG#":
to_return = "His latest HeartBeam EKG was done on Monday and it was within his baseline!"
elif commmand == "#EKG_F#":
to_return = "Her latest HeartBeam EKG was done on Monday and it was within her baseline!"
return to_return
def ScaleToCommon(data, sensor):
if sensor == "temperature":
new_min = 0
new_max = 100
elif sensor == "humidity":
new_min = 100
new_max = 200
elif sensor == "light":
new_min = 200
new_max = 300
elif sensor == "radar":
new_min = 300
new_max = 400
elif sensor == "s0":
new_min = 400
new_max = 500
elif sensor == "s1":
new_min = 500
new_max = 600
elif sensor == "s2":
new_min = 600
new_max = 700
elif sensor == "s3":
new_min = 700
new_max = 800
elif sensor == "s4":
new_min = 800
new_max = 900
elif sensor == "s5":
new_min = 900
new_max = 1000
elif sensor == "s6":
new_min = 1000
new_max = 1100
elif sensor == "s7":
new_min = 1100
new_max = 1200
elif sensor == "s8":
new_min = 1200
new_max = 1300
else: #s9
new_min = 1300
new_max = 1400
# Split timestamps and values into separate arrays
timestamps = np.array([x[0] for x in data])
values = np.array([x[1] for x in data])
# Get current min and max
if len(values) > 0:
current_min = np.min(values)
current_max = np.max(values)
    else:
        current_min = 0
        current_max = 0
# Scale the values using the min-max formula
if current_max - current_min > 0:
scaled_values = (values - current_min) * (new_max - new_min) / (current_max - current_min) + new_min
else:
mid_val = (new_max + new_min) / 2
scaled_values = np.full_like(values, mid_val)
# Zip back together with original timestamps
return list(zip(timestamps, scaled_values))
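# Illustrative usage sketch (not called anywhere in the service): humidity readings are rescaled
# into the 100-200 band reserved for that sensor.
def _demo_scale_to_common():
    readings = [(1700000000 + 60 * i, 30.0 + i) for i in range(5)]
    return ScaleToCommon(readings, "humidity")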
def CreateLocationsStripe(locations_file, time_zone_s):
parts = locations_file.split("/")
parts1 = parts[2].split("_")
ddate = parts1[1]
deployment_id = parts1[0]
filter_minutes = parts1[2]
bw = False
chart_type = 4
force_recreate = True
motion = False
scale_global = False
fast = True
GenerateFullLocationMap(locations_file, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
def CelsiusToFahrenheit(C):
F = (C * 9/5) + 32
return F
def CelsiusToFahrenheitList(compressed_readings: List[Tuple[datetime.datetime, np.float64]]) -> List[Tuple[datetime.datetime, np.float64]]:
# Create a new list with converted temperatures
converted_readings = [
[reading[0], CelsiusToFahrenheit(reading[1])]
for reading in compressed_readings
]
return converted_readings
def GetPriviledgesOnly(user_name):
with get_db_connection() as conn:
        # Use parameterized queries to avoid SQL injection via user_name
        if isinstance(user_name, int) or user_name.isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = %s"
            params = (int(user_name),)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = %s"
            params = (user_name,)
        with conn.cursor() as cur:
            cur.execute(sql, params)
            result = cur.fetchall()
            if result:
                return result[0][0]
            else:
                return "0"
def GetPriviledgesAndUserId(user_name):
with get_db_connection() as conn:
sql = "SELECT access_to_deployments, user_id FROM public.person_details WHERE user_name = '" + user_name + "'"
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchall()#cur.fetchone()
            if result:
                return result[0]
            else:
                return "[0,0]"
def AddToLog(message):
"""Add message to log"""
logger.info(message)
def FillFields(blob_data, record, form_type):
"""
Fill in the input fields in the HTML blob_data with values from the caretaker dictionary.
:param blob_data: str - The initial HTML string containing empty or placeholder input fields.
:param caretaker: dict - The dictionary containing values to populate the fields.
:return: str - The HTML string with the input fields filled with the appropriate values.
"""
# Ensure blob_data is a string
#blob_data = str(blob_data)
# Populate the fields
for field in record:
logger.debug(f"field= {field}")
if field == "user_id":
if record[field] is not None:
escaped_string = html.escape(str(record[field]))
# Create a regex pattern to match the span with specific id
pattern = rf'(]+id="editing_user_id"[^>]*>)([^<]*)()'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
elif field == "deployment_id":
if record[field] is not None:
escaped_string = html.escape(str(record[field]))
# Create a regex pattern to match the span with specific id
pattern = rf'(]+id="editing_deployment_id"[^>]*>)([^<]*)()'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
elif field == "device_id":
if record[field] is not None:
escaped_string = html.escape(str(record[field]))
# Create a regex pattern to match the span with specific id
pattern = rf'(]+id="editing_device_id"[^>]*>)([^<]*)()'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
elif field == "user_name":
if record[field] != None:
escaped_string = html.escape(record[field])
pattern = rf'(]+id="new_user_name"[^>]+value=")[^"]*(")'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
# Add value attribute if it does not exist
pattern = rf'(]+id="new_user_name"[^>]*)(>)'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
elif field == "location":
if record[field] != None:
blob_data = SelectOption(blob_data, 'location', record[field])
elif field == "gender":
if record[field] != None:
blob_data = SelectOption(blob_data, 'gender', record[field])
elif field == "race":
if record[field] != None:
blob_data = SelectOption(blob_data, 'race', record[field])
elif field == "time_zone_s":
if record[field] != None:
blob_data = SelectOption(blob_data, 'time_zone_s', record[field])
elif field == "time_edit" or field == "user_edit":
pass
else:
if record[field] != None:
escaped_string = html.escape(str(record[field]))
pattern = rf'(]+id="{field}"[^>]+value=")[^"]*(")'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
# Add value attribute if it does not exist
pattern = rf'(]+id="{field}"[^>]*)(>)'
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
return blob_data
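# Minimal sketch of FillFields (hypothetical HTML and form_type), assuming the
# span-matching patterns above: the span text is replaced with the record value.
#   blob = '<span id="editing_user_id">old</span>'
#   FillFields(blob, {"user_id": 42}, "person")
#   # -> '<span id="editing_user_id">42</span>'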
def convert_timestamps_lc(data, time_zone_s):
target_tz = pytz.timezone(time_zone_s)
return [[datetime.datetime.fromtimestamp(epoch, pytz.UTC).astimezone(target_tz), value]
for epoch, value in data]
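# Illustrative (hypothetical) usage: epoch seconds become timezone-aware datetimes,
# the paired values pass through unchanged.
#   convert_timestamps_lc([[0, 21.5]], "UTC")
#   # -> [[datetime.datetime(1970, 1, 1, 0, 0, tzinfo=<UTC>), 21.5]]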
subbedToL = [("/wellget",1),("/wellget_cmp",1),("/well_hub",1)]
def on_connectL(client_, userdata, flags, rc):
print(MQTTSERVERL + " L. Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client_.subscribe(subbedToL)
print("SubscribedL to: "+str(subbedToL))
def on_messageL(client_, userdata, msg): #message from GUI
print(msg.topic+" "+str(msg.payload))
#msga = msg.payload.decode("ascii")
#print(msg.timestamp)
#in_queue.append((str(time.time()), msg.topic, msg.payload))
def MQSendL(topic, content, qos=1):
print(topic, content[0:100])
#return MQSend(topic, content)
#currentTime = int(time.time())
try:
if "_cmp" in topic:
enc_msg = zlib.compress(content.encode('utf-8'))
else:
enc_msg = content
clientL.publish(topic, enc_msg, qos=qos, retain=False)
except Exception as err:
print ("Err2B:", err)
try:
clientL.disconnect()
#client.username_pw_set('telegraf', 'well18')
clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
except Exception as e:
print ("Err3b:", e)
def StoreFloorPlan(deployment_id, layout):
conn = get_db_connection()
cur = conn.cursor()
print(layout)
data = json.loads(layout)
# Extract the overlapping list
overlapping_list = str(data["overlapping"])
try:
sql = f"""
UPDATE public.deployment_details SET floor_plan = '{CleanObject(layout)}' WHERE deployment_id = {deployment_id};
"""
logger.debug(f"sql= {sql}")
cur.execute(sql)
conn.commit()
sql1 = f"""
INSERT INTO public.deployment_details (deployment_id, "overlapps")
VALUES ({deployment_id}, '{CleanObject(overlapping_list)}')
ON CONFLICT (deployment_id)
DO UPDATE SET "overlapps" = '{CleanObject(overlapping_list)}';
"""
logger.debug(f"sql= {sql1}")
cur.execute(sql1)
conn.commit()
cur.close()
conn.close()
AddToLog("Written/updated!")
return 1
    except Exception as err:
        logger.error(f"StoreFloorPlan failed: {err}")
        return 0
def GetFloorPlan(deployment_id):
    conn = get_db_connection()
    try:
        sql = f"""
        SELECT floor_plan FROM public.deployment_details WHERE deployment_id = {deployment_id};
        """
        logger.debug(f"sql= {sql}")
        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()
            if result:
                return result[0][0]
            else:
                return ""
    except Exception as err:
        logger.error(f"GetFloorPlan failed: {err}")
        return 0
    finally:
        conn.close()
# CORS Middleware
class CORSMiddleware:
def process_request(self, req, resp):
resp.set_header('Access-Control-Allow-Origin', '*')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
resp.set_header('Access-Control-Allow-Headers', '*')
def process_response(self, req, resp, resource, req_succeeded):
if req.method == 'OPTIONS': # Handle preflight requests
resp.status = falcon.HTTP_200
class RequestParser:
def __init__(self):
# Detect if we're running in debug/development mode
self.debug_mode = __name__ == "__main__" or os.environ.get('DEBUG', 'false').lower() in ('true', '1', 'yes')
logger.debug(f"RequestParser initialized in {'DEBUG' if self.debug_mode else 'PRODUCTION'} mode")
def process_request(self, req, resp):
"""Pre-process the request to ensure media is parsed early"""
logger.debug(f"RequestParser processing: {req.method} {req.path}")
# Initialize an empty form_data dict
req.context.form_data = {}
# Only process POST requests with the right content type
if req.method != 'POST' or not req.content_type or 'form-urlencoded' not in req.content_type:
logger.debug("RequestParser: Skipping (not a form POST)")
return
try:
# Different handling based on environment
if self.debug_mode:
self._process_debug(req)
else:
self._process_production(req)
except Exception as e:
logger.error(f"RequestParser error: {str(e)}")
logger.error(traceback.format_exc())
def _read_chunked_safely(self, req):
"""Safely read chunked data"""
try:
# Try reading with a reasonable size limit
return req.bounded_stream.read(50 * 1024 * 1024) # 50MB
except:
return b''
def _process_debug(self, req):
"""Process request in debug mode - optimized for local development"""
logger.debug("RequestParser: Using DEBUG mode processing")
# In debug mode, we can use Content-Length and know it's reliable
content_length = req.get_header('content-length')
if content_length:
# Content-Length is present
content_length = int(content_length)
logger.debug(f"RequestParser: Reading {content_length} bytes using Content-Length")
raw_body = req.stream.read(content_length)
if raw_body:
body_text = raw_body.decode('utf-8')
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
# Parse the form data
form_data = dict(urllib.parse.parse_qsl(body_text))
# Store in context
req.context.form_data = form_data
#logger.debug(f"RequestParser: Parsed form data: {form_data}")
# Reset the stream with the original content
req.stream = io.BytesIO(raw_body)
else:
logger.debug("RequestParser: No body data read")
else:
logger.debug("RequestParser (debug): No Content-Length header")
def _process_production(self, req):
"""Process request in production mode - optimized for OpenFaaS/faasd deployment"""
logger.debug("RequestParser: Using PRODUCTION mode processing")
# Try Content-Length first
content_length = req.get_header('content-length')
if content_length:
content_length = int(content_length)
logger.debug(f"RequestParser: Reading {content_length} bytes using Content-Length")
raw_body = req.stream.read(content_length)
else:
logger.debug("RequestParser (production): No Content-Length header - reading available data")
# Read all available data (faasd buffers the complete request)
raw_body = req.stream.read() # Read everything available
if raw_body:
body_text = raw_body.decode('utf-8')
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
# Parse the form data
form_data = dict(urllib.parse.parse_qsl(body_text))
# Store in context
req.context.form_data = form_data
logger.debug(f"RequestParser: Parsed form data keys: {list(form_data.keys())}")
# Reset the stream with the original content
req.stream = io.BytesIO(raw_body)
else:
logger.debug("RequestParser: No body data read")
def FindDeviceByRole(deployment_id, location_list):
#For purposes of activity report, Bedroom and Bathroom are determined in order of priority:
#Bedroom: "Bedroom Master", "Bedroom", "Bedroom Guest" (106, 56, 107)
#Bathroom: ""Bathroom Main","Bathroom","Bathroom Guest" (104, 103, 105)
#location_names_inverted = {"All":-1 ,"?": 0,"Office": 5,"Hallway": 6,"Garage": 7,"Outside": 8,"Conference Room": 9,"Room": 10,"Kitchen": 34,
# "Bedroom": 56,"Living Room": 78,"Bathroom": 102,"Dining Room": 103,"Bathroom Main": ,104,"Bathroom Guest": 105,
# "Bedroom Master": 106, "Bedroom Guest": 107, "Conference Room": 108, "Basement": 109, "Attic": 110, "Other": 200}
ttime = datetime.datetime.now(datetime.timezone.utc).timestamp()
devices_list, device_ids = GetProximityList(deployment_id, ttime)
if location_list != []:
for location in location_list:
for device in devices_list:
well_id = device[0]
device_id = device[1]
location_t = device[2]
if location_t == location:
return (device_id, location, well_id)
else:
conn = get_db_connection()
with conn.cursor() as cur:
#we need to find beneficiaries from list of deployments
#sql = f'SELECT device_id FROM public.devices where device_id in {device_ids} and other="other"'
sql = "SELECT device_id, location, well_id FROM public.devices WHERE device_id = ANY(%s) AND other = %s"
#print(sql)
cur.execute(sql, (device_ids, "other"))
result = cur.fetchall()#cur.fetchone()
if len(result) > 0:
return result[0]
else:
devices_list, device_ids = GetProximityList(deployment_id, ttime)
for device in devices_list:
well_id = device[0]
device_id = device[1]
location_t = device[2]
if "Bathroom" in location_t or "Bedroom" in location_t or "Kitchen" in location_t:
pass
else:
return (device_id, location_t, well_id)
return (0, 0, 0)
def ensure_date_order(from_date, to_date):
"""
Ensures that from_date is earlier than to_date.
If not, swaps the dates.
Args:
from_date: Date string in format 'YYYY-MM-DD'
to_date: Date string in format 'YYYY-MM-DD'
Returns:
Tuple of (from_date, to_date) in correct order
"""
# Compare the date strings
# This works because the 'YYYY-MM-DD' format allows for string comparison
if from_date > to_date:
# Swap the dates
return to_date, from_date
else:
# Dates are already in correct order
return from_date, to_date
def signum(x):
return (x > 0) - (x < 0)
def get_week_days_and_dates(days_back, timezone_str="America/Los_Angeles"):
"""
Generate weekdays and dates from 7 days ago until today for a given timezone.
Args:
timezone_str (str): Timezone string like "America/Los_Angeles"
Returns:
list: List of tuples containing (weekday_name, date_string)
"""
# Get the timezone object
tz = pytz.timezone(timezone_str)
# Get current date in the specified timezone
today = datetime.datetime.now(tz).date()
# Generate dates from days_back days ago to today
result = []
for i in range(days_back-1, -1, -1): # days_back days ago to today (inclusive)
date = today - timedelta(days=i)
weekday_name = date.strftime("%A") # Full weekday name
date_string = date.strftime("%Y-%m-%d") # ISO format date
day_of_month = date.day
result.append((date_string, weekday_name, day_of_month))
return result
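# Sketch of the returned shape (dates shown are only illustrative):
#   get_week_days_and_dates(3, "America/Los_Angeles")
#   # -> [("2025-09-23", "Tuesday", 23), ("2025-09-24", "Wednesday", 24), ("2025-09-25", "Thursday", 25)]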
def filter_short_groups_numpy_orig(presence_list, filter_size, device_id, dates_str):
"""
Optimized version using NumPy to remove groups of consecutive zeros
or consecutive non-zeros (based on sign) shorter than filter_size.
Mimics the iterative, shortest-first logic of filter_short_groupss.
Args:
presence_list: List of numbers (can include floats, ints, 0s).
filter_size: Minimum size of consecutive groups (by sign) to keep.
Returns:
Filtered list with short groups removed. Output contains 0s and 1s.
(Note: Differs slightly from filter_short_groupss if negative numbers
were present, as this version converts them to 0, not 2).
"""
st = time.time()
if not presence_list or filter_size <= 1:
# print(f"NumPy: Early exit/no processing time: {time.time() - st:.6f}s")
# Return a copy to avoid modifying the original list
return presence_list[:] if isinstance(presence_list, list) else list(presence_list)
# Work with a NumPy array for efficiency, ensure float type for consistency
result = np.array(presence_list, dtype=float)
n = len(result)
# Use a set to store hashable representations (tuples) of previous states for cycle detection
previous_states = set()
while True:
current_state_tuple = tuple(result)
if current_state_tuple in previous_states:
# print("NumPy: Cycle detected, breaking.")
break
previous_states.add(current_state_tuple)
# 1. Calculate the sign of each element (-1, 0, 1)
signs = np.sign(result)
# 2. Find indices where the sign changes
# np.diff calculates the difference between adjacent elements.
# A non-zero difference means the sign changed.
# np.where returns the indices *before* the change. Add 1 to get the start of the new run.
change_indices = np.where(np.diff(signs) != 0)[0] + 1
# 3. Define the boundaries of all consecutive runs (start and end indices)
# Include the start (0) and end (n) of the array.
boundaries = np.concatenate(([0], change_indices, [n]))
# 4. Identify short runs
short_runs_to_process = []
for i in range(len(boundaries) - 1):
start = boundaries[i]
end = boundaries[i+1] # Slicing is exclusive of the end index
length = end - start
if length > 0: # Ensure the run is not empty
# Determine the characteristic sign of the run (use the first element)
run_sign = signs[start]
if length < filter_size:
# --- Verification Step (Crucial) ---
# Check if the segment *still* consists of elements with the same sign.
# This handles cases where a previous modification might have altered
# part of what *was* a longer run.
current_segment_signs = np.sign(result[start:end])
if np.all(current_segment_signs == run_sign):
# If the run is short and its sign consistency is verified,
# add it to the list of candidates for modification.
short_runs_to_process.append({
'start': start,
'end': end,
'sign': run_sign,
'length': length
})
# --- End Verification ---
# 5. Check if any short runs were found
if not short_runs_to_process:
# No modifiable short runs found in this pass, the list is stable.
break
# 6. Sort the short runs: shortest first, then by start index for determinism
# This ensures we process the same run as the original iterative function would.
short_runs_to_process.sort(key=lambda r: (r['length'], r['start']))
# 7. Process ONLY the *first* (shortest) identified run in this pass
run_to_process = short_runs_to_process[0]
start = run_to_process['start']
end = run_to_process['end']
run_sign = run_to_process['sign']
# Determine the replacement value based on the sign of the run being removed
# Short runs of 0 become 1
# Short runs of non-zero (positive or negative) become 0
replacement_value = 1.0 if run_sign == 0 else 0.0
# 8. Apply the replacement to the segment using NumPy slicing
result[start:end] = replacement_value
# Loop continues because a change was made
print(f"filter_short_groups_numpy time: {time.time() - st:.6f}s")
if (time.time() - st) > 40:
print(presence_list)
# Convert back to a standard Python list for the return value
return result.tolist()
def filter_short_groups_numpy(presence_list, filter_size, device_id, dates_str):
"""
Optimized version using NumPy to remove groups of consecutive zeros
or consecutive non-zeros (based on sign) shorter than filter_size.
Mimics the iterative, shortest-first logic.
Optimization:
- Vectorized extraction of segment properties.
- Removed redundant sign verification within the segment analysis loop.
"""
# Start timer (optional, for benchmarking)
st = time.time()
if not presence_list or filter_size <= 1:
# print(f"NumPy Optimized: Early exit/no processing time: {time.time() - st:.6f}s")
return presence_list[:] if isinstance(presence_list, list) else list(presence_list)
result = np.array(presence_list, dtype=float)
n = len(result)
previous_states = set()
while True:
# Cycle detection
current_state_tuple = tuple(result)
if current_state_tuple in previous_states:
# print("NumPy Optimized: Cycle detected, breaking.")
break
previous_states.add(current_state_tuple)
# 1. Calculate the sign of each element (-1, 0, 1)
signs = np.sign(result)
# 2. Find indices where the sign changes
change_indices = np.where(np.diff(signs) != 0)[0] + 1
# 3. Define the boundaries of all consecutive runs
boundaries = np.concatenate(([0], change_indices, [n]))
# If there's only one segment (e.g., all zeros, all ones, or array is too short to have changes),
# or if the array was empty (n=0 leading to boundaries=[0,0]), no further processing is needed.
if len(boundaries) <= 2: # e.g., boundaries is [0, n] or [0,0]
break
# 4. Vectorized extraction of run properties
run_starts = boundaries[:-1]
run_ends = boundaries[1:]
run_lengths = run_ends - run_starts
# The sign of the first element of a run (from the 'signs' array computed at the
# start of this 'while' iteration) is representative of the entire run's sign,
# by definition of how 'boundaries' were created.
run_signs = signs[run_starts]
# 5. Identify short runs and collect their properties
short_runs_to_process = []
for i in range(len(run_starts)): # Iterate over all identified runs
# Ensure run_length is positive (should be, due to boundary logic, but good check)
if run_lengths[i] > 0 and run_lengths[i] < filter_size:
short_runs_to_process.append({
'start': run_starts[i],
'end': run_ends[i],
'sign': run_signs[i],
'length': run_lengths[i]
})
# 6. Check if any modifiable short runs were found
if not short_runs_to_process:
# No short runs found in this pass, the list is stable.
break
# 7. Sort the short runs: shortest first, then by start index for determinism
short_runs_to_process.sort(key=lambda r: (r['length'], r['start']))
# 8. Process ONLY the *first* (shortest) identified run in this pass
run_to_process = short_runs_to_process[0]
start = run_to_process['start']
end = run_to_process['end']
run_sign = run_to_process['sign']
# Determine the replacement value
replacement_value = 1.0 if run_sign == 0 else 0.0
# 9. Apply the replacement
result[start:end] = replacement_value
# A change was made, so the 'while True' loop continues (unless a cycle is detected next)
# End timer and print (optional)
# Your original print statements for timing:
print(f"filter_short_groups_numpy time: {time.time() - st:.6f}s")
# if (time.time() - st) > 40:
# print(presence_list) # This would print the original input on long runs
return result.tolist()
def filter_short_groups(presence_list, filter_size):
"""
Corrected version to perform the same task as filter_short_groupss,
including handling of non-zero/non-one values based on signum.
Iteratively removes the shortest group < filter_size by flipping its
signum representation (0->1, pos->0, neg->2).
Args:
presence_list: List of numbers (0s, 1s, or any other number).
filter_size: Minimum size of groups (based on signum) to keep.
Returns:
Filtered list with short groups removed, potentially containing 0, 1, 2.
"""
st = time.time()
if not presence_list or filter_size <= 1:
# print(f"filter_short_groups: Early exit/no processing time: {time.time() - st:.6f}s")
return presence_list.copy()
result = presence_list.copy()
n = len(result)
# Using a set for faster cycle detection lookups
previous_states = set()
while True:
current_state_tuple = tuple(result)
if current_state_tuple in previous_states:
# print("Cycle detected in filter_short_groups, breaking.")
break
previous_states.add(current_state_tuple)
# --- Start of logic mimicking filter_short_groupss ---
changes_made_outer = False
# 1. Find all segments based on signum
segments = []
i = 0
while i < n:
start = i
# Use signum to define the characteristic value of the run
current_signum = signum(result[i])
# Find the end of the group based on *consistent signum*
while i < n and signum(result[i]) == current_signum:
i += 1
group_length = i - start
# Store the signum value associated with the run
segments.append((start, i - 1, current_signum, group_length))
# 2. Sort segments by length (ascending) to process shortest first
segments.sort(key=lambda x: x[3])
# 3. Process the segments (find the first short one to modify)
for start, end, run_signum, length in segments:
if length < filter_size:
# Verify the segment hasn't been fundamentally altered (signum-wise)
# This check mirrors filter_short_groupss's intent, using signum consistently.
is_still_original_signum_segment = True
for k_idx in range(start, end + 1):
if signum(result[k_idx]) != run_signum:
is_still_original_signum_segment = False
break
if is_still_original_signum_segment:
# Calculate replacement value based on signum (0->1, pos->0, neg->2)
replacement_value = 1 - run_signum
# Apply replacement
segment_modified = False
for j in range(start, end + 1):
# Use direct comparison as replacement values are integers (0, 1, 2)
if result[j] != replacement_value:
result[j] = replacement_value
segment_modified = True
if segment_modified:
changes_made_outer = True
# Break after making *one* change and restart the whole process
# (finding segments, sorting, finding shortest modifiable)
break # Break from the 'for segment in segments' loop
# --- End of logic mimicking filter_short_groupss ---
if not changes_made_outer:
# If we went through all segments and made no changes, we're done.
break
print(f"filter_short_groups time: {time.time() - st:.6f}s")
return result
def filter_short_groupss(presence_list, filter_size):
"""
Iteratively remove groups of consecutive 0s or 1s that are shorter than filter_size.
Continues until no more changes are made.
Args:
presence_list: List of 0s and 1s
filter_size: Minimum size of groups to keep
Returns:
Filtered list with short groups removed
"""
st = time.time()
if not presence_list or filter_size <= 1:
return presence_list.copy()
result = presence_list.copy()
changes_made = True
while changes_made:
changes_made = False
# First identify all segments
segments = []
i = 0
n = len(result)
while i < n:
# Find the start of a group
start = i
current_value = signum(result[i])
# Find the end of the group
while i < n and signum(result[i]) == current_value:
i += 1
# Calculate group length
group_length = i - start
segments.append((start, i-1, current_value, group_length))
# Sort segments by length (ascending) to process shortest first
segments.sort(key=lambda x: x[3])
# Process the segments
for start, end, value, length in segments:
# If segment is too short, replace with opposite value
if length < filter_size:
# Verify the segment hasn't been modified by previous replacements
if all(result[j] == value for j in range(start, end+1)):
replacement = 1 - value # Toggle between 0 and 1
for j in range(start, end+1):
result[j] = replacement
changes_made = True
#print(start, end)
break # Break after making a change and restart
print("s", time.time()-st)
return result
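# Worked example (hypothetical input) of the short-group filtering: with filter_size=3
# the lone 1 is shorter than 3 samples and gets flipped to 0.
#   filter_short_groupss([0, 0, 0, 1, 0, 0, 1, 1, 1], 3)
#   # -> [0, 0, 0, 0, 0, 0, 1, 1, 1]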
def filter_short_segments(segments, filter_size):
"""
Iteratively remove segments that are shorter than filter_size,
replacing them with data from the previous segment.
Args:
segments: List of tuples (start_time, end_time, num_persons, duration)
filter_size: Minimum duration to keep a segment
Returns:
Filtered list of segments covering the entire time range
"""
if not segments or filter_size <= 0:
return segments.copy()
result = segments.copy()
changes_made = True
while changes_made:
changes_made = False
i = 1 # Start from the second segment
while i < len(result):
_, _, _, duration = result[i]
if duration < filter_size:
# Get the previous segment's person count
if i > 0:
_, _, prev_persons, _ = result[i-1]
start, end, _, dur = result[i]
# Replace with previous person count
result[i] = (start, end, prev_persons, dur)
changes_made = True
# Check if we can merge with previous segment
if i > 0:
prev_start, prev_end, prev_persons, prev_dur = result[i-1]
curr_start, curr_end, curr_persons, curr_dur = result[i]
if prev_persons == curr_persons and prev_end + 1 == curr_start:
# Merge segments
merged = (prev_start, curr_end, prev_persons, prev_dur + curr_dur)
result[i-1] = merged
result.pop(i)
i -= 1 # Adjust index after removing an element
changes_made = True
i += 1
# Sort segments by start time to ensure proper order
result.sort(key=lambda x: x[0])
return result
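# Worked example (hypothetical segments): the short middle segment inherits the previous
# person count and is then merged into its neighbour.
#   segs = [(0, 9, 1, 10), (10, 11, 2, 2), (12, 20, 3, 9)]
#   filter_short_segments(segs, 5)
#   # -> [(0, 11, 1, 12), (12, 20, 3, 9)]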
def filter_out_short_high_segments(segments, filter_size):
"""
Iteratively remove segments that are shorter than filter_size,
replacing them with data from the previous segment.
Args:
segments: List of tuples (start_time, end_time, num_persons, duration)
filter_size: Minimum duration to keep a segment
Returns:
Filtered list of segments covering the entire time range
"""
if not segments:
return segments.copy()
result = segments.copy()
changes_made = True
while changes_made:
changes_made = False
i = 1 # Start from the second segment
while i < len(result):
_, _, _, duration = result[i]
if duration < filter_size:
# Get the previous segment's person count
if i > 0:
_, _, prev_persons, _ = result[i-1]
start, end, _, dur = result[i]
# Replace with previous person count
result[i] = (start, end, prev_persons, dur)
changes_made = True
# Check if we can merge with previous segment
if i > 0:
prev_start, prev_end, prev_persons, prev_dur = result[i-1]
curr_start, curr_end, curr_persons, curr_dur = result[i]
if prev_persons == curr_persons and prev_end + 1 == curr_start:
# Merge segments
merged = (prev_start, curr_end, prev_persons, prev_dur + curr_dur)
result[i-1] = merged
result.pop(i)
i -= 1 # Adjust index after removing an element
changes_made = True
i += 1
# Sort segments by start time to ensure proper order
result.sort(key=lambda x: x[0])
return result
def filter_out_short_same_groups_iterative(presence_list, filter_size):
"""
Iteratively remove groups of consecutive sames that are shorter than filter_size.
Continues until no more changes are made.
Args:
presence_list: List of values
filter_size: Minimum size of groups to keep
Returns:
Filtered list with short groups removed
"""
if not presence_list:
return presence_list.copy()
result = presence_list.copy()
# First identify all segments
segments = []
i = 0
n = len(result)
while i < n:
# Find the start of a group
start = i
current_value = result[i]
# Find the end of the group
while i < n and result[i] == current_value:
i += 1
# Calculate group length
group_length = i - start
segments.append((start, i-1, current_value, group_length))
result = filter_out_short_high_segments(segments, filter_size)
return result
def filter_out_short_highs_iterative(presence_list, filter_size):
"""
Iteratively remove groups of consecutive sames that are shorter than filter_size.
Continues until no more changes are made.
Args:
presence_list: List of values
filter_size: Minimum size of groups to keep
Returns:
Filtered list with short groups removed
"""
if not presence_list:
return presence_list.copy()
result = presence_list.copy()
# First identify all segments
segments = []
i = 0
n = len(result)
while i < n:
# Find the start of a group
start = i
current_value = result[i]
# Find the end of the group
while i < n and result[i] == current_value:
i += 1
# Calculate group length
group_length = i - start
segments.append((start, i-1, current_value, group_length))
result = filter_out_short_high_segments(segments, filter_size)
return result
def filter_short_groups_iterative_analog(presence_list, filter_size):
"""
Iteratively remove groups of consecutive similar values that are shorter than filter_size.
For non-zero values, replaces with 0. For zero values, needs context to determine replacement.
"""
if not presence_list or filter_size <= 1:
return presence_list.copy()
result = presence_list.copy()
changes_made = True
while changes_made:
changes_made = False
# Identify all segments of consecutive similar values
segments = []
i = 0
n = len(result)
while i < n:
start = i
is_zero = (result[i] == 0)
# Find the end of the group with same characteristic (zero or non-zero)
while i < n and ((result[i] == 0) == is_zero):
i += 1
group_length = i - start
segments.append((start, i-1, is_zero, group_length))
# Process segments from shortest to longest
segments.sort(key=lambda x: x[3])
for start, end, is_zero, length in segments:
if length < filter_size:
# For short non-zero groups, replace with zeros
if not is_zero:
for j in range(start, end+1):
result[j] = 0
changes_made = True
break
else:
# For short zero groups, replace with average of surrounding non-zero values
# First, find surrounding values
left_value = 0
right_value = 0
# Look for non-zero value on the left
for j in range(start-1, -1, -1):
if result[j] != 0:
left_value = result[j]
break
# Look for non-zero value on the right
for j in range(end+1, n):
if result[j] != 0:
right_value = result[j]
break
# Calculate replacement value
if left_value > 0 and right_value > 0:
replacement = (left_value + right_value) / 2
elif left_value > 0:
replacement = left_value
elif right_value > 0:
replacement = right_value
else:
replacement = 0 # No surrounding non-zero values
# Apply replacement
for j in range(start, end+1):
result[j] = replacement
if replacement != 0: # Only mark as changed if we actually changed something
changes_made = True
break
return result
def filter_short_high_groups_iterative_analog(presence_list, filter_size):
st = time.time()
"""
More efficient implementation that still handles cascading effects.
"""
if not presence_list or filter_size <= 1:
return presence_list.copy()
result = presence_list.copy()
changes_made = True
while changes_made:
changes_made = False
i = 0
n = len(result)
# Use a single pass to find all non-zero segments
segments = []
while i < n:
# Skip zeros
if result[i] == 0:
i += 1
continue
# Found non-zero, find the end of this segment
start = i
while i < n and result[i] != 0:
i += 1
# Add segment to our list
segments.append((start, i))
# Process all short segments in one iteration
for start, end in segments:
length = end - start
if length < filter_size:
# Set all elements in this segment to zero
for j in range(start, end):
result[j] = 0
changes_made = True
# Don't break - process all short segments in this pass
# If we've made changes, we need to check again for newly formed short segments
print(f"filter_short_high_groups_iterative_analog time: {time.time() - st:.6f}s")
return result
def filter_short_high_groups_iterative_analog_orig(presence_list, filter_size):
"""
Iteratively remove groups of consecutive similar values that are shorter than filter_size.
For non-zero values, replaces with 0. For zero values, needs context to determine replacement.
"""
if not presence_list or filter_size <= 1:
return presence_list.copy()
st = time.time()
result = presence_list.copy()
changes_made = True
while changes_made:
changes_made = False
# Identify all segments of consecutive similar values
segments = []
i = 0
n = len(result)
while i < n:
start = i
is_zero = (result[i] == 0)
# Find the end of the group with same characteristic (zero or non-zero)
while i < n and ((result[i] == 0) == is_zero):
i += 1
group_length = i - start
segments.append((start, i-1, is_zero, group_length))
# Process segments from shortest to longest
segments.sort(key=lambda x: x[3])
for start, end, is_zero, length in segments:
if length < filter_size:
# For short non-zero groups, replace with zeros
if not is_zero:
for j in range(start, end+1):
result[j] = 0
changes_made = True
break
print(f"filter_short_high_groups_iterative_analog time: {time.time() - st:.6f}s")
#if (time.time() - st) > 40:
# print(presence_list)
return result
def filter_short_groupsWhat(presence_list, filter_size):
"""
Remove groups of consecutive 0s or 1s that are shorter than filter_size.
For short groups of 0s, replace with 1s.
For short groups of 1s, replace with 0s.
Args:
presence_list: List of 0s and 1s
filter_size: Minimum size of groups to keep
Returns:
Filtered list with short groups removed
"""
if not presence_list or filter_size <= 1:
return presence_list.copy()
result = presence_list.copy()
n = len(result)
# Find groups and process them
i = 0
while i < n:
# Find the start of a group
start = i
current_value = result[i]
# Find the end of the group
while i < n and result[i] == current_value:
i += 1
# Calculate group length
group_length = i - start
# If group is too short, replace with opposite value
if group_length < filter_size:
replacement = 1 - current_value # Toggle between 0 and 1
for j in range(start, i):
result[j] = replacement
return result
def GetOverlapps(deployment_id):
with get_db_connection() as db_conn:
with db_conn.cursor() as cur:
sql = f"SELECT overlapps FROM public.deployment_details WHERE deployment_id = '{deployment_id}'"
cur.execute(sql)
result = cur.fetchone() #cur.fetchall()
if result != None:
return result[0]
def GetAmpitude(point_val, segment_lenght):
if point_val == 0:
return -segment_lenght
else:
return segment_lenght
def CreateZGraph(well_id, presence_list):
"""
return size and position of consecutive groups of 0s and 1s
Args:
presence_list: List of 0s and 1s
Returns:
list of times and lengths
"""
if not presence_list:
return presence_list.copy()
#if well_id == 290:
# print("Stop")
dekas_in_day = 6 * 1440
result = []
print(well_id)
#result will look like this: [(0,34),(34,-56),(92,6),...] where (A,B)
#A: is minute of section, B: height of section +=presence -=absence
#lets find point 0 first moving backward in time
segment_lenght = 0
point_zero_val = signum(presence_list[dekas_in_day])
for i in range(dekas_in_day-1, 0, -1):
if point_zero_val != signum(presence_list[i]):
segment_lenght = dekas_in_day -1 - i
break
x = 0
y = GetAmpitude(point_zero_val, segment_lenght)
result.append((x, y))
#x = x + segment_lenght
last_y = y
last_val = point_zero_val
last_source_minute = dekas_in_day + 1
for i in range(last_source_minute, len(presence_list)):
if last_val != signum(presence_list[i]):
segment_lenght = i - dekas_in_day - x
x = x + segment_lenght
y = last_y + GetAmpitude(last_val, segment_lenght)
result.append((x, y))
result.append((x, 0))
last_y = 0
last_val = signum(presence_list[i])
#last point i is NOT 1 + last above... it is last above so 2879!
segment_lenght = i - dekas_in_day - x
x = i - dekas_in_day #last point
y = GetAmpitude(last_val, segment_lenght)
result.append((x, y))
return result
def CreateZGraphAI(presence_list):
"""
Creates a step graph representation where:
- Presence periods: step up to +count, then drop to 0
- Absence periods: step down to -count, then rise to 0
- X-coordinate advances by the duration of each segment
Pattern: [0,0] → [duration1, +/-count1] → [duration1, 0] → [duration1+duration2, +/-count2] → [duration1+duration2, 0] → ...
Args:
presence_list: List of values (0 = absence, non-zero = presence)
Returns:
List of [minute, height] coordinates for step graph
"""
if not presence_list:
return []
result = [[0, 0]] # Start at origin
i = 0
current_x = 0
while i < len(presence_list):
if presence_list[i] != 0: # Start of presence period
# Count consecutive non-zero values
count = 0
while i < len(presence_list) and presence_list[i] != 0:
count += 1
i += 1
# Move x forward by count, step up to +count
current_x += count
result.append([current_x, count])
# Add vertical line down to 0
result.append([current_x, 0])
else: # Start of absence period
# Count consecutive zero values
count = 0
while i < len(presence_list) and presence_list[i] == 0:
count += 1
i += 1
# Move x forward by count, step down to -count
current_x += count
result.append([current_x, -count])
# Add vertical line up to 0
result.append([current_x, 0])
return result
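# Worked example (hypothetical input) of the step-graph encoding: 3 samples of presence,
# 2 of absence, then 1 of presence.
#   CreateZGraphAI([1, 1, 1, 0, 0, 1])
#   # -> [[0, 0], [3, 3], [3, 0], [5, -2], [5, 0], [6, 1], [6, 0]]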
def get_form_data(req):
"""Helper function to get form data from either context or req.media"""
# First check if we pre-parsed the form data
if hasattr(req.context, 'form_data') and req.context.form_data:
logger.debug("Using pre-parsed form data from context")
return req.context.form_data
# Otherwise try to get from req.media (for json)
try:
if req.content_type and (
falcon.MEDIA_JSON in req.content_type or
falcon.MEDIA_URLENCODED in req.content_type
):
logger.debug("Attempting to get form data from req.media")
return req.media or {}
except Exception as e:
logger.error(f"Error getting req.media: {str(e)}")
logger.debug("No form data available, returning empty dict")
return {}
def DetectMultiple(temporary_map_day_plus, overlaps_str_lst):
"""
Written by Robert Zmrzli
Detects time intervals of multiple vs single/no presence and outputs
the signed duration of each interval at its end time.
Args:
temporary_map_day_plus: Map for each device radar reads that were detected to be above threshold
overlaps_lst: List of pairs of devices that have overlapping area
Returns:
A list of tuples representing the multiple presence timeline segments.
Each segment is represented by two tuples:
1. (end_minute, signed_duration): signed_duration is the length of the
interval ending at end_minute (+ multiple, - single/none).
2. (end_minute, 0): A marker for visualization.
"""
for location_id, data_list in temporary_map_day_plus.items():
minutes_in_data = len(data_list)
break
events = []
min_time = 0
max_time = 0
#['267:273', '273:291']
seen_list = [0] * minutes_in_data
seen_where_list = [[] for _ in range(minutes_in_data)]
for location_id, data_list in temporary_map_day_plus.items():
for i in range(minutes_in_data):
if data_list[i] > 0: # Presence interval
seen_where_list[i].append(location_id)
seen_where_list_uf = seen_where_list.copy()
overlap_pairs = set()
for overlap_str in overlaps_str_lst:
nums = [int(x) for x in overlap_str.split(':')]
# Add both orderings of the pair for easier checking
overlap_pairs.add((nums[0], nums[1]))
overlap_pairs.add((nums[1], nums[0]))
# Process each sub-list in seen_where_list
for i in range(len(seen_where_list)):
locations = seen_where_list[i]
# Skip empty lists and lists with only 0 or 1 item
if len(locations) <= 1:
continue
has_non_overlapping_pair = False
for j in range(len(locations)):
for k in range(j+1, len(locations)):
loc1, loc2 = locations[j], locations[k]
# If this pair is not in our overlap_pairs, then they don't overlap
if (loc1, loc2) not in overlap_pairs:
has_non_overlapping_pair = True
break
if has_non_overlapping_pair:
break
# If all pairs overlap (no non-overlapping pairs found), clear the list
if not has_non_overlapping_pair:
seen_where_list[i] = []
variations = []
variation_index = {}
for i in range(minutes_in_data):
if len(seen_where_list[i]) > 1: # Presence interval
if seen_where_list[i] not in variations:
variations.append(seen_where_list[i])
variation_index[str(seen_where_list[i])] = len(variations) - 1
seen_list[i] = variation_index[str(seen_where_list[i])]
return seen_list, seen_where_list_uf
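# Minimal sketch (hypothetical device ids): devices 267 and 273 overlap, 291 does not.
# A slot where only 267 and 273 are active is treated as one person; a slot where 267
# and 291 are both active is recorded as a multiple-presence combination.
#   seen, seen_uf = DetectMultiple({267: [1, 1], 273: [1, 0], 291: [0, 1]}, ["267:273"])
#   # seen_uf -> [[267, 273], [267, 291]]; only the second slot contributes a variation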
def minutes_to_time(minutes):
"""
Convert minutes in a day (0-1439) to HH:MM format
Args:
minutes (int): Minutes since midnight (0-1439)
Returns:
str: Time in HH:MM format
"""
# Ensure the input is within valid range
#if not 0 <= minutes <= 1439:
# raise ValueError("Minutes must be between 0 and 1439")
# Calculate hours and remaining minutes
minutes = minutes % 1440
hours = minutes // 60
mins = minutes % 60
# Format as HH:MM with leading zeros
return f"{hours:02d}:{mins:02d}"
def decas_to_time(decas):
    """
    Convert decas (10-second intervals) in a day (0-8639) to HH:MM:SS format
    Args:
        decas (int): decas since midnight (0-8639)
    Returns:
        str: Time in HH:MM:SS format
    """
    # Wrap into a single day, then split into hours, minutes and seconds
    decas = decas % 8640
    hours = decas // (6 * 60)
    mins = (decas // 6) % 60
    secs = 10 * (decas % 6)
    # Format as HH:MM:SS with leading zeros
    return f"{hours:02d}:{mins:02d}:{secs:02d}"
def ClearOverlaps(temporary_map_day_plus, overlaps_str_lst):
"""
Detects reads that came from same person read by multiple devices that overlap, and removes weaker reads
Args:
temporary_map_day_plus: Map for each device radar reads that were detected to be above threshold
overlaps_lst: List of pairs of devices that have overlapping area
Returns:
An original temporary_map_day_plus with some reads removed
"""
## Get the number of minutes
#for location_id, data_list in temporary_map_day_plus.items():
#decas_in_data = len(data_list)
#break
if temporary_map_day_plus:
decas_in_data = len(next(iter(temporary_map_day_plus.values())))
else:
decas_in_data = 0
# Create seen_where_list with device-signal pairs
seen_where_list = [[] for _ in range(decas_in_data)]
for location_id, data_list in temporary_map_day_plus.items():
for i in range(decas_in_data):
if data_list[i] > 0: # Presence interval
#if i == (8721):
# print("stop")
seen_where_list[i].append((location_id, data_list[i]))
# Parse overlap pairs
overlap_pairs = set()
for overlap_str in overlaps_str_lst:
nums = [int(x) for x in overlap_str.split(':')]
overlap_pairs.add((nums[0], nums[1]))
overlap_pairs.add((nums[1], nums[0]))
# Process each time slot
for i in range(len(seen_where_list)):
locations = seen_where_list[i]
if len(locations) <= 1:
continue
#if i == (5713 + 8640):
# print("stop")
#if i == (8721):
# print("stop")
# Create a new list to store the filtered results
filtered_list = []
# Make a copy of locations to process
to_process = locations.copy()
# Process each device and decide whether to keep it
while to_process:
current = to_process.pop(0)
device_id, signal_strength = current
should_keep = True
devices_to_remove = []
# Compare with all other devices (including those already in filtered_list)
for other in locations:
other_device_id, other_signal_strength = other
# Skip if comparing with itself
if device_id == other_device_id:
continue
# Check if these devices overlap
if (device_id, other_device_id) in overlap_pairs:
# They overlap, keep only the stronger signal
if signal_strength < other_signal_strength:
# Other device is stronger, don't keep current
should_keep = False
break
elif signal_strength == other_signal_strength and device_id > other_device_id:
# For equal signals, use device_id as tiebreaker
should_keep = False
break
# If we should keep this device, add it to filtered list
if should_keep:
filtered_list.append(current)
# Update the original list with filtered results
#if i == (8721):
# print("stop")
seen_where_list[i] = filtered_list
# Create a new temporary_map_day_plus with the filtered data
result = {}
for location_id, data_list in temporary_map_day_plus.items():
result[location_id] = [0] * decas_in_data
# Fill in the filtered data
for i in range(decas_in_data):
#if len(seen_where_list[i]) > 1:
#if i == (8721):
# print("stop")
#print(i, decas_to_time(i), seen_where_list[i])
for device_id, signal_strength in seen_where_list[i]:
result[device_id][i] = signal_strength
return result
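# Worked example (hypothetical ids and signal strengths): devices 267 and 273 overlap, so
# where both report presence only the stronger read survives; device 291 is untouched.
#   ClearOverlaps({267: [50, 0], 273: [80, 0], 291: [30, 40]}, ["267:273"])
#   # -> {267: [0, 0], 273: [80, 0], 291: [30, 40]}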
# Path handling middleware
class StripPathMiddleware:
def process_request(self, req, resp):
# Strip the '/function/well-api' prefix if present
path = req.path
logger.info(f"Original request path: {path}")
# Define patterns to match different URL formats
patterns = [
r'^/function/well-api', # Standard OpenFaaS path
r'^/api/well_api', # API path
]
for pattern in patterns:
if re.match(pattern, path):
# Strip the matched prefix
path = re.sub(pattern, '', path)
# Ensure path starts with a slash
if not path.startswith('/'):
path = '/' + path
# Update the request path
req.path = path
logger.info(f"Modified request path: {path}")
break
def optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type):
last_device_id = None
# Pre-compute seconds per minute
seconds_per_deka = 10
# Check if we need to process all data or just specific types
#process_all = data_type in ("all", "z-graph", "multiple")
process_all = True
for radar_read in myz_data:
local_time = radar_read[0]
device_id = radar_read[1]
# Calculate deca once
deca = int((local_time - start_time).total_seconds() / seconds_per_deka)
# Use cached lookups when possible
if device_id != last_device_id:
last_device_id = device_id
# Check if we've cached this device info
if device_id not in device_lookup_cache:
well_id = id2well_id[device_id]
radar_threshold_group_st = device_id_2_threshold[device_id]#well_id]
threshold_sig, threshold = radar_threshold_group_st
threshold_sig = threshold_sig.split("_")[0]
# Cache the values
                device_lookup_cache[device_id] = {
                    'well_id': well_id,
                    'threshold_sig': threshold_sig,
                    'threshold': threshold
                }
                # days_decas was previously set only in the cached branch, which could leave
                # it undefined on the very first read for a new device; set it here as well.
                days_decas = len(temporary_map_day_plus[well_id])
else:
# Use cached values
cached = device_lookup_cache[device_id]
well_id = cached['well_id']
threshold_sig = cached['threshold_sig']
threshold = cached['threshold']
days_decas = len(temporary_map_day_plus[well_id])
else:
# Use already loaded values from last iteration
cached = device_lookup_cache[device_id]
well_id = cached['well_id']
threshold_sig = cached['threshold_sig']
threshold = cached['threshold']
days_decas = len(temporary_map_day_plus[well_id])
# Get radar value using cached index
radar_val = radar_read[2 + device_field_indexes[threshold_sig]]
# Process data if needed
if process_all and radar_val > threshold and deca < days_decas:
temporary_map_day_plus[well_id][deca] = radar_val
#if well_id == 269:
# print(local_time)
return temporary_map_day_plus
def optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_threshold,
device_field_indexes, presence_map, data_type):
last_device_id = 0
# Cache for threshold_sig calculation which is expensive due to dictionary lookups and string splitting
threshold_sig_cache = {}
field_index_cache = {}
for radar_read in my_data:
local_time = radar_read[0]
device_id = radar_read[1]
# Calculate deca once
deca = int((local_time - start_time).total_seconds() / 10)
# Device changed - update values that depend on device
if device_id != last_device_id:
last_device_id = device_id
well_id = id2well_id[device_id]
# Calculate days_decas exactly like original
#if data_type == "raw" or data_type == "all":
days_decas = len(presence_map['raw'][well_id])
#else:
# days_decas = len(presence_map['presence'][well_id])
# Calculate threshold_sig with caching
if device_id not in threshold_sig_cache:
radar_threshold_group_st = device_id_2_threshold[device_id]
threshold_sig, threshold = radar_threshold_group_st
threshold_sig = threshold_sig.split("_")[0]
threshold_sig_cache[device_id] = (threshold_sig, threshold)
else:
threshold_sig, threshold = threshold_sig_cache[device_id]
# Calculate field index with caching
if threshold_sig not in field_index_cache:
field_index = 2 + device_field_indexes[threshold_sig]
field_index_cache[threshold_sig] = field_index
else:
field_index = field_index_cache[threshold_sig]
else:
# Use values from previous iteration for same device
#well_id = id2well_id[device_id]
# Calculate days_decas exactly like original
#if data_type == "raw" or data_type == "all":
#days_decas = len(presence_map['raw'][well_id])
#else:
# days_decas = len(presence_map['presence'][well_id])
# Use cached values
threshold_sig, threshold = threshold_sig_cache[device_id]
field_index = field_index_cache[threshold_sig]
# Get radar value using cached field index
if field_index >= len(radar_read):
radar_val = radar_read[-1]
else:
radar_val = radar_read[field_index]
if well_id == 475:
print(".")
# Process presence data
if radar_val > threshold:
if deca < days_decas:
presence_map['longpresence'][well_id][deca] = 1
# Process raw data if needed
if data_type == "raw" or data_type == "all":
if deca < days_decas:
presence_map['raw'][well_id][deca] = radar_val
return presence_map
def CompressList(presence_devices_map):
for key in presence_devices_map:
presence_map_list = presence_devices_map[key]
presence_map_list_compressed = Compress(presence_map_list)
presence_devices_map[key] = presence_map_list_compressed
return presence_devices_map
def Compress(presence_map_list):
presence_map_list_compressed = []
l = len(presence_map_list)
if l > 1:
last_data_point = presence_map_list[0]
presence_map_list_compressed.append([0, last_data_point])
for i in range(1, l):
data_point = presence_map_list[i]
if data_point != last_data_point:
presence_map_list_compressed.append([i - 1, last_data_point])
presence_map_list_compressed.append([i, data_point])
last_data_point = data_point
presence_map_list_compressed.append([i, data_point])
return presence_map_list_compressed
def Decompress(pers_in_deka):
last = pers_in_deka[-1]
last_index = 1 + last[1]
result = [0] * last_index
for points in pers_in_deka:
start_deca = points[0]
end_deca = points[1]
value_deca = points[2]
for i in range(start_deca, 1+end_deca):
result[i] = value_deca
return result
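# Illustrative behavior (hypothetical values); note the two functions use different formats:
#   Compress([5, 5, 5, 2, 2])
#   # -> [[0, 5], [2, 5], [3, 2], [4, 2]]   (edge points of each constant run)
#   Decompress([[0, 2, 7], [3, 4, 1]])
#   # -> [7, 7, 7, 1, 1]                    (expects [start, end, value] triples)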
def store_to_file(my_list, filename):
try:
with open(filename, 'w') as f:
json.dump(my_list, f, indent=4) # indent for pretty printing
print(f"List saved to {filename} using JSON")
except IOError:
print(f"Error: Could not write to file {filename}")
except TypeError as e:
print(f"Error: Could not serialize list to JSON. {e}") # e.g. if list contains unsupported types like sets
def find_custom_header(headers, name):
"""Helper to find a custom header value (case-insensitive name)."""
if not headers: return None
for header in headers:
if header.get('name', '').lower() == name.lower(): return header.get('value')
return None
def encode_state(parts):
"""Joins parts with a pipe and base64 encodes the result."""
plain_state = "|".join(map(str, parts))
base64_state = base64.b64encode(plain_state.encode('utf-8')).decode('ascii')
# Assuming 'logger' is your app's logger instance
logger.debug(f"Encoded state: '{plain_state}' -> '{base64_state}'")
return base64_state
def decode_state(b64_state):
"""Decodes a base64 state and splits it by pipe."""
if not b64_state: return []
try:
decoded_plain = base64.b64decode(b64_state).decode('utf-8')
parts = decoded_plain.split('|')
logger.debug(f"Decoded state: '{b64_state}' -> '{decoded_plain}' -> {parts}")
return parts
except Exception as e:
logger.error(f"Failed to decode client_state '{b64_state}': {e}")
return []
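# Round-trip sketch (hypothetical state parts):
#   s = encode_state(["WAITING_DTMF", "tts", "Door left open"])
#   decode_state(s)  # -> ["WAITING_DTMF", "tts", "Door left open"]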
def create_client_state(base_event, call_control_id, prefix):
"""Create a base64 encoded client state string as required by Telnyx API"""
# Create the plain text client state string
plain_state = f"{prefix}_{base_event}_{call_control_id[:8]}" if call_control_id else f"{prefix}_{base_event}_unknownccid"
# Encode to base64 as required by Telnyx API
base64_state = base64.b64encode(plain_state.encode('utf-8')).decode('ascii')
logger.debug(f"Client state created: '{plain_state}' -> base64: '{base64_state}'")
return base64_state
def send_telnyx_command(action_path, params, api_key):
"""
Sends a command to the Telnyx Call Control API actions endpoint.
"""
if not api_key:
logger.error(f"CMDFAIL ('{action_path}'): API_KEY not available.")
return None
ccid = params.get("call_control_id")
if not ccid:
logger.error(f"CMDFAIL ('{action_path}'): call_control_id missing in params.")
return None
# Correct endpoint construction for V2 actions
endpoint = f"{TELNYX_API_BASE_URL}/calls/{ccid}/{action_path}"
# Body should not contain call_control_id for actions API
body = {k: v for k, v in params.items() if k != 'call_control_id'}
headers = {
"Authorization": f"Bearer {api_key}",
"Content-Type": "application/json",
"Accept": "application/json"
}
logger.info(f"SENDCMD ('{action_path}')")
logger.debug(f" Endpoint: POST {endpoint}")
logger.debug(f" JSON Payload: {json.dumps(body, indent=2)}")
try:
response = requests.post(endpoint, json=body, headers=headers, timeout=10)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
logger.info(f"CMDOK ('{action_path}'): Telnyx accepted. Status: {response.status_code}")
return response.json()
except requests.exceptions.HTTPError as e:
logger.error(f"CMDFAIL ('{action_path}'): Telnyx rejected. Status: {e.response.status_code}")
try:
logger.error(f" Telnyx Err Detail: {json.dumps(e.response.json(), indent=2)}")
except json.JSONDecodeError:
logger.error(f" Raw Err Body: {e.response.text[:500]}")
except requests.exceptions.RequestException as e:
logger.exception(f"CMDFAIL ('{action_path}'): Network error")
return None
def ParseAddress(address_string):
try:
payload = {"address": address_string}
response = requests.post(
f"{base_url}/parse_address",
data=json.dumps(payload),
headers={"Content-Type": "application/json"}
)
return response.json()
except Exception as e:
print(f"Error: {e}")
return {}
def GetTZFromGPS(latitude, longitude):
try:
payload = {"latitude": latitude,"longitude": longitude }
response = requests.post(
f"{base_url}/gps_to_timezone",
data=json.dumps(payload),
headers={"Content-Type": "application/json"}
)
result_map = response.json()
if result_map["success"]:
return response.json()["timezone"]
else:
return ""
except Exception as e:
print(f"Error: {e}")
return ""
def JoinAddress(address_map):
try:
payload = address_map
response = requests.post(
f"{base_url}/join_address",
data=json.dumps(payload),
headers={"Content-Type": "application/json"}
)
return response.json()
except Exception as e:
print(f"Error: {e}")
return {}
def StoreToDB(data):
try:
event_type = data.get('event_type')
sql = ""
payload_json = json.dumps(data["payload"])
call_session_id = data["payload"]['call_session_id']
if event_type == "call.initiated":
timee = data.get('occurred_at')
sql = f"""
INSERT INTO public.alarms_voice (
index,
"time",
call_session_id,
initiated
)
VALUES (
(SELECT COALESCE(MAX(index), 0) + 1 FROM public.alarms_voice), -- Auto-increment index
'{timee}'::timestamptz, -- occurred_at value
'{call_session_id}', -- call_session_id value
'{payload_json}'
); """
elif event_type == "call.answered":
sql = f"""
UPDATE public.alarms_voice
SET answered = '{payload_json}'
WHERE call_session_id = '{call_session_id}';"""
elif event_type == "call.playback.started":
sql = f"""
UPDATE public.alarms_voice
SET playback_started = '{payload_json}'
WHERE call_session_id = '{call_session_id}';"""
elif event_type == "call.playback.ended":
sql = f"""
UPDATE public.alarms_voice
SET playback_ended = '{payload_json}'
WHERE call_session_id = '{call_session_id}';"""
elif event_type == "call.hangup":
sql = f"""
UPDATE public.alarms_voice
SET hangup = '{payload_json}'
WHERE call_session_id = '{call_session_id}';"""
if sql != "":
with get_db_connection() as conn:
with conn.cursor() as cur:
print(sql)
cur.execute(sql)
except Exception as e:
print ("Error in StoreToDB:", e)
def handle_telnyx_webhook(webhook_data, remote_addr, request_id):
"""
Process Telnyx webhook events with DTMF controls for repeating and hanging up.
This version works with both well-alerts.py and tstMP3Call.sh.
"""
logger.info(f"Processing Telnyx webhook from {remote_addr}, Request-ID: {request_id}")
try:
data = webhook_data.get('data', {})
event_type = data.get('event_type')
record_type = data.get('record_type')
payload = data.get('payload', {})
logger.info(f"Event: {event_type}, Record Type: {record_type}")
if not event_type or not record_type:
logger.error("Missing event_type or record_type in webhook data")
return False
if record_type == 'message':
logger.info("Processing SMS event...")
# Existing SMS handling code can go here.
return True
if record_type != 'event':
logger.warning(f"Unknown record type: {record_type}")
return False
# --- Voice Event Handling ---
call_control_id = payload.get('call_control_id')
b64_client_state = payload.get("client_state")
state_parts = decode_state(b64_client_state)
state_name = state_parts[0] if state_parts else None
logger.info(f"Processing voice event: {event_type}, State: {state_name}")
StoreToDB(data) # Store all events as before
# --- State Machine Logic ---
if event_type == 'call.initiated':
logger.info(f"Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
elif event_type == 'call.answered':
logger.info(f"Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
# Determine media to play
custom_headers = payload.get('custom_headers', [])
audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
media_type = "audio" if audio_url else "tts" if tts_payload else "none"
media_value = audio_url or tts_payload
if media_value:
logger.info(f"Playing main message via {media_type}.")
# Create the initial state and encode it
next_state = encode_state(['MAIN_MEDIA_PLAYED', media_type, media_value])
if media_type == "audio":
play_params = {
"call_control_id": call_control_id,
"client_state": next_state,
"audio_url": media_value
}
send_telnyx_command("actions/playback_start", play_params, TELNYX_API_KEY)
else: # tts
speak_params = {
"payload": media_value,
"voice": DEFAULT_TTS_VOICE,
"language": DEFAULT_TTS_LANGUAGE,
"call_control_id": call_control_id,
"client_state": next_state
}
send_telnyx_command("actions/speak", speak_params, TELNYX_API_KEY)
else:
logger.warning("No audio URL or TTS payload found. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
elif event_type in ['call.speak.ended', 'call.playback.ended']:
logger.info(f"Media ended with status: {payload.get('status')}")
if state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']:
logger.info("Main message finished. Playing options menu.")
_, media_type, media_value = state_parts # Unpack state
# Create new state for waiting for DTMF input
next_state = encode_state(['WAITING_DTMF', media_type, media_value])
options_prompt = "press 0 to repeat the message, or press the pound key to hang up."
gather_params = {
"payload": options_prompt,
"voice": DEFAULT_TTS_VOICE,
"language": DEFAULT_TTS_LANGUAGE,
"valid_digits": "0#",
"max_digits": 1,
"timeout_millis": 10000, # 10 seconds
"call_control_id": call_control_id,
"client_state": next_state
}
send_telnyx_command("actions/gather_using_speak", gather_params, TELNYX_API_KEY)
else:
logger.warning(f"Media ended with unhandled state '{state_name}'. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
elif event_type == 'call.dtmf.received':
digit = payload.get('digit')
logger.info(f"DTMF Received: Digit='{digit}'")
if digit == '#':
logger.info("'#' received. Terminating call immediately.")
send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
elif event_type == 'call.gather.ended':
logger.info(f"Gather ended. Digits: '{payload.get('digits')}', Status: '{payload.get('status')}'")
if state_name == 'WAITING_DTMF':
digits = payload.get('digits')
_, media_type, media_value = state_parts # Unpack state
if digits == "0":
logger.info("'0' pressed. Replaying main message.")
next_state = encode_state(['REPLAYING_MEDIA', media_type, media_value])
if media_type == "audio":
send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": call_control_id, "client_state": next_state}, TELNYX_API_KEY)
else: # tts
send_telnyx_command("actions/speak", {"payload": media_value, "voice": DEFAULT_TTS_VOICE, "language": DEFAULT_TTS_LANGUAGE, "call_control_id": call_control_id, "client_state": next_state}, TELNYX_API_KEY)
else:
logger.info("Gather ended without a repeat command (timeout or hangup). Ending call.")
send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
elif event_type == 'call.hangup':
logger.info(f"Call hung up: Cause='{payload.get('cause')}'")
else:
logger.info(f"Unhandled voice event: {event_type}")
return True # Acknowledge the webhook
except Exception as e:
logger.exception(f"Critical error in handle_telnyx_webhook: {e}")
return False
# Assume these are defined globally or accessible (e.g., from app_args or .env)
# logger = logging.getLogger(...)
# ENABLE_AUDIO_PLAYBACK = True / False
# CLIENT_STATE_PREFIX = "app_state"
# DEFAULT_TTS_VOICE = "female"
# DEFAULT_TTS_LANGUAGE = "en-US"
# TELNYX_API_KEY = "YOUR_API_KEY"
# DTMF_GATHER_TIMEOUT_SECONDS = 15 # Wait 15 seconds for DTMF input
# Placeholder for your DB function
# def StoreToDB(data):
# app_logger.debug(f"Placeholder: Storing to DB: {json.dumps(data)[:100]}") # Use app_logger
# (Your existing find_custom_header, create_client_state, send_telnyx_command should be here)
# Make sure send_telnyx_command uses app_logger
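# A minimal sketch of the find_custom_header contract assumed by the handlers below: Telnyx sends
# custom_headers as a list of {"name": ..., "value": ...} dicts. The real helper is defined elsewhere
# in this file; the name _find_custom_header_sketch is illustrative only.
def _find_custom_header_sketch(custom_headers, header_name):
    for header in custom_headers or []:
        if header.get("name") == header_name:
            return header.get("value")
    return None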
def handle_telnyx_webhook2(webhook_data, remote_addr, request_id):
"""Process Telnyx webhook events with IVR logic."""
logger.info(f"Processing Telnyx webhook from {remote_addr}, Request-ID: {request_id}")
try:
data = webhook_data.get('data', {})
event_type = data.get('event_type')
record_type = data.get('record_type')
payload = data.get('payload', {})
logger.info(f"Event: {event_type}, Record Type: {record_type}")
if not event_type or not record_type:
logger.error("Missing event_type or record_type in webhook data")
return False # Indicate failure to process
call_control_id = payload.get('call_control_id')
call_session_id = payload.get('call_session_id')
# Attempt to decode client_state if present
b64_client_state_rcvd = data.get("payload",{}).get("client_state")
plain_client_state_rcvd = ""
if b64_client_state_rcvd:
try:
plain_client_state_rcvd = base64.b64decode(b64_client_state_rcvd).decode('utf-8')
logger.info(f" Decoded Client State Received: '{plain_client_state_rcvd}'")
except Exception as e:
logger.warning(f" Could not decode client_state: {b64_client_state_rcvd}, Error: {e}")
plain_client_state_rcvd = "undecodable_state"
# Store all events to DB if needed
StoreToDB(webhook_data.get('data', {})) # StoreToDB expects the inner 'data' object (event_type + payload)
# Voice Event Handling
if record_type == 'event':
logger.info(f"Processing voice event: {event_type}, CCID: {call_control_id}")
# --- Initial Call Setup ---
if event_type == 'call.initiated':
logger.info(f" Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
# No action needed here, wait for call.answered
elif event_type == 'call.answered':
logger.info(f" Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
custom_headers = payload.get('custom_headers', [])
logger.debug(f" Custom headers: {json.dumps(custom_headers)}")
audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
logger.info(f" X-Audio-Url: {audio_url}, X-TTS-Payload: {tts_payload}")
# This state means the main message is about to be played.
# After it ends, we'll play the options prompt.
next_client_state = create_client_state("main_media_played", call_control_id, app_args.client_state_prefix) # Use app_args
action_taken = False
if app_args.enable_audio_playback and audio_url: # Use app_args
logger.info(f" -> Playing main audio: {audio_url}")
play_params = {"call_control_id": call_control_id, "client_state": next_client_state, "audio_url": audio_url}
send_telnyx_command("actions/playback_start", play_params, app_args.api_key) # Use app_args
action_taken = True
elif tts_payload:
logger.info(f" -> Speaking main TTS: {tts_payload}")
speak_params = {"payload": tts_payload, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language, "call_control_id": call_control_id, "client_state": next_client_state} # Use app_args
send_telnyx_command("actions/speak", speak_params, app_args.api_key) # Use app_args
action_taken = True
if not action_taken:
logger.warning(" -> No audio URL or TTS payload for main message. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("no_main_media_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
# --- Handling End of Main Media Playback ---
elif event_type in ['call.speak.ended', 'call.playback.ended']:
status = payload.get('status')
ended_event_type_root = event_type.split('.')[1] # speak or playback
logger.info(f" Call {ended_event_type_root} ended: Status={status}, Current Decoded State='{plain_client_state_rcvd}'")
# Check if the main media just finished playing
if plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_main_media_played"):
logger.info(" -> Main media finished. Playing DTMF options prompt.")
options_prompt_tts = "press 0 to repeat the message or press pound to hang up."
# This state means the options prompt is playing, and we're waiting for DTMF.
# gather_using_speak will trigger call.gather.ended
next_client_state = create_client_state("waiting_dtmf", call_control_id, app_args.client_state_prefix)
gather_params = {
"call_control_id": call_control_id,
"client_state": next_client_state,
"payload": options_prompt_tts,
"voice": app_args.default_tts_voice,
"language": app_args.default_tts_language,
"valid_digits": "0#", # Only accept 0 or #
"max_digits": 1, # Expect only one digit
"timeout_millis": app_args.dtmf_timeout_seconds * 1000, # N seconds timeout
"terminating_digits": "#" # # will also terminate gather immediately
}
send_telnyx_command("actions/gather_using_speak", gather_params, app_args.api_key)
elif plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_replaying_main_media"):
logger.info(" -> Replayed main media finished. Playing DTMF options prompt again.")
# Same logic as above for playing options prompt
options_prompt_tts = "press 0 to repeat the message or press pound to hang up."
next_client_state = create_client_state("waiting_dtmf", call_control_id, app_args.client_state_prefix)
gather_params = {
"call_control_id": call_control_id, "client_state": next_client_state,
"payload": options_prompt_tts, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language,
"valid_digits": "0#", "max_digits": 1, "timeout_millis": app_args.dtmf_timeout_seconds * 1000, "terminating_digits": "#"
}
send_telnyx_command("actions/gather_using_speak", gather_params, app_args.api_key)
else:
logger.warning(f" -> {ended_event_type_root} ended, but client_state ('{plain_client_state_rcvd}') doesn't match expected flow for options. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state(f"{ended_event_type_root}_unexpected_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
# --- Handling DTMF Input Result ---
elif event_type == 'call.gather.ended':
digits_received = payload.get('digits')
gather_status = payload.get('status') # e.g., 'completed_by_terminating_digit', 'timeout', 'call_hangup'
logger.info(f" Call Gather Ended: Digits='{digits_received}', Status='{gather_status}', Current Decoded State='{plain_client_state_rcvd}'")
if plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_waiting_dtmf"):
if digits_received == "0":
logger.info(" -> DTMF '0' received. Replaying main message.")
# Replay the original message. The original X-Audio-Url / X-TTS-Payload arrived with call.answered;
# the call.gather.ended payload is not guaranteed to carry the same custom_headers, so a robust
# implementation should store the media info in the client_state (as the other handlers do) or
# retrieve it from the DB. Here we fall back to whatever custom_headers this event carries.
custom_headers = payload.get('custom_headers', []) # may not contain the original headers
logger.warning(" -> Replay needs the original X-TTS-Payload/X-Audio-Url; using the current custom_headers, if any.")
original_audio_url = find_custom_header(custom_headers, 'X-Audio-Url') # May not be original
original_tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload') # May not be original
next_client_state = create_client_state("replaying_main_media", call_control_id, app_args.client_state_prefix)
action_taken = False
if app_args.enable_audio_playback and original_audio_url:
logger.info(f" -> Replaying audio: {original_audio_url}")
play_params = {"call_control_id": call_control_id, "client_state": next_client_state, "audio_url": original_audio_url}
send_telnyx_command("actions/playback_start", play_params, app_args.api_key)
action_taken = True
elif original_tts_payload:
logger.info(f" -> Replaying TTS: {original_tts_payload}")
speak_params = {"payload": original_tts_payload, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language, "call_control_id": call_control_id, "client_state": next_client_state}
send_telnyx_command("actions/speak", speak_params, app_args.api_key)
action_taken = True
if not action_taken:
logger.error(" -> Could not find original media to replay. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("replay_fail_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
elif digits_received == "#" or (gather_status == 'completed_by_terminating_digit' and payload.get('terminating_digit') == '#'):
logger.info(" -> DTMF '#' received or terminating digit. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_pound_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
elif gather_status == 'timeout':
logger.info(" -> DTMF gather timed out. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_timeout_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
else:
logger.warning(f" -> Gather ended with unhandled digits '{digits_received}' or status '{gather_status}'. Hanging up.")
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_unhandled_hup", call_control_id, app_args.client_state_prefix)}
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
else:
logger.warning(f" -> Gather ended, but client_state ('{plain_client_state_rcvd}') doesn't match waiting_dtmf. Ignoring.")
elif event_type == 'call.hangup':
logger.info(f" Call Hangup Event: Cause='{payload.get('cause')}', SIPCause='{payload.get('sip_hangup_cause')}', Source='{payload.get('hangup_source')}'")
# Call is already over, no command to send.
# Log other voice events not explicitly handled above for visibility
elif event_type not in ['call.initiated', 'call.answered', 'call.speak.ended', 'call.playback.ended', 'call.gather.ended', 'call.hangup', 'call.speak.started', 'call.playback.started']:
logger.info(f" Other Voice Event: Type='{event_type}'. Payload: {json.dumps(payload, indent=2)}")
# --- SMS Event Handling (Placeholder from your snippet) ---
elif record_type == 'message':
logger.info(f"Processing SMS event: {event_type}")
# Your existing SMS handling code would go here...
# For now, just acknowledge
logger.info(" -> SMS ACK (204)")
return True # SMS events are also ACKed (the caller replies 2xx to Telnyx)
else:
logger.warning(f"Unknown record type: {record_type}")
# Acknowledge to prevent retries from Telnyx
logger.info(" -> Unknown Record Type ACK (204)")
return True
# If we reached here for a voice event and didn't send a command through send_telnyx_command,
# it means we are just acknowledging the event.
logger.info(" -> Voice Event Processed (no immediate command sent or command sent async). ACK (204) to Telnyx.")
return True # ALWAYS ACK THE WEBHOOK (the caller replies 2xx to Telnyx)
except Exception as e:
logger.exception(f"Error in handle_telnyx_webhook2: {e}")
# Telnyx may retry the webhook if the caller does not answer with a 2xx.
return False
def handle_telnyx_webhook3(webhook_data, remote_addr, request_id):
"""
Processes Telnyx webhook events with full IVR logic for repeating messages.
This function should be added to your well-api.py.
"""
logger.info(f"Processing webhook in handle_telnyx_webhook3 from {remote_addr}, Request-ID: {request_id}")
# --- ADAPT THIS SECTION to your app's config management ---
# This example assumes config values are accessible as global constants or from a dict.
# Replace these with your actual config access method (e.g., self.config['...'])
config = {
'api_key': TELNYX_API_KEY,
'dtmf_timeout_seconds': 10,
'initial_silence_ms': 500,
'replay_silence_ms': 100,
'default_tts_voice': 'female',
'default_tts_language': 'en-US',
'client_state_prefix': 'well_api_state',
'inbound_greeting': 'Thank you for calling. We will be with you shortly.'
}
# --- END ADAPTATION SECTION ---
try:
StoreToDB(webhook_data.get('data', {})) # Call your DB storage function first; it expects the inner 'data' object
data, payload = webhook_data.get('data', {}), webhook_data.get('data', {}).get('payload', {})
event_type, record_type, ccid = data.get('event_type'), data.get('record_type'), payload.get('call_control_id')
logger.info(f"EVENT '{event_type}' ({record_type})" + (f", CCID: {ccid}" if ccid else ""))
if record_type != 'event':
logger.info(f" -> Non-voice event ('{record_type}') received. Ignoring in this handler.")
return True
b64_client_state = payload.get("client_state")
decoded_parts = decode_state(b64_client_state)
state_name = decoded_parts[0] if decoded_parts else None
if state_name: logger.info(f" State Name Received: '{state_name}'")
current_api_key = config['api_key']
# --- State Machine Logic ---
if event_type == 'call.answered':
if payload.get('direction') == 'incoming':
logger.info(" -> Inbound call detected. Playing generic greeting and hanging up.")
next_state = encode_state(['INBOUND_GREETING_HUP'])
speak_params = {"payload": config['inbound_greeting'], "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
send_telnyx_command("actions/speak", speak_params, current_api_key)
else: # Outgoing call
audio_url = find_custom_header(payload.get('custom_headers'), 'X-Audio-Url')
tts_payload = find_custom_header(payload.get('custom_headers'), 'X-TTS-Payload')
media_type = "audio" if audio_url else "tts" if tts_payload else "none"
media_value = audio_url or tts_payload
if media_value:
logger.info(f" -> Outbound call. Playing {config['initial_silence_ms']}ms silence buffer.")
next_state = encode_state(['INIT_PLAY_MAIN', media_type, media_value])
send_telnyx_command("actions/play_silence", {"milliseconds": str(config['initial_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
else:
logger.warning(" -> Outbound call, but no audio/tts payload. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
elif event_type == 'call.playback.ended':
if state_name == 'INIT_PLAY_MAIN': # Silence ended
logger.info(" -> Silence buffer ended. Playing main message.")
_, media_type, media_value = decoded_parts
next_state = encode_state(['MAIN_MEDIA_PLAYED', media_type, media_value])
if media_type == "audio":
send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
elif media_type == "tts":
params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
send_telnyx_command("actions/speak", params, current_api_key)
elif state_name == 'REPLAY_SILENCE': # Replay silence ended
logger.info(" -> Replay silence ended. Replaying main message.")
_, media_type, media_value = decoded_parts
next_state = encode_state(['REPLAYING_MEDIA', media_type, media_value])
if media_type == "audio":
send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
elif media_type == "tts":
params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
send_telnyx_command("actions/speak", params, current_api_key)
elif state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']: # Actual audio file ended
logger.info(f" -> Main audio playback finished. Playing options menu.")
_, media_type, media_value = decoded_parts
next_state = encode_state(['WAITING_DTMF', media_type, media_value])
options_prompt = "press 0 to repeat the message or press pound to hang up."
gather_params = {
"payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
"valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
"call_control_id": ccid, "client_state": next_state
}
send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
else:
logger.warning(f" -> Playback ended with unhandled state '{state_name}'. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
elif event_type == 'call.speak.ended':
if state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']:
logger.info(f" -> Main message TTS finished. Playing options menu.")
_, media_type, media_value = decoded_parts
next_state = encode_state(['WAITING_DTMF', media_type, media_value])
options_prompt = "press 0 to repeat the message or press pound to hang up."
gather_params = {
"payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
"valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
"call_control_id": ccid, "client_state": next_state
}
send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
elif state_name == 'INBOUND_GREETING_HUP':
logger.info(" -> Inbound greeting finished. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
else:
logger.warning(f" -> Speak ended with unhandled state '{state_name}'. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
elif event_type == 'call.dtmf.received':
digit = payload.get('digit')
logger.info(f" DTMF Received: Digit='{digit}'")
if digit == '#':
logger.info(" -> '#' received. Terminating call immediately.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
elif event_type == 'call.gather.ended':
logger.info(f" -> Gather ended. Digits received: '{payload.get('digits')}', Status: '{payload.get('status')}'")
if state_name == 'WAITING_DTMF':
digits = payload.get('digits')
_, media_type, media_value = decoded_parts
if digits == "0":
logger.info(f" -> '0' pressed. Playing {config['replay_silence_ms']}ms silence before replay.")
next_state = encode_state(['REPLAY_SILENCE', media_type, media_value])
send_telnyx_command("actions/play_silence", {"milliseconds": str(config['replay_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
else:
logger.info(" -> Gather ended with non-repeat condition. Hanging up.")
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
else:
logger.warning(f" -> Gather ended with unhandled state '{state_name}'.")
elif event_type == 'call.hangup':
logger.info(f" Call Hangup Event: Cause='{payload.get('cause')}'")
else:
logger.info(f" -> Unhandled Voice Event: '{event_type}' with state '{state_name}'.")
return True # Return app-specific success
except Exception as e:
logger.exception(f"Error in handle_telnyx_webhook3: {e}")
return False
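# The three handlers above round-trip the IVR state through Telnyx's client_state field using
# encode_state()/decode_state(), which are defined elsewhere in this file. A minimal sketch of the
# assumed contract (state parts joined with "|" and base64-encoded; the delimiter is an assumption):
def _encode_state_sketch(parts):
    return base64.b64encode("|".join(parts).encode("utf-8")).decode("ascii")
def _decode_state_sketch(b64_state):
    if not b64_state:
        return []
    return base64.b64decode(b64_state).decode("utf-8").split("|")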
def FilterDevicesByDeviceId(devices_list, device_id_str):
"""
Filter devices list to include only the specified device_id.
Parameters:
devices_list: tuple of (device_details_list, device_ids_list)
device_id_str: string representation of device_id to filter by
Returns:
tuple: filtered (device_details_list, device_ids_list)
"""
try:
target_device_id = int(device_id_str)
except ValueError:
return ([], [])
device_details_list, device_ids_list = devices_list
filtered_details = []
filtered_ids = []
for i, device_details in enumerate(device_details_list):
device_id = device_details[1] # device_id is second element (index 1)
if device_id == target_device_id:
filtered_details.append(device_details)
filtered_ids.append(device_ids_list[i])
return (filtered_details, filtered_ids)
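# Illustrative call (hypothetical data): with devices_list = ([(201, 17, ...), (202, 23, ...)], [17, 23]),
# FilterDevicesByDeviceId(devices_list, "23") returns ([(202, 23, ...)], [23]);
# a non-numeric device_id_str returns ([], []).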
def FilterSensorsBySensorType(sensor_type):
"""
Filter s_table to include only the specified sensor type.
Parameters:
sensor_type: string name of sensor type (e.g., 'temperature', 'radar', 'voc0', etc.)
Returns:
list: filtered s_table containing only the mapped sensor name
"""
# Map user-friendly sensor names to their s_table equivalents
sensor_mapping = {
'temperature': 'avg_temperature',
'humidity': 'avg_humidity',
'pressure': 'pressure_amplitude',
'light': 'max_light',
'radar': 'radar',
'voc0': 'sensor_min_s0',
'voc1': 'sensor_min_s1',
'voc2': 'sensor_min_s2',
'voc3': 'sensor_min_s3',
'voc4': 'sensor_min_s4',
'voc5': 'sensor_min_s5',
'voc6': 'sensor_min_s6',
'voc7': 'sensor_min_s7',
'voc8': 'sensor_min_s8',
'voc9': 'sensor_min_s9'
}
# Get the actual sensor name used in s_table
mapped_sensor = sensor_mapping.get(sensor_type, sensor_type)
# Return the mapped sensor name if it's valid, otherwise empty list
if mapped_sensor in sensor_mapping.values() or mapped_sensor == sensor_type:
return [mapped_sensor]
return []
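# Illustrative: FilterSensorsBySensorType("temperature") -> ["avg_temperature"],
# FilterSensorsBySensorType("voc3") -> ["sensor_min_s3"]; names not in the mapping are
# passed through unchanged as a single-element list.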
def set_character(some_string, bit_nr, new_char):
"""
Replace a character in a string at position bit_nr from the right.
Parameters:
some_string (str): The input string
bit_nr (int): Position from right (0 = rightmost, 1 = second from right, etc.)
new_char (str): The replacement character
Returns:
str: The modified string
"""
if bit_nr < 0 or bit_nr >= len(some_string):
return some_string # Invalid position
# Convert string to list for easier manipulation
chars = list(some_string)
# Replace character at position bit_nr from right
chars[len(chars) - 1 - bit_nr] = new_char
# Convert back to string
return ''.join(chars)
def GetBit(alarm_armed_settings, bit_nr):
if bit_nr < 0 or bit_nr >= len(alarm_armed_settings):
return False
return alarm_armed_settings[-(bit_nr+1)] == "1"
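# Illustrative usage of the two bit-field helpers above (bit 0 is the right-most character):
#   set_character("1000", 0, "1")  -> "1001"
#   GetBit("1001", 3)              -> True
#   GetBit("1001", 1)              -> False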
def CreateSensorsMapFast(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by, filtered_s_table):
"""
Create a sensor map with filtered devices and sensors.
Based on CreateMapFast but with filtering support.
Parameters:
map_file: output file path
devices_list: filtered devices list
selected_date: date string
bw: black and white flag
time_zone_s: timezone string
radar_part: radar part specification
group_by: grouping strategy
filtered_s_table: filtered sensor table
Returns:
tuple: (success_boolean, vocs_scaled_array)
"""
global Id2MACDict
st = time.time()
if radar_part == "s28":
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
try:
lower_than200 = 0
larger_than200 = 0
ids_list = []
for details in devices_list[0]:
well_id = details[0]
ids_list.append(details[1])
if well_id < 200:
lower_than200 += 1
else:
larger_than200 += 1
if lower_than200 > 0 and larger_than200 > 0:
return False, []
if larger_than200 > 0:
sensors_c = len(filtered_s_table)
else: # old sensors not supported
return False, []
devices_c = len(devices_list[0])
devices_list_str = ",".join(map(str, devices_list[1]))
image_file = map_file
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
temp_offset = -10
# Use filtered sensor table for queries
if sensors_c > 1:
sql = get_deployment_query_filtered(
devices_list_str,
time_from_str,
time_to_str,
ids_list,
radar_part,
temp_offset,
filtered_s_table
)
else:
sql = get_deployment_single_query(
devices_list_str,
time_from_str,
time_to_str,
ids_list,
radar_part,
temp_offset,
filtered_s_table[0]
)
print(sql)
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
day_data = cur.fetchall()
if day_data is None:
return False, []
stretch_by = 8
minutes = 1440
stripes = devices_c * sensors_c
arr_source_template = np.full((stripes, minutes + 4), -0.001, dtype=float)
arr_stretched_template = np.zeros((int(stripes * stretch_by), minutes, 3), dtype=np.uint8)
# Use filtered sensor table
arr_source = fast_fill_array_from_timescale_filtered(
day_data,
time_from_str,
devices_list[1],
arr_source_template,
filtered_s_table,
time_zone_s
)
arr_source = AddLimits_optimized_filtered(arr_source, devices_c, sensors_c, filtered_s_table, percentile=100)
scaled_day = CalcExtremes(arr_source, minutes, stripes)
arr_stretched, vocs_scaled = FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched_template, group_by, bw)
SaveImageInBlob(image_file, arr_stretched, [])
return True, vocs_scaled
except Exception as e:
AddToLog(traceback.format_exc())
return False, []
def get_deployment_query_filtered(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, filtered_s_table):
"""
Generate a filtered TimeScaleDB query for specific sensors only.
Parameters:
devices_list_str (str): Comma-separated string of device IDs
time_from_str (str): Start time for the query
time_to_str (str): End time for the query
ids_list (list): List of device IDs in priority order for sorting
radar_part (str): Radar column name
temp_offset (float): Temperature offset
filtered_s_table (list): List of sensor names to include
Returns:
str: Generated SQL query
"""
# Generate the CASE statement for ordering
case_statements = []
for index, device_id in enumerate(ids_list, start=1):
case_statements.append(f"WHEN {device_id} THEN {index}")
case_order = "\n ".join(case_statements)
# Build sensor-specific SELECT clauses
sensor_selects = []
sensor_aggregates = []
radar_needed = False
for sensor in filtered_s_table:
if sensor == "temperature":
sensor_selects.append(f"sr.avg_temperature+ {temp_offset} as avg_temperature")
sensor_aggregates.append("AVG(temperature) AS avg_temperature")
elif sensor == "humidity":
sensor_selects.append("sr.avg_humidity")
sensor_aggregates.append("AVG(humidity) AS avg_humidity")
elif sensor == "pressure":
sensor_selects.append("sr.pressure_amplitude")
sensor_aggregates.append("AVG(pressure) AS pressure_amplitude")
elif sensor == "light":
sensor_selects.append("sr.max_light")
sensor_aggregates.append("MAX(light) AS max_light")
elif sensor == "radar":
sensor_selects.append("rr.radar")
radar_needed = True
elif sensor.startswith("voc"):
# Map the friendly name to the raw column (e.g., "voc0" -> MIN(s0) AS voc0)
sensor_num = sensor.replace("voc", "")
sensor_selects.append(f"sr.{sensor}")
sensor_aggregates.append(f"MIN(s{sensor_num}) AS {sensor}")
# Build the query
if radar_needed and sensor_aggregates:
# Need both sensor readings and radar readings
sql = f"""
SELECT
COALESCE(sr.minute, rr.minute) as minute,
COALESCE(sr.device_id, rr.device_id) as device_id,
{', '.join(sensor_selects)}
FROM (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
{', '.join(sensor_aggregates)}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) sr
FULL OUTER JOIN (
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS radar
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
) rr
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
ORDER BY
CASE COALESCE(sr.device_id, rr.device_id)
{case_order}
END,
COALESCE(sr.minute, rr.minute);
"""
elif radar_needed:
# Only radar needed
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
MAX({radar_part}) AS radar
FROM
radar_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
else:
# Only sensor readings needed
sql = f"""
SELECT
time_bucket('1 minute', time) AS minute,
device_id,
{', '.join(sensor_aggregates)}
FROM
sensor_readings
WHERE
device_id IN ({devices_list_str})
AND time >= '{time_from_str}'
AND time < '{time_to_str}'
GROUP BY
minute,
device_id
ORDER BY
CASE device_id
{case_order}
END,
minute;
"""
return sql
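# Illustrative shape: with filtered_s_table = ["temperature", "radar"] the generated SQL FULL OUTER
# JOINs per-minute sensor_readings averages with per-minute radar_readings maxima; with only "radar"
# (or only non-radar sensors) it collapses to a single-table aggregation, ordered by the ids_list priority.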
def fast_fill_array_from_timescale_filtered(day_data, time_from_str, devices_list, arr_source, filtered_s_table, timezone_str="Europe/Berlin"):
"""
Optimized array filling for filtered sensors.
Parameters:
day_data: query results
time_from_str: start time string
devices_list: list of device IDs
arr_source: array to fill
filtered_s_table: list of sensor names to process
timezone_str: timezone string
Returns:
numpy array: filled array
"""
# Convert start time to timezone-aware datetime
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
# Create device index mapping
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
# Pre-process data into a more efficient structure
device_data = defaultdict(list)
for record in day_data:
if record[0] and record[1]: # If time and device_id exist
device_data[record[1]].append(record)
# Build column mapping based on filtered sensors
columns = {}
col_idx = 2 # Start after time and device_id
for sensor in filtered_s_table:
columns[sensor] = col_idx
col_idx += 1
# Process each device's data
for device_id, records in device_data.items():
if device_id not in device_to_index:
continue
base_idx = device_to_index[device_id] * len(filtered_s_table)
# Convert records to numpy array for faster processing
records_array = np.array(records, dtype=object)
# Calculate all minute deltas at once
times = records_array[:, 0]
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
# Filter valid minute deltas
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1] - 4)
if not np.any(valid_mask):
continue
minute_deltas = minute_deltas[valid_mask]
records_array = records_array[valid_mask]
# Process each filtered sensor
for sensor_idx, sensor_name in enumerate(filtered_s_table):
if sensor_name in columns:
row_idx = base_idx + sensor_idx
values = records_array[:, columns[sensor_name]]
# Filter out None values
valid_values = ~np.equal(values, None)
if not np.any(valid_values):
continue
# Update array in bulk
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
return arr_source
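# Layout note: arr_source rows are device-major (row = device_index * len(filtered_s_table) + sensor_index);
# columns 0-1439 hold per-minute values and the trailing columns stay reserved for the per-row
# min/max limits that AddLimits_optimized_filtered fills in below.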
def AddLimits_optimized_filtered(arr_source, devices_c, sensors_c, filtered_s_table, percentile):
"""
Vectorized version of AddLimits for filtered sensors.
Parameters:
arr_source: array of shape (devices_c * sensors_c, 1444)
devices_c: number of devices
sensors_c: number of sensors per device
filtered_s_table: list of sensor names
percentile: parameter for clean_data_vectorized
"""
total_sensors = devices_c * sensors_c
# Create arrays of sensor legal values for filtered sensors
min_vals = []
max_vals = []
windows = []
for sensor_name in filtered_s_table:
if sensor_name in sensor_legal_values:
min_vals.append(sensor_legal_values[sensor_name][0])
max_vals.append(sensor_legal_values[sensor_name][1])
windows.append(sensor_legal_values[sensor_name][2])
else:
# Default values if sensor not found
min_vals.append(0)
max_vals.append(1000)
windows.append(1)
# Repeat for each device
min_vals = np.tile(min_vals, devices_c)
max_vals = np.tile(max_vals, devices_c)
windows = np.tile(windows, devices_c)
# Process rows that need cleaning (window > 2)
clean_mask = windows > 2
if np.any(clean_mask):
for window in np.unique(windows[clean_mask]):
rows_to_clean = np.where(clean_mask & (windows == window))[0]
for row_idx in rows_to_clean:
arr_source[row_idx, :1440] = clean_data_vectorized(
arr_source[row_idx, :1440],
window,
percentile
)
# Set min/max values for all rows
arr_source[:, 1440] = min_vals
arr_source[:, 1441] = max_vals
return arr_source
def GetNextWellId(min_well_id):
conn = get_db_connection()
sql = """
SELECT COALESCE(MAX(well_id), 0) AS max_well_id
FROM public.devices
"""
try:
with conn.cursor() as cur:
cur.execute(sql)
result = cur.fetchone()
max_well_id = result[0] if result else None
if max_well_id is None:
    return min_well_id
if min_well_id is None:
    return max_well_id + 1
# The next id is one past the current maximum, but never below the requested minimum.
return max(max_well_id + 1, min_well_id)
except Exception as e:
return min_well_id
def GetAlarmAllDetails(deployment_id):
device_alarms_json_map = {}
devices = GetVisibleDevices(deployment_id)
deployment_alarms_json, dummy = GetAlarmSimple(deployment_id, 0)
for device in devices:
device_id = device[0]
dummy, device_alarm_json = GetAlarmSimple(0, device_id)
device_alarms_json_map[device_id] = device_alarm_json
return deployment_alarms_json, device_alarms_json_map
def GetCalibMaps(device_ids_list):
temp_calib = {}
humid_calib = {}
with get_db_connection() as conn:
with conn.cursor() as cur:
#list all devices that user has access to
sql = f"SELECT device_id, temperature_calib, humidity_calib FROM public.devices WHERE device_id in ({str(device_ids_list)[1:-1]})"
print(sql)
cur.execute(sql)
calib_records = cur.fetchall()#cur.fetchone()
for record in calib_records:
temp_calib[record[0]] = record[1]
humid_calib[record[0]] = record[2]
return temp_calib, humid_calib
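# Illustrative (hypothetical values): GetCalibMaps([17, 23]) ->
#   ({17: "0.0,1.0,0.0", 23: "0.1,1.0,0.0"}, {17: "0.0,1.0,0.0", 23: "0.0,1.0,0.0"})
# i.e. temperature_calib and humidity_calib strings keyed by device_id.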
#==================================== ADD FUNCTIONS BEFORE ============================================
# Main API class
class WellApi:
def on_get_healthz(self, req, resp):
"""Health check endpoint"""
resp.status = HTTP_200
resp.content_type = falcon.MEDIA_TEXT
resp.text = "OK"
def on_get(self, req, resp, path=""):
"""Handle GET requests"""
global s_table_temp
logger.debug(f"GET request to path: {path}")
logger.debug(f"Sent variables: {req.params}")
logger.debug(f"All headers: {dict(req.headers)}")
if path == "" or path == "/":
# Serve the main portal page
blob_data = read_file("well_portal.html")
if blob_data:
resp.content_type = "text/html"
resp.text = blob_data
else:
# Fall back to JSON response if file not found
resp.media = {"message": "Hello from OpenFaaS Serverless Web Server!", "method": "GET"}
return
elif path == "favicon.ico":
favicon_path = "favicon.ico"
if os.path.isfile(favicon_path):
resp.content_type = 'image/x-icon'
resp.data = read_file(favicon_path, type_="BIN")
resp.status = HTTP_200
else:
resp.status = falcon.HTTP_404
return
elif path == "health":
resp.status = HTTP_200
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"status": "healthy"})
return
# Authentication and authorization
token = req.params.get('token')
user_name = req.params.get('user_name')
ps = req.params.get('ps')
if ps != "" and ps != None:
#was token sent in ps field? This allows for token and ps be populated by token or ps
user_info = verify_token(ps)
if user_info["username"] == user_name:
token = ps
else:
#is this valid password?
privileges, user_id = ValidUser(user_name, ps)
if privileges == "0":
resp.media = package_response("Log-Out", HTTP_401)
return
else:
token = generate_token(user_name)
user_info = verify_token(token)
if user_info == None or user_info["username"] != user_name:
resp.media = package_response("Log-Out", HTTP_401)
return
get_function_name = req.params.get('name')
logger.debug(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] - {__name__}.GET_API->{get_function_name}")
privileges = GetPriviledgesOnly(user_name)
if token and user_name:
user_info = verify_token(token)
if user_info is None or user_info["username"] != user_name:
resp.media = package_response("Log-Out", HTTP_401)
return
get_function_name = req.params.get('name')
logger.debug(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] - {__name__}.GET_API->{get_function_name}")
if get_function_name == "deployment_add":
user_id = req.params.get('user_id')
blob_data = read_file("edit_deployment.html")
caretaker = {'deployment_id': 0, 'beneficiary_id': user_id, 'caretaker_id': user_id, 'owner_id': user_id, 'installer_id': user_id, 'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
blob_data = FillFields(blob_data, caretaker, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "devices_list":
st = time.time()
user_name = req.params.get('user_name')
privileges = GetPriviledgesOnly(user_name)
first_s = req.params.get('first')
last_s = req.params.get('last')
try:
first = int(first_s)
except ValueError:
first = 0
try:
last = int(last_s)
except ValueError:
last = 1000000
blob_data = read_file("my_devices.html")
devices = []
if len(privileges) > 0:
devices = GetVisibleDevices(privileges)
users = GetUsersFromDeployments(privileges)
blob_data = UpdateDevicesTable(blob_data, devices, users)
blob_data = UpdateDeploymentsSelector(blob_data, users)
resp.content_type = "text/html"
resp.text = blob_data
#print(blob_data)
return
elif get_function_name == "deployment_edit":
deployment_id = req.params.get('deployment_id')
blob_data = read_file("edit_deployment.html")
deployment = DeploymentDetails(deployment_id)
#blob_data = blob_data.decode("utf-8")
blob_data = FillFields(blob_data, deployment, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "caretaker_add":
blob_data = read_file("edit_caretaker.html")
caretaker = {'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
blob_data = FillFields(blob_data, caretaker, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "caretaker_edit":
user_id = req.params.get('user_id')
blob_data = read_file("edit_caretaker.html")
caretaker = UserDetails(user_id)
#blob_data = blob_data.decode("utf-8")
blob_data = FillFields(blob_data, caretaker, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "device_add":
blob_data = read_file("edit_device.html")
device = {'device_id': 0, 'device_mac': '', 'well_id': '', 'description': '', 'location': '', 'close_to': '', 'radar_threshold': '["s3_max",12]', 'temperature_calib': '0.0,1.0,0.0', 'humidity_calib': '0.0,1.0,0.0'}
blob_data = FillFields(blob_data, device, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "device_edit":
mac = req.params.get('mac')
blob_data = read_file("edit_device.html")
device_det = DeviceDetails(mac)
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
device_det['radar_threshold'] = '["s3_max",12]'
#blob_data = blob_data.decode("utf-8")
blob_data = FillFields(blob_data, device_det, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "beneficiary_edit":
user_id = req.params.get('user_id')
blob_data = read_file("edit_beneficiary.html")
beneficiary = UserDetails(user_id)
#blob_data = blob_data.decode("utf-8")
blob_data = FillFields(blob_data, beneficiary, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "beneficiary_add":
blob_data = read_file("edit_beneficiary.html")
beneficiary = {'user_id': 0, 'role_ids': '1', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
blob_data = FillFields(blob_data, beneficiary, 1)
resp.content_type = "text/html"
resp.text = blob_data
return
elif get_function_name == "get_image_file":
#image represents day in local time
st = time.time()
deployment_id = req.params.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = req.params.get("date")
ddate = ddate.replace("_","-")
group_by = req.params.get("group_by")
timee = StringToEpoch(ddate, time_zone_s)
force_recreate = req.params.get("re_create") == "true"
radar_part = req.params.get("radar_part")
map_type = int(req.params.get("map_type"))
bw = req.params.get("bw") == "true"
unique_identifier = req.params.get("unique_identifier")
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{group_by}_{radar_part}_{map_type}_{bw}_dayly_image.png"
#print(check_file_exists(filename))
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename)
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
#ddate is in Local Time
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1 #Get end of day
#midnight + 5 sec of day begining on date
#time that describes new devices in deployment_history is in UTC therefore timee is in UTC
st = time.time()
vocs_scaled = {}
#file_date is in Local time, so we are comparing that and current Local (to install) Date
devices_list = GetProximityList(deployment_id, timee)
if force_recreate:
st = time.time()
vocs_scaled = {}
s_table_temp = s_table
stored, vocs_scaled, has_old_format, error_string = CreateMapFast_hybrid(filename, devices_list, ddate, bw, time_zone_s, radar_part, group_by) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
if stored != True:
AddToLog("Map not created")
#logger.warning("Map not created")
resp.status = falcon.HTTP_400
resp.media = package_response("Map not created: "+error_string, HTTP_400)
return
else:
AddToLog("Map created")
#lets send over MQTT vocs_scaled
json_data = numpy_to_json(vocs_scaled, devices_list)
MQSendL("/"+unique_identifier, json_data)
#print(time.time() - st)
#lets read and send image from blob
image_bytes, content_type, metadata = GetBlob(filename)
if debug:
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
else:
if image_bytes is None:
raise falcon.HTTPNotFound(
title='Image not found',
description=f'Image {filename} could not be found or retrieved'
)
sys.stdout.flush()
# Set response content type and body
resp.content_type = content_type
resp.data = image_bytes
resp.status = falcon.HTTP_200
toreport = "0"
if "x-amz-meta-hasoldformat" in metadata:
if metadata["x-amz-meta-hasoldformat"] == "True":
toreport = "1"
resp.set_header('X-Image-Metadata', toreport)
resp.set_header('X-Processing-Time', time.time() - st)
resp.set_header('X-Image-Info', json.dumps({'width': 1440, 'height': len(devices_list[0]) *120}))
return
elif get_function_name == "get_photo":
#image represents day in local time
imageName = req.params.get('imageName')
filename = f"{imageName}"
#lets read and send image from blob
image_bytes, content_type = GetJPG(filename)
if debug:
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
else:
if image_bytes is None:
raise falcon.HTTPNotFound(
title='Image not found',
description=f'Image {filename} could not be found or retrieved'
)
sys.stdout.flush()
# Set response content type and body
resp.content_type = content_type
resp.data = image_bytes
resp.status = falcon.HTTP_200
return
elif get_function_name == "get_sensors_map":
# Get filtering parameters
device_id_str = req.params.get('device_id')
sensor = req.params.get('sensor')
# Get standard image parameters
deployment_id = req.params.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = req.params.get("date")
ddate = ddate.replace("_", "-")
group_by = ""#req.params.get("group_by")
timee = StringToEpoch(ddate, time_zone_s)
force_recreate = "true"
radar_part = req.params.get("radar_part")
bw = req.params.get("bw") == "true"
unique_identifier = req.params.get("unique_identifier")
# Create filename with filtering parameters
filter_suffix = ""
if device_id_str:
filter_suffix += f"_dev{device_id_str}"
if sensor:
filter_suffix += f"_sens{sensor}"
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{radar_part}_{bw}{filter_suffix}_sensors_map.png"
# Check if file exists and needs recreation
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename)
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
# Convert date to UTC epoch for device queries
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
st = time.time()
vocs_scaled = {}
if force_recreate:
st = time.time()
vocs_scaled = {}
# Get initial device list
devices_list = GetProximityList(deployment_id, timee)
# Apply device filtering if specified
if device_id_str:
filtered_devices = FilterDevicesByDeviceId(devices_list, device_id_str)
else:
filtered_devices = devices_list
# Apply sensor filtering if specified
if sensor:
filtered_s_table = [sensor]#FilterSensorsBySensorType(sensor)
else:
filtered_s_table = s_table
# Validate we have devices and sensors to process
if not filtered_devices[0] or not filtered_s_table:
AddToLog("No devices or sensors match the specified filters")
resp.media = package_response("No devices or sensors match the specified filters", HTTP_400)
return
# Create the filtered map
stored, vocs_scaled = CreateSensorsMapFast(
filename,
filtered_devices,
ddate,
bw,
time_zone_s,
radar_part,
group_by,
filtered_s_table
)
if stored != True:
AddToLog("Sensors map not created")
resp.media = package_response("Sensors map not created", HTTP_400)
return
else:
AddToLog("Sensors map created")
# Send over MQTT vocs_scaled
json_data = numpy_to_json(vocs_scaled, filtered_devices)
MQSendL("/" + unique_identifier, json_data)
# Read and send image from blob
image_bytes, content_type, metadata = GetBlob(filename)
if debug:
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
else:
if image_bytes is None:
raise falcon.HTTPNotFound(
title='Image not found',
description=f'Image {filename} could not be found or retrieved'
)
sys.stdout.flush()
# Set response content type and body
resp.content_type = content_type
resp.data = image_bytes
resp.status = falcon.HTTP_200
return
elif get_function_name == "get_full_location_map":
raw = req.params.get("raw") == "true"
if raw:
#function=request_deployment_map_new
#token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6InJvYnN0ZXIiLCJleHAiOjE3MzgxNzYzNTZ9.5wzC2dVQhKlMygHPZfombTINbltNq8vxdilLIugNTtA&
#user_name=robster&
#date=2025-01-27&
#deployment_id=21&
#map_type=2
chart_type = 8
else:
chart_type = int(req.params.get("map_type"))
#image represents day in local time
logger.debug("get_full_location_map")
deployment_id = req.params.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = req.params.get("date")
ddate = ddate.replace("_","-")
to_date = ddate
try:
to_date = req.params.get("to_date")
to_date = to_date.replace("_","-")
except:
pass
if to_date != ddate:
chart_type = int(req.params.get("map_type"))
force_recreate = req.params.get("re_create") == "true"
force_recreate_orig = force_recreate
scale_global = req.params.get("scale_global") == "true"
fast = req.params.get("fast") == "true"
bw = req.params.get("bw") == "true"
motion = req.params.get("motion") == "true"
timee = StringToEpoch(ddate, time_zone_s)
filter_minutes = int(req.params.get("filter"))
if "flavor" in req.params: #this is to be used only when creating
flavor = int(req.params.get("flavor"))
else:
flavor = 0
if bw:
bw_s = "BW"
else:
bw_s = "CLR"
if fast:
fast_s = "FAST"
else:
fast_s = "SLOW"
if motion:
motion_s = "M"
else:
motion_s = "S"
if scale_global:
scl_s = "scl"
else:
scl_s = "nscl"
if chart_type == 5 or chart_type == 7:
#now_date = req.params.get("now_date")
#now_date = now_date.replace("_","-")
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
else:
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename)
#file_exists1, time_modified_utc1 = check_file_exists(filename+".bin")
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename)
if time_modified_date <= file_date:
force_recreate = True
else: #same date
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
time_passed = current_time - time_modified_local
#if time_passed.seconds > 300: #recreate if older than 5 minutes
# force_recreate = True
else:
force_recreate = True
if force_recreate:
ddate = ddate.replace("_","-")
#filter_minutes = 5
#filename = os.path.join(scriptDir+"/daily_maps/"+deployment, proximity_string+"_"+deployment+"_"+ddate+"_dayly_image.png")
filename = filename.replace('\\','/')
if chart_type == 4: #"collapsed":
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
elif chart_type == 5: #"history":
GeneratePresenceHistory(filename, force_recreate, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
elif chart_type == 7: #"history full chart":
filename = GeneratePresenceHistoryChart(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
elif chart_type == 8: #"set for mobile"
GenerateFullLocationMapLabelsOut(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
else:
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
#lets read and send image from blob
image_bytes, content_type, metadata = GetBlob(filename)
if image_bytes is None:
raise falcon.HTTPNotFound(
title='Image not found',
description=f'Image {filename} could not be found or retrieved'
)
# Set response content type and body
resp.content_type = content_type
resp.data = image_bytes
resp.status = falcon.HTTP_200
return
elif get_function_name == "get_presence_map":
#image represents day in local time
deployment_id = req.params.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = req.params.get("date")
ddate = ddate.replace("_","-")
force_recreate = req.params.get("re_create") == "true"
scale_global = req.params.get("scale_global") == "true"
fast = req.params.get("fast") == "true"
bw = req.params.get("bw") == "true"
motion = req.params.get("motion") == "true"
timee = StringToEpoch(ddate, time_zone_s)
chart_type = int(req.params.get("map_type"))
filter_minutes = int(req.params.get("filter"))
if bw:
bw_s = "BW"
else:
bw_s = "CLR"
if fast:
fast_s = "FAST"
else:
fast_s = "SLOW"
if motion:
motion_s = "M"
else:
motion_s = "S"
if scale_global:
scl_s = "scl"
else:
scl_s = "nscl"
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(filename)
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = MapFileToDate(filename)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
if force_recreate:
ddate = ddate.replace("_","-")
days = 7
filename = filename.replace('\\','/')
if chart_type == 6: #"AI Locations":
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
st = time.time()
if CreatePresenceMap(filename, devices_list, ddate, 1, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
print(ddate, "Not found")
else:
print(ddate, time.time() - st)
#lets read and send image from blob
image_bytes, content_type, metadata = GetBlob(filename)
if image_bytes is None:
raise falcon.HTTPNotFound(
title='Image not found',
description=f'Image {filename} could not be found or retrieved'
)
# Set response content type and body
resp.content_type = content_type
resp.data = image_bytes
resp.status = falcon.HTTP_200
return
elif get_function_name == "download":
deployment_id = req.params.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
date_from = req.params.get("date_from")
date_to = req.params.get("date_to")
date_from = date_from.replace("_","-")
date_to = date_to.replace("_","-")
consolidated_by = req.params.get("consolidated_by")
if consolidated_by == None:
consolidated_by = "by_minute_rc"
force_recreate = req.params.get("re_create") == "true"
radar_part = req.params.get("radar_part")
zip_filename = f"/{deployment_id}/{deployment_id}_{date_from}_{date_to}_{consolidated_by}_data.zip"
#print(check_file_exists(filename))
if not force_recreate:
file_exists, time_modified_utc = check_file_exists(zip_filename, bucket_name="data-downloads")
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = CSVFileToDate(zip_filename)
if time_modified_date <= file_date:
force_recreate = True
else:
force_recreate = True
#ddate is in Local Time
dates = DatesSpan(date_from, date_to)
to_zip = []
for ddate in dates:
force_recreate_csv = force_recreate
csv_dayly_filename = f"/{deployment_id}/{deployment_id}_{ddate}_{consolidated_by}_data.csv"
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
if not force_recreate_csv:
#timestamps describing new devices in deployment_history are stored in UTC, therefore timee is in UTC
file_exists, time_modified_utc = check_file_exists(csv_dayly_filename, bucket_name="data-downloads")
if file_exists:
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = CSVFileToDate(csv_dayly_filename)
if time_modified_date <= file_date:
force_recreate_csv = True
else:
force_recreate_csv = True
st = time.time()
vocs_scaled = {}
#file_date is in local time, so we compare it against the current local (installation) date
if force_recreate_csv:
st = time.time()
vocs_scaled = {}
devices_list = GetProximityList(deployment_id, timee)
temp_offset = -10
file_stored = CreateDailyCSV(csv_dayly_filename, devices_list, ddate, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
to_zip.append(file_stored)
else:
to_zip.append(csv_dayly_filename)
if to_zip:
success = zip_blobs(
blob_paths=to_zip,
zip_blob_name=zip_filename,
bucket_name="data-downloads",
minio_client=miniIO_blob_client
)
if success:
print("Files successfully zipped")
else:
print("Error occurred while zipping files")
#pack CSV files from BLOB into ZIP
#let's read and send the ZIP from blob storage
zip_bytes, content_type, metadata = GetBlob(zip_filename, bucket_name="data-downloads")
if debug:
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
else:
if zip_bytes is None:
raise falcon.HTTPNotFound(
title='File not found',
description=f'File {zip_filename} could not be found or retrieved'
)
# Set response content type and body
resp.content_type = content_type
resp.data = zip_bytes
resp.status = falcon.HTTP_200
return
resp.media = package_response("Use POST method for this endpoint", HTTP_400)
# Default response for unmatched paths
#resp.media = package_response(f"Path: /{path}", HTTP_200)
def on_post(self, req, resp, path=""):
#ToDo make sure that any read/write data functions are authorized for this user_name
"""Handle POST requests"""
global device_lookup_cache
logger.debug(f"on_post called with path: {path}")
logger.debug(f"Request method: {req.method}")
logger.debug(f"Request path: {req.path}")
logger.debug(f"Request query string: {req.query_string}")
logger.debug(f"Request headers: {req.headers}")
logger.debug(f"Request content type: {req.content_type}")
# First, check if this is a Telnyx webhook request
is_telnyx_webhook = (
req.content_type and 'application/json' in req.content_type and
req.headers.get('USER-AGENT') == 'telnyx-webhooks'
)
if is_telnyx_webhook:
logger.info("Processing Telnyx webhook request")
try:
# Read the raw request body
raw_body = req.stream.read().decode('utf-8')
logger.debug(f"Raw webhook request body: {raw_body}")
if not raw_body:
logger.error("Empty request body received from Telnyx")
resp.status = falcon.HTTP_400
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"error": "Empty request body"})
return
# Parse JSON
webhook_data = json.loads(raw_body)
logger.debug(f"Parsed webhook data: {json.dumps(webhook_data)}")
# Get remote address and request ID
remote_addr = req.headers.get('X-REAL-IP') or req.headers.get('X-FORWARDED-FOR') or 'unknown'
request_id = req.headers.get("X-Request-Id") or req.headers.get("Telnyx-Request-Id") or req.headers.get("X-CALL-ID") or "N/A"
# Process the webhook
handle_telnyx_webhook(webhook_data, remote_addr, request_id)
# Set response status - always acknowledge webhooks with 204 No Content
resp.status = falcon.HTTP_204
return
except json.JSONDecodeError as e:
logger.error(f"Failed to decode JSON from webhook request body: {e}")
resp.status = falcon.HTTP_400
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"error": "Invalid JSON payload"})
return
except Exception as e:
logger.exception(f"Error processing webhook: {e}")
resp.status = falcon.HTTP_500
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"error": "Internal Server Error"})
return
# If we get here, it's not a Telnyx webhook, so process as normal
logger.debug(f"on_post called with path: {path}")
logger.debug(f"Request content type: {req.content_type}")
try:
# Use your existing get_form_data function for URL-encoded
form_data = get_form_data(req)
#logger.debug(f"Form data: {form_data}")
# Add the Base64 photo fix
if 'beneficiary_photo_data' in form_data:
photo_fixed = quick_fix_base64_photo(form_data, form_data["beneficiary_photo"])
if photo_fixed:
form_data["beneficiary_photo_data"] = ""
logger.debug("Base64 photo processed successfully")
logger.debug(f"Form data: {form_data}")
# Your existing processing logic continues here with form_data...
# Get form data using our helper function - but don't read stream again
#form_data = get_form_data(req)
#logger.debug(f"Form data: {form_data}")
try:
# Get basic parameters
function = form_data.get('function')
user_name = form_data.get('user_name')
logger.debug(f"Function: {function}, User: {user_name}")
if function != "credentials" and function != "new_user_form":# and function != "set_deployment":
token = form_data.get('token')
ps = form_data.get('ps')
if ps != "" and ps != None:
#was token sent in ps field? This allows for token and ps be populated by token or ps
user_info = verify_token(ps)
if user_info != None:
if user_info["username"] == user_name:
token = ps
else:
#is this valid password?
privileges, user_id = ValidUser(user_name, ps)
if privileges == "0":
resp.media = package_response("Log-Out", HTTP_401)
return
else:
token = generate_token(user_name)
user_info = verify_token(token)
if user_info == None or user_info["username"] != user_name:
resp.media = package_response("Log-Out", HTTP_401)
return
#with get_db_connection() as db_conn:
privileges = GetPriviledgesOnly(user_name)
#Disallow access to deployment
deployment_id = form_data.get('deployment_id')
if deployment_id != "" and deployment_id != None:
if privileges != "-1" and deployment_id not in privileges:
payload = {'ok': 0, 'error': "not allowed"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
# Handle credentials function - most common case
if function == "credentials":
clientId = form_data.get('clientId')
nonce = form_data.get('nonce')
ps = form_data.get('ps')
if not user_name:
resp.media = package_response("Required field 'user_name' is missing", HTTP_400)
return
if not clientId:
resp.media = package_response("Required field 'clientId' is missing", HTTP_400)
return
if not nonce:
resp.media = package_response("Required field 'nonce' is missing", HTTP_400)
return
if not ps:
resp.media = package_response("Required field 'ps' is missing", HTTP_400)
return
if False:
pass
else:
#let's actually verify the credentials
privileges, user_id = ValidUser(user_name, ps)
if privileges == "0":
access_token = 0
privileges = 0
else:
access_token = generate_token(user_name)
if privileges == "-1":
max_role = -1
else:
max_role = GetMaxRole(user_name)
if "2" in max_role:
max_role = 2
else:
max_role = 1
token_payload = {'access_token': access_token, 'privileges': privileges, 'user_id': user_id, 'max_role': max_role}
resp.media = package_response(token_payload)
resp.status = falcon.HTTP_200
return
# Handle token-protected functions
elif function == "messages_age":
macs = form_data.get('macs')
with get_db_connection() as conn:
#print (sqlr)
with conn.cursor() as cur:
devices = MACsStrToDevIds(cur, macs)
devices_string = ",".join(f"{device_id}" for mac, device_id in devices)
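#for each device_id (the unnest'ed array preserves the request order), grab the latest radar_readings and sensor_readings timestamps via LATERAL subqueries and keep the later of the two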
sqlr = f"""
SELECT
device_id,
GREATEST(
radar_last_time,
sensor_last_time
) AS latest_time
FROM
(SELECT unnest(ARRAY[{devices_string}]) AS device_id) d
LEFT JOIN LATERAL (
SELECT time AS radar_last_time
FROM radar_readings
WHERE device_id = d.device_id
ORDER BY time DESC
LIMIT 1
) r ON true
LEFT JOIN LATERAL (
SELECT time AS sensor_last_time
FROM sensor_readings
WHERE device_id = d.device_id
ORDER BY time DESC
LIMIT 1
) s ON true;"""
logger.debug(f"sqlr= {sqlr}")
cur.execute(sqlr)
times_list = cur.fetchall()
result = {}
for i in range(len(times_list)):
if times_list[i][1] is not None:
result[devices[i][0]] = times_list[i][1].timestamp()
else:
result[devices[i][0]] = 0
dataa = {}
dataa['Command'] = "REPORT"
dataa['body'] = result
dataa['time'] = time.time()
#json_data = json.dumps(dataa)
payload = {'ok': True, 'response': dataa}
resp.media = package_response(payload)
logger.warning(f"Responded: {str(payload)}")
resp.status = falcon.HTTP_200
return
elif function == "voice_ask":
question = form_data.get('question')
deployment_id = form_data.get('deployment_id')
if ('language_from' in form_data):
language_from = form_data.get('language_from').strip()
else:
language_from = "English"
if ('language_to' in form_data):
language_to = form_data.get('language_to').strip()
else:
language_to = "English"
result, language = AskGPT(question, language_from, language_to)
if result[0] == "#":
result = RunCommand(result, {}, deployment_id)
dataa = {}
dataa['Command'] = "REPORT"
dataa['body'] = result
dataa['name'] = ""
dataa['reflected'] = ""
dataa['language'] = language
dataa['time'] = time.time()
#json_data = json.dumps(dataa)
payload = {'ok': True, 'response': dataa}
resp.media = package_response(payload)
logger.warning(f"Responded: {str(payload)}")
resp.status = falcon.HTTP_200
return
elif function == "calibrate_thresholds":
#this will use current date to calibrate radar presence thresholds.
#make sure that data is well defined (has clear absence/presence signature) for all rooms for chosen day
#Format of radar_threshold field = [gates_to_use_Presence_list, p_threshold]
#We need to automate this functionality!!!
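#e.g. radar_threshold = '["s3_max", 12]' -- the radar gate/column to use plus the presence threshold (the same default used in get_device below)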
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = form_data.get("date")
ddate = ddate.replace("_","-")
selected_date = ddate
stdev_range = int(form_data.get("stdev_range"))
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
selected_date = FindCalibrationDate(device_ids, ddate)
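#FindCalibrationDate may substitute a nearby date whose data shows a cleaner absence/presence signature (see the note above)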
devices_c = len(devices_list)
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s, stdev_range)
fields = ["radar_s_min", "radar_s_max", "radar_m_max", "radar_stdev"]
cnt = 0
ids_list = []
for details in devices_list:
ids_list.append(details[1])
devices_list_str = ",".join(map(str, ids_list))
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
minutes = 1440
with get_db_connection() as conn:
with conn.cursor() as cur:
for device_index in range(devices_c):
well_id = devices_list[device_index][0]
device_id = devices_list[device_index][1]
location = devices_list[device_index][2]
sql = get_device_radar_s28_only_query(time_from_str, time_to_str, device_id)
print(sql)
#sql1 = get_deployment_radar_only_colapsed_query(str(device_id), time_from_str, time_to_str, [device_id])
#print(sql1)
st = time.time()
cur.execute(sql)
my_data = cur.fetchall()
timestamps, stationary, motion = process_raw_data(my_data)
print(type(stationary))
# Find the threshold above which the expected fraction of points (this location's average occupancy) lies
AveragePercentSpendsThere = AveragePercentPerLocation[Consolidataed_locations[location]]
threshold_high, threshold_low = FindThreshold(stationary, AveragePercentSpendsThere)
file_save = f"threshold_graph_{location}.png"
title = f"{well_id}_{location}"
threshold2, x_percent, y_percent = ShowThresholdGraph(stationary, file_save, threshold_low, threshold_high, title, AveragePercentSpendsThere, location)
print(f"Maximum curvature point found at:")
print(f"Threshold value: {threshold2:.3f}")
print(f"X: {x_percent:.1f}% of range")
print(f"Y: {y_percent:.1f}% of points above")
ShowArray(stationary, threshold2, filename=f"stationary_{devices_list[device_index][0]}.png", title=f"stationary_{devices_list[device_index][0]}_{devices_list[device_index][2]}", style='line')
##threshold
##presence_mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
### Save visualization to file
##visualize_detection(timestamps, stationary, motion, presence_mask,
## baseline, threshold)
#cur.execute(sql1)
#my_data1 = cur.fetchall()#cur.fetchone()
#print(time.time() - st)
#if my_data == None or my_data1 == None:
#logger.warning(f"No data found for device_id {device_id}")
#else:
#print(type(my_data))
##minute,
##device_id,
##s_min as radar_s_min,
##s_max as radar_s_max,
##m_max as radar_m_max
#values = [tup[1] for tup in my_data] #10 sec (RAW) data
#hist, bins = np.histogram(values, bins=1000, range=(0, 100))
#TR, BR = FindZeroIntersection(hist, bins, f'raw_{device_id}_histogram.png', device_id)
#if True:#device_id == 560:
#plot(values, filename=f"radar_{device_id}_s28.png", title=f"Radar s28 {device_id}", style='line')
#plot(hist, filename=f"radar_{device_id}_s28_hist.png", title=f"Radar s28 {device_id} histogram", style='line')
##life = [tup[3] - tup[2] + tup[4] for tup in my_data1]
#life, average = calculate_life_and_average(my_data1, stdev_range) #5 min data
#lhist, lbins = np.histogram(life, bins=1000)
#TLIFE, BLIFE = FindZeroIntersection(lhist, lbins, f'life_{device_id}_histogram.png', device_id)
#StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE)
##for now not needed...
##ahist, abins = np.histogram(average, bins=1000)
##dummy1, dummy = FindZeroIntersection(ahist, abins)
#if True:#device_id == 560:
#plot(average, filename=f"average_{device_id}.png", title=f"Average {device_id}", style='line')
#plot(life, filename=f"life_{device_id}.png", title=f"Life {device_id}", style='line')
#plot(lhist, filename=f"life_{device_id}_hist.png", title=f"life {device_id} histogram", style='line')
##plot(ahist, filename=f"average_{device_id}_hist.png", title=f"average {device_id} histogram", style='line')
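#second pass: one collapsed (per-minute min/max) query covering every device in the deployment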
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list)
print(sql)
my_data = []
with get_db_connection() as conn:
with conn.cursor() as cur:
cur.execute(sql)
my_data = cur.fetchall()#cur.fetchone()
#print(result)
if my_data is None:
return False
fields_n = len(fields)
stripes = devices_c * fields_n #radar_min and radar_max
print(my_data)
base_minute = ConvertToBase(time_from_str, time_zone_s)
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
#remember: base_minute is offset (smaller) by the number of minutes in stdev_range
st = time.time()
wave_m = np.zeros((stripes, 1440+2*stdev_range, 1), dtype=np.float32)
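#wave_m layout: one row per (device, field) pair with fields ordered as in `fields`, one column per minute of the day plus stdev_range minutes of padding on each side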
for record in my_data:
#(minute,device_id,s28_min,s28_max) = record
minute, device_id = record[0:2]
values = record[2:] # All the min/max values
x = int((minute - base_minute).total_seconds()/60)
device_idx = device_to_index[device_id]
#values[0] are mins, values[1] are maxes
#when trying to illustrate presence use s28_max; when illustrating absence (leaving bed at night) use s28_min
for field_idx, value in enumerate(values):
# Calculate y position
y = device_idx * fields_n + field_idx
wave_m[y, x] = value
print(time.time()-st)
#we need to reliably determine presence and LIFE (motion) in every 5 minutes of data...
#presence is determined by the average value being significantly different from the last known base
#the last known base is the average value during extended periods ( >= H hours) of low stdev (<) while it is determined that:
#the person is moving elsewhere and only 1 person is in the monitored area.
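#a minimal sketch of that idea (an assumption, not the implemented algorithm):
#  base = mean(s28) over the longest window whose rolling stdev stays low
#  present(t) = abs(mean_5min(s28, t) - base) exceeds some multiple of the base noise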
#lets calculate stdevs
for device_index in range(devices_c):
y = device_index * fields_n
row = wave_m[y]
stdevs = np.zeros((1440+2*stdev_range, 1), dtype=np.float32)
stdevs, amplitude = CalcStdevs(row, stdev_range, stdevs)
wave_m[y+3] = stdevs
plot(stdevs, filename=f"radar{device_index}_stdevs.png", title=f"Radar Stdevs {device_index}", style='line')
minutes = 1440
device_index = 0
y = 0
for device in devices_list:
wave = wave_m[y][stdev_range: stdev_range + minutes]
plot(wave,
filename="radar_wave_min.png",
title="Radar Signal Min",
style='line')
# Create histogram with 1000 bins
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
#bin_centers = (bins[:-1] + bins[1:]) / 2
hist_line = hist # These are your y values
# Plot with proper axis labels
plot(hist_line,
filename="radar_histogram_min.png",
title="Radar Signal Histogram Min (1000 bins)",
style='line')
wave = wave_m[y+1]
plot(wave,
filename="radar_wave_max.png",
title="Radar Signal",
style='line')
# Create histogram with 1000 bins
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
#bin_centers = (bins[:-1] + bins[1:]) / 2
hist_line = hist # These are your y values
# Plot with proper axis labels
plot(hist_line,
filename="radar_histogram_max.png",
title="Radar Signal Histogram Max(1000 bins)",
style='line')
print(wave)
device_index += 1
y = device_index * fields_n #advance to the next device's block of rows
#let's see this map
stretch_by = 5
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as an image; 3 channels for RGB
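#each data stripe is replicated stretch_by pixel rows tall; the 0-100 values are rescaled to 0-1280, the input range BestColor is assumed to expect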
st = time.time()
for yy in range(stripes):
rgb_row = []
row = wave_m[yy]
for x in range(minutes):
value = 1280 * row[x] / 100
rgb_row.append(BestColor(value))
for stretch_index in range(stretch_by):
y = yy * stretch_by + stretch_index
arr_stretched[y, :] = rgb_row
print(time.time()-st)
filename = f"{deployment_id}/{deployment_id}_{ddate}_min_max_radar.png"
SaveImageInBlob(filename, arr_stretched, [])
return
elif function == "get_time_deltas":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
sensor = form_data.get('sensor')
selected_date = form_data.get('date')
date_to = form_data.get('to_date')
radar_part = ""
sensor_data = {}
if date_to == None:
date_to = selected_date
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
# Determine direction and swap dates if necessary
if start_date > end_date:
selected_date, date_to = date_to, selected_date
device_id = form_data.get('device_id')
data_type = form_data.get('data_type')
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
all_slices = {}
cleaned_values = {}
temp_calib, humid_calib = GetCalibMaps([device_id])
temp_offset = ExtractTempOffset(temp_calib[device_id])
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, temp_offset)
st = time.time()
cleaned_values = [
(line_part[i][0], (line_part[i][0] - line_part[i-1][0]).total_seconds() * 1000)
for i in range(1, len(line_part))
]
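#cleaned_values pairs each reading's timestamp with the gap (in ms) to the previous reading; the first reading is skipped because it has no predecessor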
print(time.time()-st)
if True:
# Create CSV content as a string
csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
for i in range(len(line_part)):
timestamp, value = line_part[i]
if i == 0:
# First record has no previous record to compare
time_diff_seconds = 0
time_diff_ms = 0
else:
# Calculate time difference from previous record
prev_timestamp = line_part[i-1][0]
time_diff = timestamp - prev_timestamp
time_diff_seconds = time_diff.total_seconds()
time_diff_ms = time_diff_seconds * 1000
# Format the row
row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
csv_content += row
# Write to file
with open(f'time_differences_{sensor}_{device_id}.csv', 'w', encoding='utf-8') as f:
f.write(csv_content)
print(f"CSV file 'time_differences_{sensor}_{device_id}.csv' created successfully!")
line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
sensor_data[sensor] = line_part_t
dataa = {}
all_slices = {}
all_slices[device_id] = sensor_data
dataa['Function'] = "time_deltas"
dataa['all_slices'] = all_slices
dataa['time_zone_st'] = time_zone_s
dataa['device_id'] = device_id
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "get_sensor_deltas":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
sensor = form_data.get('sensor')
selected_date = form_data.get('date')
date_to = form_data.get('to_date')
radar_part = ""
sensor_data = {}
if date_to == None:
date_to = selected_date
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
# Determine direction and swap dates if necessary
if start_date > end_date:
selected_date, date_to = date_to, selected_date
device_id = form_data.get('device_id')
data_type = form_data.get('data_type')
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
all_slices = {}
cleaned_values = {}
line_part = ReadSensorDeltas(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
st = time.time()
cleaned_values = line_part
#[
#(line_part[i][0], (line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds())
#for i in range(1, len(line_part))
#if (line_part[i][0] - line_part[i-1][0]).total_seconds() > 0
#and abs((line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds()) <= 100
#]
#print(time.time()-st)
line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
sensor_data[sensor] = line_part_t
dataa = {}
all_slices = {}
all_slices[device_id] = sensor_data
dataa['Function'] = "time_deltas"
dataa['all_slices'] = all_slices
dataa['time_zone_st'] = time_zone_s
dataa['device_id'] = device_id
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "request_single_slice":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
selected_date = form_data.get('date')
date_to = form_data.get('to_date')
if date_to == None:
date_to = selected_date
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
# Determine direction and swap dates if necessary
if start_date > end_date:
selected_date, date_to = date_to, selected_date
devices_list = form_data.get('devices_list')
#devices_list = devices_list.replace("'",'"')
radar_details = {}
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
sensor_list_loc = [form_data.get('sensor_list')]
device_ids_list, well_ids_list, device_details = extract_device_and_well_ids(devices_list)
#is_nested, device_details = check_and_parse(devices_list)
#if not is_nested:
#device_ids_list = [device_details[1]]
#well_ids_list = [device_details[0]]
#else:
#device_ids_list = [device[1] for device in device_details]
#well_ids_list = [device[0] for device in device_details]
data_type = form_data.get('data_type')
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
#we need to
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
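#epochs are in seconds, so dividing by 60 * 1440 (= 86400 s) converts the span to whole days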
well_id = well_ids_list[0]
device_detail = device_details[0]
all_slices = {}
radar_part = ""
if len(device_detail) > 4:
device_id2_mac = {device_detail[1]: device_detail[4]}
#device_id2_mac = {device_details[1]: device_details[3]}
#epoch_to = '1730592010' #smal sample to test
#radar_part = form_data.get('radar_part') we need to find what radar part is configured in device settings
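#normalize the stored value to the list form [column_name, threshold] (e.g. ["s3_max", 12]) whether it arrives as a JSON string, a bare number string, an int, or a list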
if len(device_detail) > 5:
radar_part_all = device_detail[5]
if type(radar_part_all) == str:
if "," in radar_part_all:
radar_part_all = json.loads(radar_part_all)
else:
radar_part_all = ["s3_max",int(radar_part_all)]
elif type(radar_part_all) == int:
radar_part_all = ["s3_max",radar_part_all]
elif type(radar_part_all) == list:
pass
else:
radar_part_all = ["s3_max",12]
if len(radar_part_all) > 1:
radar_part = radar_part_all[0]
#we only need the column name here, not the _min/_max suffix
if "_" in radar_part:
radar_parts = radar_part.split("_")
radar_part = radar_parts[0]
radar_details[device_detail[1]] = radar_part_all
#devices = GetVisibleDevices(deployment_id)
temp_calib, humid_calib = GetCalibMaps(device_ids_list)
for device_id in device_ids_list:
temp_offset = ExtractTempOffset(temp_calib[int(device_id)])
sensor_data = {}
for sensor in sensor_list_loc:
st = time.time()
if days < 3:
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, temp_offset)
elif days < 14:
bucket_size = "1m"
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
else:
bucket_size = "10m"
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
#already done above
##Lets apply calibration:
#if sensor == "temperature":
#temperature_calib = temperature_offset #float(temp_calib[device_id].split(",")[2])
#line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
if sensor == "humidity":
line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
window = sensor_legal_values[sensor][2]
if False:
# Create CSV content as a string
csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
for i in range(len(line_part)):
timestamp, value = line_part[i]
if i == 0:
# First record has no previous record to compare
time_diff_seconds = 0
time_diff_ms = 0
else:
# Calculate time difference from previous record
prev_timestamp = line_part[i-1][0]
time_diff = timestamp - prev_timestamp
time_diff_seconds = time_diff.total_seconds()
time_diff_ms = time_diff_seconds * 1000
# Format the row
row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
csv_content += row
# Write to file
with open('time_differences.csv', 'w', encoding='utf-8') as f:
f.write(csv_content)
print("CSV file 'time_differences.csv' created successfully!")
#print("@1", time.time() - st)
#first = 3300
#last = 3400
#line_part = line_part[first:last]
line_part_t = []
#st = time.time()
#line_part_t = [tuple(x[:2]) for x in line_part]
#print(time.time() - st)
#st = time.time()
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
#print(time.time() - st)
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
#print("@2", time.time() - st)
#Lets add point in minute 0 and minute 1439
#st = time.time()
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
#print("@3", time.time() - st)
sensor_data[sensor] = cleaned_values
if len(device_detail) > 4:
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
else:
all_slices[device_id] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
dataa = {}
dataa['Function'] = "single_slicedata"
dataa['devices_list'] = devices_list
dataa['all_slices'] = all_slices
dataa['radar_details'] = radar_details
dataa['time_zone_st'] = time_zone_s
dataa['well_id'] = well_id
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "get_sensor_bucketed_data_by_room_sensor":
# Inputs:
# user_name and token
# deployment_id - identifies the deployment set (all rooms and devices) and provides the timezone
# date - one day in a format YYYY-MM-DD
# sensor - temperature/radar/etc.. see full list
# (tells what sensor data to be retrieved)
# "voc" for all smell use s4 (lower reading is higher smell, max=0 find min for 100%)
# "radar" returns s28
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
# location - room name (has to be unique)
# data_type - ML
# Output: JSON structure with the following info
# chart_data with rooms : [list]
deployment_id = form_data.get('deployment_id')
selected_date = form_data.get('date')
date_to = form_data.get('to_date')
if date_to == None:
date_to = selected_date
sensor = form_data.get('sensor') # one sensor
radar_part = form_data.get('radar_part')
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
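#fall back to 'no' bucketing when bucket_size is missing, empty, or not one of the allowed values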
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
location = form_data.get('location')
data_type = form_data.get('data_type')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
epoch_from_utc, epoch_to_utc_ = GetLocalTimeEpochsForDate(selected_date, time_zone_s)
epoch_from_utc_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s)
# obtain devices_list for deployment_id
selected_date = selected_date.replace("_","-")
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
sensor_data = {}
units = "°C"
if "America" in time_zone_s or "US/" in time_zone_s:
units = "°F"
# see https://www.w3schools.com/cssref/css_colors.php
sensor_props = {"temperature": ["red", units],
"humidity": ["blue", "%"],
"voc": ["orange", "PPM"],
"co2": ["orange", "PPM"],
"pressure": ["magenta", "Bar"],
"radar": ["cyan", "%"],
"light": ["yellow", "Lux"]}
current_time_la = datetime.datetime.now(pytz.timezone(time_zone_s))
formatted_time = current_time_la.strftime('%Y-%m-%dT%H:%M:%S') #"2025-02-06T20:09:00"
result_dictionary = {
"last_report_at": formatted_time,
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
"units": sensor_props[sensor][1] if sensor in s_table else "?"
}
#sensor_mapping = {"co2": "s4", "voc": "s9"}
#sensor = sensor_mapping.get(sensor, sensor)
temp_calib, humid_calib = GetCalibMaps(device_ids)
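#temp_calib / humid_calib map device_id -> calibration string; when the string is comma-separated, the temperature offset is its third field (see the temperature branch below)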
#print(device_ids)
#print(temp_calib)
#print(humid_calib)
#print("++++++++++++++++++")
chart_data = []
# example data in each element of devices_list: (266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]') plus a trailing close_to field
for well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to in devices_list:
loc_and_desc = location_name
if description != None and description != "":
loc_and_desc = loc_and_desc + " " + description
if loc_and_desc == location:
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
if sensor == "temperature":
if "," in temp_calib[device_id]:
temperature_calib = float(temp_calib[device_id].split(",")[2])
else:
temperature_calib = -10
line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
if sensor == "humidity":
line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
window = sensor_legal_values[sensor][2]
line_part_t = []
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
if sensor == "temperature":
if units == "°F":#"America" in time_zone_s:
compressed_readings = CelsiusToFahrenheitList(compressed_readings)
sensor_data[sensor] = compressed_readings
chart_data.append({'name': loc_and_desc, 'data': compressed_readings})
result_dictionary['chart_data'] = chart_data
payload = result_dictionary
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "get_sensor_data_by_deployment_id":
# Inputs:
# user_name and token
# deployment_id - identifies the deployment set (all rooms and devices)
# date - one day in a format YYYY-MM-DD
# sensor - temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
# data_type - ML
# Output: JSON structure with the following info
# chart_data with rooms : [list]
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
selected_date = form_data.get('date')
sensor = form_data.get('sensor') # one sensor
radar_part = form_data.get('radar_part')
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
data_type = form_data.get('data_type')
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
# obtain devices_list for deployment_id
selected_date = selected_date.replace("_","-")
#timee = LocalDateToUTCEpoch(selected_date, time_zone_s)+5
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
sensor_data = {}
# see https://www.w3schools.com/cssref/css_colors.php
sensor_props = {"temperature": ["red", "°C"],
"humidity": ["blue", "%"],
"voc": ["orange", "PPM"],
"co2": ["orange", "PPM"],
"pressure": ["magenta", "Bar"],
"radar": ["cyan", "%"],
"light": ["yellow", "Lux"]}
result_dictionary = {
"last_report_at": "2025-02-06T20:09:00",
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
"units": sensor_props[sensor][1] if sensor in s_table else "?"
}
#sensor_mapping = {"co2": "s4", "voc": "s9"}
#sensor = sensor_mapping.get(sensor, sensor)
temp_calib, humid_calib = GetCalibMaps(device_ids) #calibration maps are needed for the temperature offset applied below
chart_data = []
for room_details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]') plus a trailing close_to field
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
if sensor == "temperature":
temperature_calib = float(temp_calib[device_id].split(",")[2])
line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
window = sensor_legal_values[sensor][2]
line_part_t = []
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
#compressed_readings = [(time.strftime("%H:%M", time.gmtime(lst[0][0])), float(sum(t for _, t in lst)/len(lst)))
#for _, lst in ((k, list(g))
#for k, g in itertools.groupby(cleaned_values, key=lambda x: time.gmtime(x[0]).tm_hour))]
sensor_data[sensor] = compressed_readings
chart_data.append({'name': location_name,
'data': compressed_readings})
result_dictionary['chart_data'] = chart_data
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
#is_neste, device_details = check_and_parse(devices_list)
#if not is_nested:
#device_ids_list = [device_details[1]]
#well_ids_list = [device_details[0]]
#else:
#device_ids_list = list(map(lambda x: x[1], device_details))
#well_ids_list =list(map(lambda x: x[0], device_details))
#well_id = well_ids_list[0]
#all_slices = {}
#device_id2_mac = {device_details[1]: device_details[4]}
#for device_id in device_ids_list:
#device_id2_mac
#sensor_data = {}
#for sensor in sensor_list_loc:
#st = time.time()
#line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
#window = sensor_legal_values[sensor][2]
#line_part_t = []
#line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
#st = time.time()
#cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
#cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
#sensor_data[sensor] = cleaned_values
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
#dataa = {}
#dataa['Function'] = "single_slicedata"
#dataa['devices_list'] = devices_list
#dataa['all_slices'] = all_slices
#dataa['time_zone_st'] = time_zone_s
#dataa['well_id'] = well_id
#resp.media = package_response(dataa)
#resp.status = falcon.HTTP_200
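#result_dictionary2 below only documents the expected response shape with sample data; the response actually sent is result_dictionary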
result_dictionary2 = {
"alert_text": "No alert",
"alert_color": "bg-green-100 text-green-700",
"last_report_at": "ISO TIMESTAMP",
"chart_data": [
{
"rooms": [
{ "name": "Bathroom",
"data": [
{"title": "12AM","value": 20},
{"title": "01AM","value": 20},
{"title": "02AM","value": 26},
{"title": "03AM","value": 16},
{"title": "04AM","value": 27},
{"title": "05AM","value": 23},
{"title": "06AM","value": 26},
{"title": "07AM","value": 17},
{"title": "08AM","value": 18},
{"title": "09AM","value": 21},
{"title": "10AM","value": 28},
{"title": "11AM","value": 24},
{"title": "12PM","value": 18},
{"title": "01PM","value": 27},
{"title": "02PM","value": 27},
{"title": "03PM","value": 19},
{"title": "04PM","value": 0},
{"title": "05PM","value": 0},
{"title": "06PM","value": 0},
{"title": "07PM","value": 0},
{"title": "08PM","value": 0},
{"title": "09PM","value": 0},
{"title": "10PM","value": 0},
{"title": "11PM","value": 0}
]
},
{ "name": "Kitchen",
"data": [
{"title": "00AM","value": 19},
{"title": "01AM","value": 10},
{"title": "02AM","value": 8},
{"title": "03AM","value": 14},
{"title": "04AM","value": 20},
{"title": "05AM","value": 8},
{"title": "06AM","value": 7},
{"title": "07AM","value": 17},
{"title": "08AM","value": 3},
{"title": "09AM","value": 19},
{"title": "10AM","value": 4},
{"title": "11AM","value": 6},
{"title": "12PM","value": 4},
{"title": "01PM","value": 14},
{"title": "02PM","value": 17},
{"title": "03PM","value": 20},
{"title": "04PM","value": 19},
{"title": "05PM","value": 15},
{"title": "06PM","value": 5},
{"title": "07PM","value": 19},
{"title": "08PM","value": 3},
{"title": "09PM","value": 30},
{"title": "10PM","value": 1},
{"title": "11PM","value": 12 }
]
},
{ "name": "Living Room",
"data": [
{"title": "00AM","value": 25},
{"title": "01AM","value": 24},
{"title": "02AM","value": 19},
{"title": "03AM","value": 20},
{"title": "04AM","value": 22},
{"title": "05AM","value": 20},
{"title": "06AM","value": 11},
{"title": "07AM","value": 5},
{"title": "08AM","value": 16},
{"title": "09AM","value": 22},
{"title": "10AM","value": 23},
{"title": "11AM","value": 14},
{"title": "12PM","value": 0},
{"title": "01PM","value": 7},
{"title": "02PM","value": 25},
{"title": "03PM","value": 29},
{"title": "04PM","value": 23},
{"title": "05PM","value": 27},
{"title": "06PM","value": 27},
{"title": "07PM","value": 20},
{"title": "08PM","value": 2},
{"title": "09PM","value": 24},
{"title": "10PM","value": 21},
{"title": "11PM","value": 14 }
]
}
]
}
]
}
payload = result_dictionary
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
#AddToLog(payload)
#return
elif function == "request_device_slice":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
epoch_from_utc = form_data.get('epoch_from')
epoch_to_utc = form_data.get('epoch_to')
device_id = form_data.get('device_id')
well_id = form_data.get('well_id')
MAC = form_data.get('MAC')
sensor_list_loc = form_data.get('sensors_list')
sensor_list = sensor_list_loc.split(",")
device_ids_list = [device_id]
well_ids_list = [well_id]
maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
data_type = "RL"
#epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
#epoch_to = '1730592010' #small sample to test
radar_part = form_data.get('radar_part')
well_id = well_ids_list[0]
all_slices = {}
temp_calib, humid_calib = GetCalibMaps(device_ids_list)
#device_id2_mac = {device_details[1]: device_details[4]}
for device_id in device_ids_list:
#device_id2_mac
temp_offset = ExtractTempOffset(temp_calib[device_id])
sensor_data = {}
for sensor in sensor_list:
st = time.time()
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, temp_offset)
window = sensor_legal_values[sensor][2]
#print("@1", time.time() - st)
#first = 3300
#last = 3400
#line_part = line_part[first:last]
line_part_t = []
#st = time.time()
#line_part_t = [tuple(x[:2]) for x in line_part]
#print(time.time() - st)
#st = time.time()
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
#print(time.time() - st)
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
#cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
#print("@2", time.time() - st)
#Lets add point in minute 0 and minute 1439
#st = time.time()
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
#print("@3", time.time() - st)
cleaned_values = ScaleToCommon(cleaned_values_t, sensor)
sensor_data[sensor] = cleaned_values
all_slices[device_id] = sensor_data
dataa = {}
dataa['Function'] = "device_slicedata"
dataa['all_slices'] = all_slices
dataa['time_zone_st'] = time_zone_s
dataa['proximity'] = positions_list
dataa['well_id'] = well_id
dataa['MAC'] = MAC
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
#return
elif function == "request_single_radar_slice":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
selected_date = form_data.get('date')
devices_list = form_data.get('devices_list')
ctrl_key_state = form_data.get('ctrl_key_state')
alt_key_state = form_data.get('alt_key_state')
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
sensor_index_list = form_data.get('sensor_index_list').split(",")
is_nested, device_details = check_and_parse(devices_list)
if not is_nested:
device_ids_list = [device_details[1]]
well_ids_list = [device_details[0]]
else:
device_ids_list = list(map(lambda x: x[1], device_details))
well_ids_list =list(map(lambda x: x[0], device_details))
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
#epoch_to = '1730592010' #small sample to test
radar_part = form_data.get('radar_part')
well_id = well_ids_list[0]
all_slices = {}
device_id2_mac = {device_details[1]: device_details[4]}
for device_id in device_ids_list:
#device_id2_mac
sensor_data = {}
for sensor_index in sensor_index_list:
st = time.time()
sensor = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"][int(sensor_index)]
line_part = ReadRadarDetail(device_id, sensor, epoch_from_utc, epoch_to_utc, alt_key_state)
window = sensor_legal_values["radar"][2]
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
if len(sensor) < 4:
sensor_data[sensor+"_max"] = cleaned_values
else:
sensor_data[sensor] = cleaned_values
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
dataa = {}
dataa['Function'] = "single_slicedata"
dataa['devices_list'] = devices_list
dataa['all_slices'] = all_slices
dataa['time_zone_st'] = time_zone_s
dataa['well_id'] = well_id
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
elif function == "get_deployment":
blob_data = read_file("deployment.html")
deployment_id = form_data.get('deployment_id')
#let's update the "Deployments" selector
users = GetUsersFromDeployments(privileges)
blob_data = UpdateDeploymentsSelector(blob_data, users, False, deployment_id)
resp.content_type = "text/html"
resp.text = blob_data
return
elif function == "get_deployment_j":
deployment_id = form_data.get('deployment_id')
time_zone_st = GetTimeZoneOfDeployment(deployment_id)
date = form_data.get('date')
if date == None:
# Get today's date
local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
date = datetime.datetime.now(local_timezone).strftime('%Y-%m-%d')
#epoch_from_utc = int(datetime.datetime.strptime(date, "%Y-%m-%d").timestamp())
#devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
dataa = {}
dataa['Function'] = "deployment_details"
if privileges == "-1":
deployment = DeploymentDetails(deployment_id)
dataa['deployment_details'] = deployment
else:
privileges = privileges.split(",")
if deployment_id in privileges:
deployment = DeploymentDetails(deployment_id)
dataa['deployment_details'] = deployment
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "set_floor_layout":
deployment_id = form_data.get('deployment_id')
layout = form_data.get('layout')
if privileges == "-1" or deployment_id in privileges:
ok = StoreFloorPlan(deployment_id, layout)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
else:
payload = {'ok': 0, 'error': "not allowed"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "get_floor_layout":
deployment_id = form_data.get('deployment_id')
dataa = {}
dataa['Function'] = "deployment_details"
if privileges == "-1":
layout = GetFloorPlan(deployment_id)
dataa['layout'] = layout
else:
privileges = privileges.split(",")
if deployment_id in privileges:
layout = GetFloorPlan(deployment_id)
dataa['layout'] = layout
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "get_beneficiary":
user_id = form_data.get('user_id')
all_beneficiaries = ListBeneficiaries(privileges, user_id)
beneficiaries_list = []
for beneficiary_temp in all_beneficiaries:
beneficiaries_list.append(str(beneficiary_temp[0]))
dataa = {}
dataa['Function'] = "beneficiary_details"
if user_id in beneficiaries_list:
beneficiary = UserDetails(user_id)
#let's remove fields not relevant for the beneficiary
try:
del beneficiary['time_edit']
except:
pass
try:
del beneficiary['user_edit']
except:
pass
try:
del beneficiary['access_to_deployments']
except:
pass
dataa['beneficiary_details'] = beneficiary
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "get_caretaker":
user_name = form_data.get('user_name')
all_caretakers = ListCaretakers(privileges, user_name)
if len(all_caretakers) > 1:
user_id = form_data.get('user_id')
else:
user_id = str(all_caretakers[0][0])
caretakers_list = []
for caretakers_temp in all_caretakers:
caretakers_list.append(str(caretakers_temp[0]))
dataa = {}
dataa['Function'] = "caretaker_details"
if user_id in caretakers_list:
caretaker = UserDetails(user_id)
#let's remove fields not relevant for the caretaker
try:
del caretaker['time_edit']
except:
pass
try:
del caretaker['user_edit']
except:
pass
dataa['caretaker_details'] = caretaker
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "get_device":
device_id = form_data.get('device_id')
device_mac = form_data.get('mac')
min_well_id = form_data.get('min_well_id')
if min_well_id != None:
min_well_id = int(min_well_id)
if device_mac != None:
device_det = GetDeviceDetailsSingleFromMac(device_mac)
print(device_det)
dataa = {}
dataa['Function'] = "device_details"
dataa['device_details'] = device_det
if device_det == {}:
dataa['next_well_id'] = GetNextWellId(min_well_id)
else:
devices = GetVisibleDevices(privileges)
dataa = {}
dataa['Function'] = "device_details"
dataa['device_details'] = {}
if privileges == "-1":
#device_det = GetDeviceDetails(device_id)
device_det = GetDeviceDetailsSingle(device_id)
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
device_det['radar_threshold'] = '["s3_max",12]'
dataa['device_details'] = device_det
else:
devices_list = []
for device_id_temp in devices:
devices_list.append(str(device_id_temp[0]))
if device_id in devices_list:
device_det = GetDeviceDetailsSingle(device_id)
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
device_det['radar_threshold'] = '["s3_max",12]'
dataa['device_details'] = device_det
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
return
elif function == "set_deployment":
credentials_changed = False
devices_changed = False
#at this point user_name has to be known (created, checked, and communicated over MQTT (preserved!) to the user's device)
#this call comes from the Mobile App; if user_name does not exist, it will be created here
user_name = form_data.get('user_name')
privileges, user_id = GetPriviledgesAndUserId(user_name)
beneficiary_name = form_data.get('beneficiary_name')
if " " in beneficiary_name.strip():
form_data['firstName'], form_data['lastName'] = beneficiary_name.split(" ")
else:
form_data['firstName'] = beneficiary_name.strip()
form_data['lastName'] = ""
beneficiary_address = form_data.get('beneficiary_address')
beneficiary_user_name = form_data.get('beneficiary_user_name')
password = form_data.get('beneficiary_password')
address_map = ParseAddress(beneficiary_address)
#print(address_map)
#{'component_count': 6, 'parsed_components': {'city': 'saratoga', 'country': 'united states', 'house_number': '18569', 'postcode': '95070', 'road': 'allendale avenue', 'state': 'ca'}, 'success': True}
email = form_data.get('beneficiary_email')
#token = form_data.get('token')
signature = form_data.get('signature')
reuse_existing_devices = form_data.get('reuse_existing_devices')
deployment = form_data.get('deployment')
beneficiary_name = form_data.get('beneficiary_name')
image_file_name = form_data["beneficiary_photo"]
gps_lat = form_data.get('lat')
gps_lng = form_data.get('lng')
time_zone_s = GetTZFromGPS(gps_lat, gps_lng)
devices = form_data.get('devices')
#debug_received_data(form_data)
# Or better yet, update to handle both cases for backward compatibility:
#if form_data.get("beneficiary_photo", "") != "":
# # Old Base64 method
# StoreFile2Blob(form_data["beneficiary_photo"], image_file_name, "user-pictures")
#elif "beneficiary_photo" in files:
## New multipart method
#try:
#with open('beneficiary.jpg', 'rb') as f:
#image_data = f.read()
#image_base64 = base64.b64encode(image_data).decode('utf-8')
#StoreFile2Blob(image_base64, image_file_name, "user-pictures")
#os.remove('beneficiary.jpg')
#except Exception as e:
#logger.error(f"Failed to process beneficiary photo: {e}")
devices_list = json.loads(devices)
deployments_id = []
in_db_user_name, editing_user_id, password_in_db, role_ids, priviledges = AccountByEmailExists(email)
if editing_user_id != None:
user_name = GenerateUserNameWithContext(in_db_user_name, form_data['firstName'], form_data['lastName'], editing_user_id)
else:
user_name = GenerateUserNameWithContext(beneficiary_user_name, form_data['firstName'], form_data['lastName'], 0)
if user_name != in_db_user_name:
credentials_changed = True
if (password == None or password == ""):
if password_in_db != None:
password = password_in_db
else:
password = CreatePassword(12)
if password != password_in_db:
credentials_changed = True
if deployment == "NEW":
if in_db_user_name != None:
#this email exists in DB, so cannot be used for NEW deployment!
error_string = f"This email already has associated account!"
print(error_string)
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
editing_user_id = "0" #beneficiary_id #specify if editing existing user, otherwise "0"
editing_deployment_id = "0"
else:
editing_deployment_id = deployment.split(" ")[0]
#this email is not in db, so we will create new user no matter what (NEW was requested!)
#Can same person have multiple deployments?
#Yes, they can have different sensors, visible to different people!
##Lets check if this beneficiary exists in DB
#beneficiary_id = PersonInDB(beneficiary_name)
#if beneficiary_id > 0: #there, so find deployment #
##Can same person have multiple deployments?
##Yes, they can have different sensors, visible to different people!
#deployments_id = GetDepoymentId(beneficiary_id)
#if deployments_id != []:
if editing_user_id == "0":
form_data['role_ids'] = "1" #we need to make sure that "1" is added to potentially "2" in there
else:
if role_ids == "2": #we need to add 1
form_data['role_ids'] = "1,2"
else:
form_data['role_ids'] = role_ids
form_data['access_to_deployments'] = priviledges #at this point we do not know it, since deployment is not yet created! #str(deployment_id)
#we need to update above field in DB after new deployment is generated
form_data['new_user_name'] = form_data['user_name']
form_data['first_name'] = form_data['firstName']
form_data['last_name'] = form_data['lastName']
print(address_map)
#{'component_count': 6, 'parsed_components': {'city': 'saratoga', 'country': 'united states', 'house_number': '18569', 'postcode': '95070', 'road': 'allendale avenue', 'state': 'ca'}, 'success': True}
#{'component_count': 5, 'parsed_components': {'city': 'rijeka', 'country': 'croatia', 'house_number': '51a', 'postcode': '51000', 'road': 'labinska ul.'}, 'success': True}
form_data['address_street'] = GetIfThere(address_map, "house_number") + " " + GetIfThere(address_map, "road")
form_data['address_city'] = GetIfThere(address_map, "city")
form_data['address_zip'] = GetIfThere(address_map, "postcode")
form_data['address_state'] = GetIfThere(address_map, "state")
form_data['address_country'] = GetIfThere(address_map, "country")
form_data['phone_number'] = ""#form_data['phone']
form_data['picture'] = image_file_name
form_data['key'] = password
#update person_details
#if in_db_user_name is None, then we really need to create a new person.
beneficiary_id, if_new = StoreBeneficiary2DB(form_data, editing_user_id, user_id) #this will update or create beneficiary
#let's check that the listed devices are not already part of an existing deployment
if reuse_existing_devices == "0": #do re-use
success, result = DevicesNotUsed(devices, user_name)
else:
success = True
result = {"deployed": [], "not_found": []}
if success: #this should always be true!
if result["deployed"]:
error_string = f"These devices are already deployed: {result['deployed']}"
print(error_string)
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
if result["not_found"]:
error_string = f"These devices are not available: {result['not_found']}"
print(error_string)
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
if not result["deployed"] and not result["not_found"]:
print("All devices are available for deployment")
#ok, error_string = StoreDisclaimer2DB(form_data)
#In DB, we need to update or insert into: deployments, deployment_details and deployment_history
#can there be more than 1 deployment per beneficiary?
#yes. Different set of devices visible to different Caretaker. So below can return multiple deployments.
#editing_deployment_id = "0"
deployment_details = []
if if_new == 0: #existing beneficiary
deployment_details = GetDeploymentDetailsFromBeneficiary(beneficiary_id, editing_deployment_id) #this only returns first one, even if more! TODO (handle it better)
form_data_temp = {}
form_data_temp['editing_deployment_id'] = editing_deployment_id
form_data_temp['beneficiary_id'] = beneficiary_id
form_data_temp['caretaker_id'] = user_id
form_data_temp['owner_id'] = user_id
form_data_temp['installer_id'] = user_id
form_data_temp['address_street'] = form_data['address_street']
form_data_temp['address_city'] = form_data['address_city']
form_data_temp['address_zip'] = form_data['address_zip']
form_data_temp['address_state'] = form_data['address_state']
form_data_temp['address_country'] = form_data['address_country']
form_data_temp['persons'] = form_data.get('persons')
form_data_temp['gender'] = form_data.get('gender')
form_data_temp['race'] = form_data.get('race')
form_data_temp['born'] = form_data.get('born')
form_data_temp['pets'] = form_data.get('pets')
form_data_temp['wifis'] = form_data.get('wifis')
form_data_temp['lat'] = form_data.get('lat')
form_data_temp['lng'] = form_data.get('lng')
form_data_temp['gps_age'] = str(int(float(form_data.get('gps_age'))))
form_data_temp['time_zone_s'] = time_zone_s
#["64B7088909FC", "64B7088909B8", "901506CA3DA0", "142B2F81A020", "64B7088905BC", "64B708890898", "64B70888FAD4","64B7088904BC"]
form_data_temp['devices'] = WellIDs2MAC(form_data.get('devices'))
form_data_temp['wifis'] = ConvertToMapString(form_data.get('wifis'))
editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data_temp, editing_deployment_id)
if is_new_deployment == 1 and if_new:
#This is a new email, and therefore a new person, so grant access to only this deployment
form_data['access_to_deployments'] = str(editing_deployment_id)
beneficiary_id, if_new = StoreBeneficiary2DB(form_data, beneficiary_id, user_id)
#deployment_id = deployment_details[0]
#we need to update deployment_history table if devices changed
devices_in_history_last = GetDeploymentHistoryLast(editing_deployment_id)
if len(devices_in_history_last) > 0:
devices_in_history_last = devices_in_history_last[3]
if ListsSame(devices_in_history_last, form_data_temp['devices']) == False:
ok = StoreToDeploymentHistory(editing_deployment_id, form_data_temp['devices'])
devices_changed = True
#-- Fix permissions
#GRANT USAGE, SELECT ON SEQUENCE deployment_history_id_seq TO well_app;
#-- Fix sequence sync
#SELECT setval('deployment_history_id_seq', (SELECT COALESCE(MAX(id), 0) FROM deployment_history));
else:
ok = StoreToDeploymentHistory(editing_deployment_id, form_data_temp['devices'])
devices_changed = True
print(devices_in_history_last)
ok = 1
#at this step, we know deployment_id
if form_data.get("beneficiary_photo", "") != "":
image_file_name = form_data.get("beneficiary_photo", "")
if image_file_name == "NEW.jpg":
image_file_name = f"{editing_deployment_id}_{beneficiary_name}"
image_file_name = image_file_name.replace(" ", "_")
StoreFile2Blob(form_data["beneficiary_photo"], image_file_name, "user-pictures")
if credentials_changed:
if if_new == 1:
#we need to run ~/mqtt-auth-service/acl_manager.py here
#we created a new beneficiary, so they need a welcome email
#if deployment == "NEW":
SendWelcomeBeneficiaryEmail(form_data['beneficiary_email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'], signature)
else:
SendCredentialsChangedEmail(form_data['beneficiary_email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'], signature)
else:
if devices_changed:
CallICLUpdate()
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else: #DevicesNotUsed failed; this branch should not normally be reached
error_string = f"Error: {result}"
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "purge_phantom_records":
last_valid_id = int(form_data.get('last_valid_device_id'))
try:
conn = get_db_connection()
with conn.cursor() as cur:
# 1. Check for non-null radar_threshold records
cur.execute("""
SELECT COUNT(*)
FROM public.devices
WHERE device_id > %s AND radar_threshold IS NOT NULL
""", (last_valid_id,))
non_null_count = cur.fetchone()[0]
if non_null_count > 0:
resp.media = {
"status": "aborted",
"reason": f"Found {non_null_count} records with device_id > {last_valid_id} that have radar_threshold NOT NULL"
}
return
# 2. Delete phantom records
cur.execute("""
DELETE FROM public.devices
WHERE device_id > %s AND radar_threshold IS NULL
""", (last_valid_id,))
deleted_count = cur.rowcount
if deleted_count > 0:
# 3. Reset sequence to the ACTUAL maximum device_id in the table
cur.execute("SELECT COALESCE(MAX(device_id), 0) FROM public.devices")
actual_max_id = cur.fetchone()[0]
# Reset sequence to actual max
cur.execute("SELECT setval('devices_device_id_seq', %s, true)", (actual_max_id,))
# Get next ID to verify
cur.execute("SELECT nextval('devices_device_id_seq')")
next_id = cur.fetchone()[0]
conn.commit()
resp.media = {
"status": "success",
"deleted_count": deleted_count,
"actual_max_device_id": actual_max_id,
"sequence_reset_to": actual_max_id,
"next_device_id": next_id
}
else:
resp.media = {
"status": "no_action",
"message": "No phantom records found to delete"
}
except Exception as e:
conn.rollback()
resp.media = {"error": str(e)}
resp.status = falcon.HTTP_500
elif function == "request_deployment_map_new":
st = time.time()
print(f"$0 ----{time.time() - st}")
deployment_id = form_data.get('deployment_id')
map_type = form_data.get('map_type')
print(f"$1 ----{time.time() - st}")
datee = form_data.get('date')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
ddate = datee.replace("_","-")
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1 #Get end of day
devices_list = GetProximityList(deployment_id, timee)[0]
maps_dates, _, timezone_s = GetDeploymentDatesBoth(deployment_id)
if maps_dates != []:
if datee == "2022-4-2": #that one is default in HTML so disregard
datee = maps_dates[0]
locations_desc_map = {}
for details in devices_list:
well_id = details[0]
location = details[2]
if details[3] != None and details[3] != "":
location = location +" "+ details[3]
if details[6] != None and details[6] != "":
location = location +" "+ details[6]
MAC = details[4]
locations_desc_map[well_id] = location
print(f"$3 ----{time.time() - st}")
dataa = {}
dataa['Function'] = "deployments_maps_report"
dataa['proximity'] = devices_list
maps_dates.sort(reverse = True)
dataa['maps_dates'] = maps_dates
dataa['device_count'] = len(devices_list)
dataa['time_zone'] = timezone_s
dataa['map_type'] = map_type
#MACs_list = GetMACsListSimple(positions_list)
#MACs_map = {}
#for details in positions_list:
# id = details[0]
# MAC = details[3]
# MACs_map[id] = MAC
#for i in range(len(MACs_list)):
# MACs_map[devices_list[i]] = MACs_list[i][0]
id = devices_list[0][0]
#dataa['MACs_map'] = MACs_map
dataa['locations_desc_map'] = locations_desc_map
#proximity_list = proximity.split(",")
print(f"$4 ----{time.time() - st}")
if id < 200:
checkmarks_string = "".join(f"{tag}>\n" for tag in ["T", "H", "P", "C", "V", "L"]) + "R> "
else: #id >= 200 devices report ["Temperature", "Humidity", "Pressure", "Light", "Radar", "VOC"]
checkmarks_string = "".join(f"{tag}>\n" for tag in ["T", "H", "P", "L", "R"] + [f"S{i}" for i in range(9)]) + "S9> "
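# checkmarks_string is presumably parsed by the web UI: one "<label>>" line per sensor toggle,
# followed below by one "<device_id>>" line per device; only the final entry ends with a space instead of a newline.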
checked_or_not = " checked"
for index in range(len(devices_list)):
details = devices_list[index]
device_id = details[0]
location = details[2]
if details[3] != None and details[3] != "":
location = location + " " + details[3]
if details[6] != None and details[6] != "":
location = location + " " + details[6]
checkmarks_string = checkmarks_string + str(device_id) + '>\n'
checked_or_not = ''
print(f"$5 ----{time.time() - st}")
dataa['checkmarks'] = checkmarks_string
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
elif function == "request_proximity":
deployment = form_data.get('deployment_id')
timee = form_data.get('time')
#timee = StringToEpoch(datee)
#print(deployment, timee)
well_ids, device_ids = GetProximityList(deployment, timee)
#print(proximity)
dataa = {}
dataa['Function'] = "proximity_report"
if len(well_ids) > 0:
dataa['proximity'] = well_ids
else:
dataa['proximity'] = []
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
elif function == "request_devices":
deployment_id = form_data.get('deployment_id')
group_id = form_data.get('group_id')
location = form_data.get('location')
if location == "0":
location = "All"
is_fresh = form_data.get('is_fresh')
matching_devices = GetMatchingDevices(privileges, group_id, deployment_id, location)
dataa = {}
dataa['Function'] = "devices_report"
if len(matching_devices) > 0:
dataa['devices'] = matching_devices
else:
dataa['devices'] = []
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
elif function == "get_deployment_details":
deployment_id = form_data.get('deployment_id')
group_id = form_data.get('group_id')
location = form_data.get('location')
if location == "0":
location = "All"
is_fresh = form_data.get('is_fresh')
matching_devices = GetMatchingDevicesComplete(privileges, group_id, deployment_id, location)
deployment = DeploymentDetails(deployment_id)
dataa = {}
dataa['Function'] = "devices_report"
if len(matching_devices) > 0:
dataa['devices'] = matching_devices
else:
dataa['devices'] = []
if len(deployment) > 0:
dataa['details'] = deployment
else:
dataa['details'] = {}
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
elif function == "device_form":
request_id = str(uuid.uuid4())[:8]
logger.debug(f"[{request_id}] device_form ENTRY")
device_id = None
if 'editing_device_id' in form_data:
device_id = int(form_data.get('editing_device_id'))
logger.debug(f"[{request_id}] Found editing_device_id: {device_id}")
else:
logger.debug(f"[{request_id}] No editing_device_id found, device_id = {device_id}")
ok = 0
logger.debug(f"[{request_id}] privileges = {privileges}")
if privileges == "-1":
logger.debug(f"[{request_id}] CALLING StoreDevice2DB with device_id: {device_id}")
ok = StoreDevice2DB(form_data, device_id)
logger.debug(f"[{request_id}] StoreDevice2DB returned: {ok}")
payload = {'ok': 1}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
else:
logger.debug(f"[{request_id}] Non-admin path...")
if device_id != None:
devices = GetVisibleDevices(privileges)
for device in devices:
if device[0] == device_id:
logger.debug(f"[{request_id}] CALLING StoreDevice2DB in loop with device_id: {device_id}")
ok = StoreDevice2DB(form_data, device_id)
logger.debug(f"[{request_id}] StoreDevice2DB in loop returned: {ok}")
break
else:
logger.debug(f"[{request_id}] device_id is None, returning error")
payload = {'ok': 0}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
logger.debug(f"[{request_id}] Final ok value: {ok}")
elif function == "device_set_group":
group_id = int(form_data.get('group_id'))
MAC = form_data.get('mac')
if MAC != None:
device_id_or_mac = MAC
else:
device_id_or_mac = int(form_data.get('device_id'))
ok = ""
if privileges == "-1":
ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
else:
devices = GetVisibleDevices(privileges)
for device in devices:
if device[0] == device_id_or_mac:
ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
break
if ok != "":
payload = ok
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
debug_string = "This device_id is not editable"
payload = {'ok': ok, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
elif function == "device_set_well_id":
well_id = int(form_data.get('well_id'))
MAC = form_data.get('mac')
if MAC != None:
device_id_or_mac = MAC
else:
device_id_or_mac = int(form_data.get('device_id'))
ok = ""
if privileges == "-1":
ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
else:
devices = GetVisibleDevices(privileges)
for device in devices:
if device[0] == device_id_or_mac:
ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
break
if ok != "":
payload = ok
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
debug_string = "This device_id is not editable"
payload = {'ok': ok, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
elif function == "device_get_live":
MAC = form_data.get('mac')
if MAC != None:
device_id_or_mac = MAC
else:
device_id_or_mac = int(form_data.get('device_id'))
ok = ""
if privileges == "-1":
ok = GetDeviceLive(device_id_or_mac, user_name)
else:
devices = GetVisibleDevices(privileges)
for device in devices:
if device[0] == device_id_or_mac:
ok = GetDeviceLive(device_id_or_mac, user_name)
break
if ok != "":
payload = ok
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
debug_string = "This device_id is not editable"
payload = {'ok': ok, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
elif function == "device_set_network_id":
network_id = int(form_data.get('network_id'))
MAC = form_data.get('mac')
if MAC != None:
device_id_or_mac = MAC
else:
device_id_or_mac = int(form_data.get('device_id'))
ok = ""
if privileges == "-1":
ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
else:
devices = GetVisibleDevices(privileges)
for device in devices:
if device[0] == device_id_or_mac:
ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
break
if ok != "":
payload = ok
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
debug_string = "This device_id is not editable"
payload = {'ok': ok, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
elif function == "device_reboot":
if 'mac' in form_data:
MAC = form_data.get('mac').upper()
device_id_or_mac = MAC
else:
device_id = int(form_data.get('device_id'))
device_id_or_mac = device_id
ok = ""
if privileges == "-1":
ok = DeviceReboot(device_id_or_mac, user_name)
else:
devices = GetVisibleDevices(privileges)
#for this to work, device_id needs to be specified, not MAC!
for device in devices:
if device[0] == device_id_or_mac:
ok = DeviceReboot(device_id_or_mac, user_name)
break
print(f"OK = {ok}")
if ok != "":
payload = ok
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
debug_string = "This device_id is not editable"
payload = {'ok': ok, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_500
return
elif function == "device_delete":
#check if admin!
ok = DeleteRecordFromDB(form_data)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "alarm_on_off":
deployment_id = form_data.get('deployment_id')
alarm_on = int(form_data.get('alarm_on'))
if privileges != "-1":
privileges_lst = privileges.split(",")
if deployment_id not in privileges_lst:
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
# Let's prepare data the same way the store_alarms function does
#read alarm_deployment_settings and all alarm_device_settings from the DB, and arm all bits that are enabled
deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
deployment_alarms = json.loads(deployment_alarms_json)
enabled = deployment_alarms["enabled"]
if alarm_on == 0:
if GetBit(enabled, 2):
enabled = set_character(enabled, 2, "0")
deployment_alarms["enabled"] = enabled
deployment_alarms_json = json.dumps(deployment_alarms)
redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
else:
if not GetBit(enabled, 2):
enabled = set_character(enabled, 2, "1")
deployment_alarms["enabled"] = enabled
deployment_alarms_json = json.dumps(deployment_alarms)
redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
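# The "enabled" and "armed_states" fields are treated as strings of '0'/'1' flags.
# A minimal sketch of the helpers this code assumes (actual implementations may differ):
#
#   def GetBit(flags: str, pos: int) -> bool:
#       return flags[pos] == "1"
#
#   def set_character(flags: str, pos: int, ch: str) -> str:
#       return flags[:pos] + ch + flags[pos + 1:]
#
# e.g. set_character("0100", 2, "1") -> "0110"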
if False: #not needed, since every new_alarms consumer always re-reads alarm_deployment_settings_
record = {
'user_name': user_name,
'deployment_id': deployment_id,
'device_id': device_id
}
record_json = json.dumps(record)
redis_conn.lpush('new_alarms', record_json)
if alarm_on != 0:
for device_id in device_alarms_json_map:
device_alarms_json = device_alarms_json_map[device_id]
device_alarms = json.loads(device_alarms_json)
enabled_alarms = device_alarms["enabled_alarms"]
armed_states = device_alarms["armed_states"]
if GetBit(enabled_alarms, 8):
armed_states = set_character(armed_states, 8, "1")
if GetBit(enabled_alarms, 9):
armed_states = set_character(armed_states, 9, "1")
if GetBit(enabled_alarms, 10):
armed_states = set_character(armed_states, 10, "1")
device_alarms["armed_states"] = armed_states
device_alarms_json = json.dumps(device_alarms)
redis_conn.set(f'alarm_device_settings_{device_id}', device_alarms_json)
ok = StoreAlarms2DBSimple(0, device_id, "", device_alarms_json)
#this notification is needed so well-alerts knows that new data has been stored to the DB
# Create record dictionary
record = {
'user_name': user_name,
'deployment_id': deployment_id,
'device_id': device_id
}
# Convert dictionary to JSON string for storage in Redis list
record_json = json.dumps(record)
# Add to queue (list) - lpush adds to the left/front of the list
redis_conn.lpush('new_alarms', record_json)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "get_alarm_state":
deployment_id = form_data.get('deployment_id')
if privileges != "-1":
privileges_lst = privileges.split(",")
if deployment_id not in privileges_lst:
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
# Let's prepare data the same way the store_alarms function does
#read alarm_deployment_settings and all alarm_device_settings from the DB, and arm all bits that are enabled
deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
deployment_alarms = json.loads(deployment_alarms_json)
enabled = deployment_alarms["enabled"]
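# alarm_state encoding used below: 2 = master alarm switch off, 1 = armed with no device triggered,
# 0 = at least one enabled device has its armed bit cleared (i.e. it has triggered).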
if not GetBit(enabled, 2):
alarm_state = 2 #off
else:
#if any device was triggered, show 0, otherwise 1
alarm_state = 1
for device_id in device_alarms_json_map:
device_alarms_json = device_alarms_json_map[device_id]
device_alarms = json.loads(device_alarms_json)
enabled_alarms = device_alarms["enabled_alarms"]
armed_states = device_alarms["armed_states"]
if GetBit(enabled_alarms, 8):
if not GetBit(armed_states, 8): #if 0
alarm_state = 0
break
if GetBit(enabled_alarms, 9):
if not GetBit(armed_states, 9):
alarm_state = 0
break
if GetBit(enabled_alarms, 10):
if not GetBit(armed_states, 10):
alarm_state = 0
break
payload = {'ok': 1, 'alarm_state':alarm_state}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "submit_mobile_message":
message = form_data.get('message')
mqtt_id = form_data.get("mqtt_id")
privileges, user_id = GetPriviledgesAndUserId(user_name)
if "function" in message:
current_utc_time = datetime.datetime.now(timezone.utc)
message_map = json.loads(message)
func = message_map["function"]
conn = get_db_connection()
cur = conn.cursor()
error_string = ""
ok = 1
try:
current_utc_time = datetime.datetime.now(timezone.utc)
# Convert to epoch time
current_epoch_time = int(current_utc_time.timestamp() *1000)
sql = f"""
INSERT INTO public.mobile_clients_messages
(time, mqtt_id, message, function)
VALUES
({current_epoch_time}, '{CleanObject(mqtt_id)}','{CleanObject(message)}','{CleanObject(func)}');
"""
logger.debug(f"sql= {sql}")
# Execute update query
cur.execute(sql)
# Commit the changes to the database
conn.commit()
# Close the cursor and connection
except Exception as e:
logger.error(f"Error inserting to mobile_clients_messages: {str(e)}")
ok = 0
try:
current_utc_time = datetime.datetime.now(timezone.utc)
# Convert to epoch time
current_epoch_time = int(current_utc_time.timestamp() *1000)
sql1 = f"""
INSERT INTO public.mobile_clients
(mqtt_id, user_name, user_id, last_message, last_message_time)
VALUES
('{CleanObject(mqtt_id)}', '{CleanObject(user_name)}', {user_id}, '{CleanObject(message)}', {current_epoch_time})
ON CONFLICT (mqtt_id)
DO UPDATE SET
user_name = EXCLUDED.user_name,
user_id = EXCLUDED.user_id,
last_message = EXCLUDED.last_message,
last_message_time = EXCLUDED.last_message_time;
"""
logger.debug(f"sql= {sql1}")
# Execute update query
cur.execute(sql1)
# Commit the changes to the database
conn.commit()
except Exception as e:
logger.error(f"Error inserting to mobile_clients: {str(e)}")
ok = 0
cur.close()
conn.close()
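# Note: the INSERTs above interpolate values into the SQL string after CleanObject().
# A hedged alternative (not used here) would be psycopg2 parameter binding, e.g.:
#   cur.execute(
#       "INSERT INTO public.mobile_clients_messages (time, mqtt_id, message, function) VALUES (%s, %s, %s, %s)",
#       (current_epoch_time, mqtt_id, message, func))
# which lets the driver handle quoting and escaping.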
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "get_raw_data":
#container = GetReference("/MAC")
#MAC = req_dict["MAC"][0]
#sensor = req_dict["sensor"][0]
#if "part" in req_dict:
#part = req_dict["part"][0]
#else:
#part = ""
#from_time = req_dict["from_time"][0]
#to_time = req_dict["to_time"][0]
#timezone_str = req_dict["tzone"][0]
#AddToLog("get_raw_data:" + str(MAC) +","+ str(sensor) + "," + str(from_time) + "," + str(to_time) + "," + part+ "," + timezone_str)
##raw_data = GetRawSensorData(container, MAC, sensor, from_time, to_time, timezone_str)
raw_data = []#GetRawSensorDataFromBlobStorage(MAC, sensor, part, from_time, to_time, timezone_str)
data_payload = {'raw_data': raw_data}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
elif function == "get_presence_data":
deployment_id = form_data.get('deployment_id')
device_id_in_s = form_data.get('device_id')
device_id_in = None
refresh = True#form_data.get('refresh') == "1"
if privileges != "-1":
privileges_lst = privileges.split(",")
if deployment_id not in privileges_lst:
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
filter = int(form_data.get('filter'))
ddate = form_data.get('date')
ddate = ddate.replace("_","-")
to_date = form_data.get('to_date')
if to_date == None:
to_date = ddate
else:
to_date = to_date.replace("_","-")
ddate, to_date = ensure_date_order(ddate, to_date)
date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
# Subtract one day
previous_day = date_obj - timedelta(days=1)
# Convert back to string
prev_date = previous_day.strftime("%Y-%m-%d")
data_type = form_data.get('data_type') #all, raw, presence, z-graph
if data_type == None or data_type == "":
data_type = "presence"
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
if device_id_in_s != None: #let's remove the other devices, since only one was requested
device_id_in = int(device_id_in_s)
device_ids = [id for id in device_ids if id == device_id_in]
devices_list = [device for device in devices_list if device[1] == device_id_in]
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
# Calculate the difference in days
# Convert string to datetime object
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
# Subtract one day
previous_day = date_obj - timedelta(days=1)
# Format back to string in the same format
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
epoch_time = calendar.timegm(time_from_z.utctimetuple())
days_difference = (time_to - time_from).days
days_difference_long = days_difference + 1
#epoch_time = calendar.timegm(time_from.utctimetuple())
presence_map = {}
presence_map["time_start"] = epoch_time
presence_map["time_zone"] = time_zone_s
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
for details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3_max",12]
print(well_id, radar_threshold_group)
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
ids_list = []
well_ids = []
id2well_id = {}
radar_fields_of_interest = []
device_field_indexes = {}
for details in devices_list:
if device_id_in == None or details[1] == device_id_in:
if "," in details[5]:
threshold_str = details[5]
try:
threshold_lst = json.loads(threshold_str)
except:
threshold_lst = ["s3",12]
#threshold_lst = ["s3_max",12]
else:
threshold_lst = ["s3",int(details[5])]
radar_field = threshold_lst[0]
#since we are getting 10-second data, there is no more need for min or max
radar_field = radar_field.split("_")[0]
if radar_field not in radar_fields_of_interest:
device_field_indexes[radar_field] = len(radar_fields_of_interest)
radar_fields_of_interest.append(radar_field)
ids_list.append(details[1])
id2well_id[details[1]] = details[0]
well_ids.append(details[0])
presence_map["well_ids"] = well_ids
devices_list_str = ','.join(str(device[1]) for device in devices_list)
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
#sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
#print(sql)
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
print(zsql)
with get_db_connection() as conn:
with conn.cursor() as cur:
#cur.execute(sql)
#my_data = None
myz_data = None
#my_data = cur.fetchall()
cur.execute(zsql)
myz_data = cur.fetchall()
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
row_nr_2_device_id = {}
cnt = 0
row_nr_2_device_id[0] = 0
if myz_data != None:
temporary_map_day_plus = {}
presence_map['z_graph'] = {}
presence_map['longpresence'] = {}
presence_map['raw'] = {}
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = datetime.datetime(
#parsed_time.year,
#parsed_time.month,
#parsed_time.day,
#parsed_time.hour - 7, # Adjust for UTC-7
#parsed_time.minute,
#parsed_time.second,
#tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
#)
start_time = parsed_time.astimezone(pytz.UTC)
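# The presence arrays below hold one slot per 10-second "deca": 6 * 1440 = 8640 decas per day,
# times days_difference_long days (the requested range plus the preceding day used as Z-graph context).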
for details in devices_list:
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3",12]
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
presence_map['z_graph'][well_id] = [] #just place holder
temporary_map_day_plus[well_id] = [0] * 6 * 1440 * days_difference_long
presence_map['longpresence'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
presence_map['raw'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
print(f"start_time: {start_time}")
print(f"epoch_time being sent: {epoch_time}")
print(f"epoch_time as date: {datetime.datetime.fromtimestamp(epoch_time, tz=pytz.UTC)}")
#start_time_ = myz_data[0][0]
st = time.time()
device_lookup_cache = {}
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
presence_map = optimized_radar_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
#save_list_to_csv_method1(presence_map['longpresence'][475], "longpresence_initial_data.csv")
overlaps_str = GetOverlapps(deployment_id)
overlaps_lst = []
if overlaps_str != None:
if ":" in overlaps_str:
overlaps_lst = json.loads(overlaps_str)
temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
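# GetOverlapps() presumably returns a JSON string describing device/zone overlaps for this deployment;
# ClearOverlaps() uses it to suppress presence that would otherwise be double-counted across overlapping rooms.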
for device_id in ids_list:
device_id_str = str(device_id)
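# "filter" is the minimum run length in decas; filter_short_groups_c_wc presumably drops presence runs
# shorter than that (the _wc variant appears to cache results per device and date range, hence prev_date/to_date).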
if filter > 1:
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s, refresh)
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
else: #straight decas
#presence_list = presence_map["presence"][id2well_id[device_id]]
#if data_type != "presence":
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
presence_map["z_graph"][id2well_id[device_id]] = z_graph
if data_type == "all" or data_type == "multiple":
#lets create "multiple" series
seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
#here seen_at is straight decas
#seen_at = [1 if x >= 2 else 0 for x in seen_at]
pers_in_deka = []
dekas_in_day = 6 * 1440
for i in range(dekas_in_day, len(seen_where_list_uf)):
n_pers = seen_where_list_uf[i]
pers_in_deka.append(100*len(n_pers))
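# Each pers_in_deka entry is 100 * (number of rooms with presence in that deca), presumably scaled
# by 100 so the person count can be drawn on the same axis as the other series.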
seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
seen_at_lst = Decompress(seen_at)
pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
persons_decompressed = Decompress(pers_in_deka)
persons = Compress(persons_decompressed)
multiple_list = CreateZGraphAI(seen_at_lst)
presence_map["multiple"] = multiple_list
presence_map["persons"] = persons
presence_map["presence"] = CompressList(presence_map["longpresence"])
if data_type == "z-graph":
if "raw" in presence_map:
del presence_map["raw"]
#if "presence" in presence_map:
# del presence_map["presence"]
if "longpresence" in presence_map:
del presence_map["longpresence"]
if data_type == "multiple":
if "raw" in presence_map:
del presence_map["raw"]
#if "presence" in presence_map:
# del presence_map["presence"]
if "z_graph" in presence_map:
del presence_map["z_graph"]
#if "presence" in presence_map:
if "longpresence" in presence_map:
del presence_map["longpresence"]
data_payload = presence_map
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
elif function == "get_zgraph_data":
deployment_id = form_data.get('deployment_id')
if privileges != "-1":
privileges_lst = privileges.split(",")
if deployment_id not in privileges_lst:
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
device_id = int(form_data.get('device_id'))
devices = GetVisibleDevices(privileges)
if not any(item[0] == device_id for item in devices):
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
filter = int(form_data.get('filter'))
ddate = form_data.get('date')
ddate = ddate.replace("_","-")
to_date = form_data.get('to_date')
if to_date == None:
to_date = ddate
else:
to_date = to_date.replace("_","-")
ddate, to_date = ensure_date_order(ddate, to_date)
data_type = "z-graph"
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
# Convert string to datetime object
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
# Subtract one day (the Z-graph uses the preceding day as context)
previous_day = date_obj - timedelta(days=1)
prev_date = previous_day.strftime("%Y-%m-%d") #needed later by filter_short_groups_c_wc
# Format back to string in the same format
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
epoch_time = calendar.timegm(time_from_z.utctimetuple())
#epoch_time = calendar.timegm(time_from.utctimetuple())
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
presence_map = {}
presence_map["time_start"] = epoch_time
presence_map["time_zone"] = time_zone_s
# Calculate the difference in days
days_difference = (time_to - time_from).days
days_difference_long = days_difference + 1
device_id_2_threshold = {}
device_id_2_location = {0: "Outside"}
for details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
if radar_threshold_group_st == None:
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
if len(radar_threshold_group_st) > 8:
radar_threshold_group = json.loads(radar_threshold_group_st)
else:
radar_threshold_group = ["s3_max",12]
print(well_id, radar_threshold_group)
device_id_2_location[device_id] = location_name
device_id_2_threshold[device_id] = radar_threshold_group
ids_list = []
well_ids = []
id2well_id = {}
radar_fields_of_interest = []
device_field_indexes = {}
for details in devices_list:
threshold_str = details[5]
try:
threshold_lst = json.loads(threshold_str)
except:
threshold_lst = ["s3",12]
#threshold_lst = ["s3_max",12]
radar_field = threshold_lst[0]
#since we are getting 10-second data, there is no more need for min or max
radar_field = radar_field.split("_")[0]
if radar_field not in radar_fields_of_interest:
device_field_indexes[radar_field] = len(radar_fields_of_interest)
radar_fields_of_interest.append(radar_field)
ids_list.append(details[1])
id2well_id[details[1]] = details[0]
well_ids.append(details[0])
presence_map["well_ids"] = well_ids
devices_list_str = ','.join(str(device[1]) for device in devices_list)
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
print(sql)
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
#zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
print(zsql)
with get_db_connection() as conn:
with conn.cursor() as cur:
#cur.execute(sql)
#my_data = None
myz_data = None
#my_data = cur.fetchall()
#if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
cur.execute(zsql)
myz_data = cur.fetchall()
#if my_data != None:
#device_id_2_threshold = {}
#device_id_2_location = {0: "Outside"}
#row_nr_2_device_id = {}
#cnt = 0
#row_nr_2_device_id[0] = 0
##presence_map['longpresence'] and temporary_map_day_plus are similar, except one is used for Z-graph, and another for multiple persons detection
#if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
#presence_map['presence'] = {}
#presence_map['longpresence'] = {}
#if data_type == "raw" or data_type == "all":
#presence_map['raw'] = {}
#for details in devices_list:
##(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
#well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
#if data_type == "raw" or data_type == "all":
#zeros_list = [0] * 6 * 1440 * days_difference
#presence_map['raw'][well_id] = zeros_list
#if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
#zeros_list = [0] * 6 * 1440 * days_difference
#presence_map['presence'][well_id] = zeros_list
##presence_map[][well_id] = zeros_list
#cnt += 1
#row_nr_2_device_id[cnt] = well_id
#if radar_threshold_group_st == None:
#radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
#if len(radar_threshold_group_st) > 8:
#radar_threshold_group = json.loads(radar_threshold_group_st)
#else:
#radar_threshold_group = ["s3",12]
#device_id_2_location[well_id] = location_name
#device_id_2_threshold[well_id] = radar_threshold_group
#start_time_ = my_data[0][0]
#parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
#start_time = datetime.datetime(
#parsed_time.year,
#parsed_time.month,
#parsed_time.day,
#parsed_time.hour - 7, # Adjust for UTC-7
#parsed_time.minute,
#parsed_time.second,
#tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
#)
#presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
##last_device_id = 0
##for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
##local_time = radar_read[0]
##deca = int((local_time - start_time).total_seconds() / 10)
##device_id = radar_read[1]
##if device_id != last_device_id:
##last_device_id = device_id
##if data_type == "raw" or data_type == "all":
##days_decas = len(presence_map['raw'][id2well_id[device_id]])
##else:
##days_decas = len(presence_map['presence'][id2well_id[device_id]])
##well_id = id2well_id[device_id]
##radar_threshold_group_st = device_id_2_threshold[device_id]
##threshold_sig, threshold = radar_threshold_group_st
##threshold_sig = threshold_sig.split("_")[0]
##radar_val = radar_read[2+device_field_indexes[threshold_sig]]
##if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
##if radar_val > threshold:
##if deca < days_decas:
##presence_map['presence'][id2well_id[device_id]][deca] = 1
##if data_type == "raw" or data_type == "all":
##if deca < days_decas:
##presence_map['raw'][id2well_id[device_id]][deca] = radar_val
if myz_data != None:
temporary_map_day_plus = {}
presence_map['z_graph'] = {}
presence_map['longpresence'] = {} #must be initialized here; it is filled per well_id below
for details in devices_list:
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
#if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
#zeros_list = [0] * 6 * 1440 * (days_difference_long) #+1 is for previous day
presence_map['z_graph'][well_id] = [] #just place holder
temporary_map_day_plus[well_id] = [0] * 6 * 1440 * (days_difference_long)
presence_map['longpresence'][well_id] = [0] * 6 * 1440 * (days_difference_long) #just place holder
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
start_time = parsed_time.astimezone(pytz.UTC) #convert to UTC instead of the old hard-coded UTC-7 adjustment
#start_time_ = myz_data[0][0]
st = time.time()
device_lookup_cache = {}
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
overlaps_str = GetOverlapps(deployment_id)
overlaps_lst = []
if overlaps_str != None:
if ":" in overlaps_str:
overlaps_lst = json.loads(overlaps_str)
temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
#if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
for device_id in ids_list:
device_id_str = str(device_id)
#if data_type == "presence" or data_type == "all" or data_type == "z-graph":
if filter > 1:
#presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
#presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
#presence_listt = filter_short_groupss(presence_map["presence"][id2well_id[device_id]], filter)
#if presence_list != presence_listt:
# print("stop")
#if data_type != "presence":
#longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
#longpresence_listt = filter_short_groupss(presence_map["longpresence"][id2well_id[device_id]], filter)
#if longpresence_list != longpresence_listt:
# print("stop")
# store_to_file(presence_map["longpresence"][id2well_id[device_id]], "test_list")
#presence_map["presence"][id2well_id[device_id]] = presence_list
#if data_type != "presence":
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
else: #straight decas
#presence_list = presence_map["presence"][id2well_id[device_id]]
#if data_type != "presence":
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
#if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
if filter > 1: #straight decas
presence_list1 = filter_short_high_groups_iterative_analog(temporary_map_day_plus[id2well_id[device_id]], filter)
else:
presence_list1 = temporary_map_day_plus[id2well_id[device_id]]
temporary_map_day_plus[id2well_id[device_id]] = presence_list1
#if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
for device_id in ids_list:
#print(device_id_2_threshold[id2well_id[device_id]])
z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
presence_map["z_graph"][id2well_id[device_id]] = z_graph
if data_type == "all" or data_type == "multiple":
#lets create "multiple" series
seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
#here seen_at is straight decas
#seen_at = [1 if x >= 2 else 0 for x in seen_at]
pers_in_deka = []
dekas_in_day = 6 * 1440
for i in range(dekas_in_day, len(seen_where_list_uf)):
n_pers = seen_where_list_uf[i]
pers_in_deka.append(100*len(n_pers))
seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
seen_at_lst = Decompress(seen_at)
pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
persons_decompressed = Decompress(pers_in_deka)
persons = Compress(persons_decompressed)
multiple_list = CreateZGraphAI(seen_at_lst)
presence_map["multiple"] = multiple_list
presence_map["persons"] = persons
presence_map["presence"] = presence_map["longpresence"]
if data_type == "z-graph":
if "raw" in presence_map:
del presence_map["raw"]
if "presence" in presence_map:
del presence_map["presence"]
if "longpresence" in presence_map:
del presence_map["longpresence"]
if data_type == "multiple":
if "raw" in presence_map:
del presence_map["raw"]
if "presence" in presence_map:
del presence_map["presence"]
if "longpresence" in presence_map:
del presence_map["longpresence"]
if "z_graph" in presence_map:
del presence_map["z_graph"]
if "presence" in presence_map:
presence_map["presence"] = CompressList(presence_map["presence"])
data_payload = presence_map
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
elif function == "get_candle_data":
container = GetReference("/MAC")
MAC = req_dict["MAC"][0]
sensor = req_dict["sensor"][0]
from_time = req_dict["from_time"][0]
to_time = req_dict["to_time"][0]
part = req_dict["part"][0]
tzone = req_dict["tzone"][0]
AddToLog(str(req_dict))
candle_data = GetCandleSensorData(container, MAC, sensor, from_time, to_time, part, tzone)
data_payload = {'candle_data': candle_data}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
elif function == "deployment_from_app":
editing_deployment_id = form_data.get('editing_deployment_id')
editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data, editing_deployment_id)
if editing_deployment_id != 0:
payload = {'ok': 1, 'deployment_id': editing_deployment_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
payload = {'ok': 0, 'error': "StoreDeployment2DB failed", 'deployment_id': editing_deployment_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "deployment_form":
editing_deployment_id = form_data.get('editing_deployment_id')
editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data, editing_deployment_id)
if editing_deployment_id != 0:
payload = {'ok': 1, 'deployment_id': editing_deployment_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
payload = {'ok': 0, 'deployment_id': editing_deployment_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "deployment_delete":
ok = DeleteRecordFromDB(form_data)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "deployments_list":
result_list = []
first_s = form_data.get('first')
last_s = form_data.get('last')
user_id = form_data.get('user_id')
first = 0
last = 1000000
try:
if first_s != None:
first = int(first_s)
except ValueError:
pass
try:
if last_s != None:
last = int(last_s)
except ValueError:
pass
#user_id = form_data.get('user_id')
if user_id == "" or user_id == None:
privileges, user_id = GetPriviledgesAndUserId(user_name)
else:
privileges = GetPriviledgesOnly(user_name)
all_deployments = ListDeployments(privileges, user_id)
cnt = 0
for deployment in all_deployments:
#print(deployment)
#if deployment['deployment_id'] == 40:
# print("stop")
cnt += 1
if cnt >= first:
#print (deployment['beneficiary_id'])
if deployment['beneficiary_id'] in user_id_2_user.keys():
#print(user_id_2_user[deployment['beneficiary_id']])
caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": user_id_2_user[deployment['beneficiary_id']][3], "first_name": user_id_2_user[deployment['beneficiary_id']][5], "last_name": user_id_2_user[deployment['beneficiary_id']][6]}
result_list.append(caretaker_min_object)
#else:
# caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": deployment['beneficiary_id'][3], "first_name": deployment['beneficiary_id'][5], "last_name": deployment['beneficiary_id'][6]}
if cnt > last:
break
payload = {'result_list': result_list}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "device_list":
result_list = []
first_s = form_data.get('first')
last_s = form_data.get('last')
try:
first = int(first_s)
except ValueError:
first = 0
try:
last = int(last_s)
except ValueError:
last = 1000000
#user_id = form_data.get('user_id')
devices = GetVisibleDevices(privileges)
payload = {'result_list': devices}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "get_devices_locations":
well_ids = form_data.get('well_ids')
details_list = WellId2Details(well_ids)
#print(details_list)
to_report = []
for details in details_list:
if details[3] == -1:
report_record = str(details[0]) + " ?"
else:
report_record = str(details[0]) + " " + location_names[details[3]]
if details[4] != "" and details[4] != "initial":
report_record = report_record + " " +details[4]
if details[3] == -1:
to_report.append((details[0], "?", details[4], report_record))
else:
to_report.append((details[0], location_names[details[3]], details[4], report_record))
#print(to_report)
payload = {'deployments': to_report}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "find_deployments":
#For all devices, find deployments that they are part of
#For all those deployments, return:
#deployment_id First_name Last_name (of beneficiary)
#list of (well_id, location_descriptions) all devices in each deployment
well_ids = form_data.get('well_ids')
#well_ids_lst = well_ids.split(",")
details_list = WellId2Details(well_ids)
to_report = []
privileges_lst = []
if "," in privileges:
privileges_lst = privileges.split(",")
if len(details_list) > 0:
macs_list_clean = []
devices_details = {}
for entry in details_list:
macs_list_clean.append(entry[2])
macs_formatted = "', '".join(macs_list_clean)
with get_db_connection() as conn:
with conn.cursor() as cur:
sql = f"SELECT deployment_id, beneficiary_id, devices FROM public.deployment_details WHERE devices::text ~* ANY(ARRAY['{macs_formatted}']);"
print(sql)
cur.execute(sql)
result = cur.fetchall()
if result != None and result != []:
users_list_clean = []
for entry in result: #for every deployment
macs_list_clean = []
deployment_id = str(entry[0])
if privileges == '-1':
deployment = DeploymentDetails(deployment_id)
address_str = ExtractAddress(deployment)
deployment = PurgeDeployment(deployment)
deployment["address"] = address_str
users_list_clean.append(str(entry[1])) #beneficiary_id
users_formatted = ", ".join(users_list_clean)
mac_devices_in_deployment = ast.literal_eval(entry[2])
for mac in mac_devices_in_deployment:
macs_list_clean.append(mac)
device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
devices_details[deployment_id] = (deployment, device_ids, device_list, users_formatted)
elif deployment_id in privileges_lst:
deployment = DeploymentDetails(deployment_id)
address_str = ExtractAddress(deployment)
deployment = PurgeDeployment(deployment)
deployment["address"] = address_str
users_list_clean.append(str(entry[1]))
users_formatted = ", ".join(users_list_clean)
mac_devices_in_deployment = ast.literal_eval(entry[2])
for mac in mac_devices_in_deployment:
macs_list_clean.append(mac)
device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
devices_details[deployment_id] = (deployment, device_ids, device_list, users_formatted)
for entry in result:
deployment_id = str(entry[0])
device_list_to_report = []
device_list = devices_details[deployment_id][2]
users_formatted = devices_details[deployment_id][3]
for device in device_list:
#we need well_id, location and description only
device_list_to_report.append((device[0], device[2], device[3]))
sql = f"SELECT first_name, last_name, email, picture FROM public.person_details WHERE user_id IN ({users_formatted});"
print(sql)
cur.execute(sql)
result1 = cur.fetchall()
counter = 0
deployment_id = str(entry[0])
deployment_t = devices_details[deployment_id][0]
first_name, last_name, email, photo = result1[counter]
deployment_t["beneficiary_first_name"] = first_name
deployment_t["beneficiary_last_name"] = last_name
deployment_t["beneficiary_email"] = email
deployment_t["photo"] = photo
to_report.append((entry, device_list_to_report, deployment_t))
print(to_report)
payload = {'deployments': to_report}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "device_list_by_deployment":
result_list = []
first_s = form_data.get('first')
last_s = form_data.get('last')
deployment_id = form_data.get('deployment_id')
try:
first = int(first_s)
except ValueError:
first = 0
try:
last = int(last_s)
except ValueError:
last = 1000000
devices = []
if privileges == "-1":
devices = GetVisibleDevices(deployment_id)
else:
privileges = privileges.split(",")
if deployment_id in privileges:
devices = GetVisibleDevices(deployment_id)
payload = {'result_list': devices}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "device_list_4_gui":
result_list = []
deploymentData = []
deviceData = []
macs_list = []
user_id = GetUserId(user_name)
all_deployments = ListDeployments(privileges, user_id)
#{'deployment_id': 21, 'beneficiary_id': 25, 'caretaker_id': 1, 'owner_id': 1, 'installer_id': 1, 'address_street': '661 Encore Way', 'address_city': 'San Jose', 'address_zip': '95134', 'address_state': 'CA', 'address_country': 'USA', 'devices': '["64B70888FAB0","64B70888F860","64B70888F6F0","64B708896BDC","64B708897428","64B70888FA84","64B70889062C"]', 'wifis': '', 'persons': 1, 'gender': 1, 'race': 1, 'born': 1940, 'pets': 0, 'time_zone': 'America/Los_Angeles'}
MAC2Deployment = {}
for deployment in all_deployments:
beneficiary_id = deployment['beneficiary_id']
user = GetNameFromUserId(beneficiary_id)
name = f"{user[1]} {user[2]}"
deploymentData.append({'deployment_id': str(deployment['deployment_id']), 'name': name})
devices = deployment['devices']
if devices != None and devices != "":
devices_list = ToList(devices)
for device in devices_list:
macs_list.append(device)
MAC2Deployment[device] = deployment['deployment_id']
#deviceData.append({'well_id': device[0], 'mac': device[1]})
deployment_id_list = []
deviceData = []
#row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
with get_db_connection() as conn:
with conn.cursor() as cur:
device_ids, device_list = MACsToWellIds(cur, macs_list)
for device in device_list:
if MAC2Deployment[device[4]] != "":
deviceData.append({'well_id': device[0], 'mac': device[4], 'room_name': device[2], 'deployment_id': MAC2Deployment[device[4]]})
#deploymentData = [{'deployment_id': '21', 'name': 'Robert Zmrzli House'}, {'deployment_id': '36', 'name': 'Fred Zmrzli Apartment'}]
#deviceData = [{ 'well_id': '300', 'mac': '64B70888F6F0', 'room_name': 'Living Room', 'deployment_id': '21' }, { 'well_id': '301', 'mac': '64B70888F6F1', 'room_name': 'Bathroom Main', 'deployment_id': '36' }]
payload = {
'status': "success", 'deploymentData': deploymentData, 'deviceData': deviceData
}
logger.debug(f"device_list_4_gui------ {payload} ------------------------------------------")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "caretaker_form":
editing_user_id = form_data.get('editing_user_id')
email = form_data.get('email')
user_id = form_data.get('user_id')
if "@" not in email:
resp.media = package_response("Missing or illegal 'email' parameter", HTTP_400)
return
print(privileges)
if privileges == "-1":
ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
if ok == 1:
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
payload = {'ok': ok, 'error': "StoreCaretaker2DB failed"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif "-1" in privileges:
payload = {'ok': 0, 'error': "Not allowed!"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "caretaker_delete":
if privileges == "-1":
ok = DeleteRecordFromDB(form_data)
else:
ok = 0
AddToLog(ok)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "caretakers_list":
result_list = []
first_s = form_data.get('first')
last_s = form_data.get('last')
try:
first = int(first_s)
except ValueError:
first = 0
try:
last = int(last_s)
except ValueError:
last = 1000000
if privileges == "-1":
all_caretakers = ListCaretakers(privileges, user_name)
cnt = 0
for caretaker in all_caretakers:
cnt += 1
if cnt >= first:
caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
result_list.append(caretaker_min_object)
if cnt > last:
break
elif "-1" in privileges:
all_caretakers = ListCaretakers(privileges, user_name)
cnt = 0
for caretaker in all_caretakers:
cnt += 1
if cnt >= first:
caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
result_list.append(caretaker_min_object)
if cnt > last:
break
payload = {'result_list': result_list}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "new_user_form":
#this is only used from disclaimer form!
devices = form_data.get('devices')
user_name = form_data.get('user_name')
password = form_data.get('password')
email = form_data.get('email')
#if user_name == "" or password == "":
#lets skip comparission here
if True:
success = True
result = {}
result["deployed"] = []
result["not_found"] = []
else:
#lets check if devices listed are not part of existing deployment
success, result = DevicesNotUsed(devices, user_name)
if success:
if result["deployed"]:
error_string = f"These devices are already deployed: {result['deployed']}"
print(error_string)
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
if result["not_found"]:
error_string = f"These devices are not available: {result['not_found']}"
print(error_string)
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
print("All devices are available for deployment")
ok, error_string = StoreDisclaimer2DB(form_data)
if ok == False:
payload = {'ok': ok, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
in_db_user_name, user_id, password_in_db, role_ids, priviledges = AccountByEmailExists(email)
if user_id != None:
user_name = GenerateUserNameWithContext(in_db_user_name, form_data['firstName'], form_data['lastName'], user_id)
else:
user_name = GenerateUserNameWithContext(user_name, form_data['firstName'], form_data['lastName'], 0)
if (password == None or password == ""):
if password_in_db != None:
password = password_in_db
else:
password = CreatePassword(12)
#user_id = "0" #user that is adding this record. New user so "0"
#For now we will assume that a person is unique by first and last name
#user_id = PersonInDB(form_data['firstName']+" "+form_data['lastName'])
#let's create a new account for this caretaker
#let's supplement form_data with the fields the existing StoreCaretaker2DB function expects
editing_user_id = user_id #specify if editing existing user, otherwise "0"
form_data['role_ids'] = "2"
form_data['access_to_deployments'] = ""
#form_data['email'] = "" #this one matches
form_data['new_user_name'] = user_name
form_data['first_name'] = form_data['firstName']
form_data['last_name'] = form_data['lastName']
form_data['address_street'] = ""
form_data['address_city'] = ""
form_data['address_zip'] = ""
form_data['address_state'] = ""
form_data['address_country'] = ""
form_data['phone_number'] = form_data['phone']
form_data['picture'] = "/"
form_data['key'] = password
#logger.debug(f"editing_user_id= {editing_user_id} user_id= {user_id}")
ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
if ok == 1:
#~/mqtt-auth-service/acl_manager.py is invoked there, because whatever warrants an email (a change in user_name, password or devices) also warrants an ACL update
SendWelcomeCaretakerEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'], form_data['signature'])
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
else:
privileges, user_id = ValidUser(user_name, password)
if user_id == "0": #bad password
error_string = f"Password does not match user {user_name}"
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
error_string = f"Error: {result}"
payload = {'ok': 0, 'error': error_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "beneficiary_form":
editing_user_id = form_data.get('editing_user_id')
email = form_data.get('email')
user_id = GetUserId(user_name)
if "@" in email:
beneficiary_id, if_new = StoreBeneficiary2DB(form_data, editing_user_id, user_id)
if beneficiary_id != 0:
payload = {'ok': 1, 'beneficiary_id': beneficiary_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
payload = {'ok': 0, 'beneficiary_id': beneficiary_id}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
else:
payload = {'ok': 0, 'beneficiary_id': 0}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "beneficiary_delete":
ok = DeleteRecordFromDB(form_data)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "beneficiaries_list":
result_list = []
first_s = form_data.get('first')
last_s = form_data.get('last')
try:
first = int(first_s)
except (TypeError, ValueError):
first = 0
try:
last = int(last_s)
except (TypeError, ValueError):
last = 1000000
user_id = form_data.get('user_id')
all_beneficiaries = ListBeneficiaries(privileges, user_id)
cnt = 0
for beneficiary in all_beneficiaries:
cnt += 1
if cnt >= first:
beneficiary_min_object = {"user_id": beneficiary[0], "email": beneficiary[3], "first_name": beneficiary[5], "last_name": beneficiary[6]}
result_list.append(beneficiary_min_object)
if cnt > last:
break
payload = {'result_list': result_list}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "activities_report_details":
deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
filterr = form_data.get('filter')
if filterr == None:
filterr = 6
else:
filterr = int(filterr)
refresh = form_data.get('refresh') == "1"
ddate = current_date_at_tz(time_zone_s)
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 24 * 3600 - 1
devices_list, device_ids = GetProximityList(deployment_id, timee)
#Here we need to add per day: (all based on Z-graph data!)
#Bathroom visits number
#Bathroom time spent
#Sleep wake-ups count (breaks in the Z-graph during the 10 PM to 9 AM period)
#Sleep length (for now, sum of all time seen in the bedroom)
#Kitchen visits number
#Kitchen time spent
#Most frequented room visits number
#Most frequented room time spent
#Let's find the device_id of the bathroom sensor
bathroom_device_id, location_ba, bathroom_well_id = FindDeviceByRole(deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
bedroom_device_id, location_be, bedroom_well_id = FindDeviceByRole(deployment_id, ["Bedroom Master", "Bedroom", "Bedroom Guest"])
kitchen_device_id, location_ke, kitchen_well_id = FindDeviceByRole(deployment_id, ["Kitchen"])
most_present_device_id, location_ot, most_present_well_id = FindDeviceByRole(deployment_id, []) #this will find the most-present device (as defined in another field of the device record)
if isinstance(location_ot, int):
other_location = location_names[location_ot]
else:
other_location = location_ot
#weekly
week_dates = get_week_days_and_dates(7, time_zone_s)
month_dates = get_week_days_and_dates(30, time_zone_s)
six_months_dates = get_week_days_and_dates(180, time_zone_s)
other_color = Loc2Color[other_location][0]
rgb_string = f"rgb({other_color[0]}, {other_color[1]}, {other_color[2]})"
rooms_reports = [("Bathroom", "blue", bathroom_device_id, bathroom_well_id), ("Bedroom", "green", bedroom_device_id, bedroom_well_id), ("Kitchen", "red", kitchen_device_id, kitchen_well_id), (other_location, rgb_string, most_present_device_id, most_present_well_id)]
six_months_report = []
for room_details in rooms_reports:
device_id = room_details[2]
if device_id > 0:
well_id = room_details[3]
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
room = {"name": room_details[0],"color": room_details[1]}
data = []
for day_activity in six_months_dates:
datee = day_activity[0]
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, time_zone_s, radar_threshold_group_st)
if hours > 18:
print("Too long 6m!!!", device_id, well_id, datee, filterr, refresh, time_zone_s, radar_threshold_group_st)
data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
data.append(data_record)
room["data"] = data
six_months_report.append(room)
weekly_report = []
for room_details in rooms_reports:
device_id = room_details[2]
if device_id > 0:
well_id = room_details[3]
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
room = {"name": room_details[0],"color": room_details[1]}
data = []
for day_activity in week_dates:
datee = day_activity[0]
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, time_zone_s, radar_threshold_group_st)
data_record = { "title": day_activity[1], "events": events_count, "hours": hours}
data.append(data_record)
room["data"] = data
weekly_report.append(room)
monthly_report = []
for room_details in rooms_reports:
device_id = room_details[2]
if device_id > 0:
well_id = room_details[3]
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
room = {"name": room_details[0],"color": room_details[1]}
data = []
for day_activity in month_dates:
datee = day_activity[0]
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, time_zone_s, radar_threshold_group_st)
#if datee == "2025-05-20" and device_id == 572:
# print(hours)
if hours > 18:
print("Too long m!!!", device_id, well_id, datee, filterr, refresh, time_zone_s, radar_threshold_group_st)
data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
data.append(data_record)
room["data"] = data
monthly_report.append(room)
#The rooms lists below are placeholders; they are replaced with the computed weekly / monthly / 6-month
#reports right after this literal. Each data record has the shape {"title": ..., "events": ..., "hours": ...}.
result_dictionary = {
"alert_text": "No alert",
"alert_color": "bg-green-100 text-green-700",
"chart_data": [
{ "name": "Weekly", "rooms": [] },
{ "name": "Monthly", "rooms": [] },
{ "name": "6 Months", "rooms": [] }
]
}
result_dictionary["chart_data"][0]["rooms"] = weekly_report
result_dictionary["chart_data"][1]["rooms"] = monthly_report
result_dictionary["chart_data"][2]["rooms"] = six_months_report
payload = result_dictionary #{'result_dictionary': result_dictionary}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
AddToLog(payload)
return
elif function == "dashboard_list":
# works in UTC only
logger.error(f"------------------------------- dashboard_list ------------------------------------------")
caretaker = user_name
#date_s = form_data.get('date')
time_s = form_data.get('time')
date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
filterr = form_data.get('filter')
if filterr == None:
filterr = 5
privileges = GetPriviledgesOnly(caretaker)
deployments_list = []
if privileges != '':
deployments_list = GetUsersFromDeployments(privileges)
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
#AddToLog(all_beneficiaries)
result_list = []
for deployment_id, first_name, last_name in deployments_list:
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
if details != {}:
details["units"] = "°C"
if "America" in details["time_zone"] or "US/" in details["time_zone"]:
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
details["units"] = "°F"
devices_list, device_ids = GetProximityList(deployment_id, date_s)
# convert dates back to UTC
#details['bathroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
#details['kitchen_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
#details['bedroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bedroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
#details['last_detected_time'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['last_detected_time'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
location_list = []
for room_details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
if description == None or description == "":
location_list.append(location_name)
else:
location_list.append(location_name + " " + description)
details["deployment_id"] = deployment_id
details["location_list"] = location_list
result_list.append(details)
payload = {'result_list': result_list}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
AddToLog(payload)
return
elif function == "dashboard_single":
caretaker = user_name
#date_s = form_data.get('date')
date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
deployment_id = form_data.get('deployment_id')
filterr = form_data.get('filter')
if filterr == None:
filterr = 5
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
#AddToLog(all_beneficiaries)
result_list = []
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr, False)
details["units"] = "°C"
if "America" in details["time_zone"] or "US/" in details["time_zone"]:
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
details["bedroom_temperature"] = CelsiusToFahrenheit(details["bedroom_temperature"])
details["units"] = "°F"
devices_list, device_ids = GetProximityList(deployment_id, date_s)
location_list = []
for room_details in devices_list:
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
if description == None or description == "":
location_list.append(location_name)
else:
location_list.append(location_name + " " + description)
details["deployment_id"] = deployment_id
details["location_list"] = location_list
settings = {"wellness_score": False, "last_seen": False, "sleep_report": True, "activity_report": True, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
details["settings"] = settings
result_list.append(details)
payload = {'result_list': result_list}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
AddToLog(payload)
return
elif function == "request_node_red":
logger.error(f"------------------------------- {function} ------------------------------------------")
#this will:
# 1.prepare folder and settings.js
# 2.start a node-red instance and return its port
#caretaker = user_name
#date_s = form_data.get('date')
time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
#deployment_id = form_data.get('deployment_id')
#redis_conn.set('node_red_requests', str([radar_threshold_signal, radar_threshold_value]))
# Hashes (dictionaries)
logger.error(f"Storing to node_red_requests {user_name}")
redis_conn.hset('node_red_requests', mapping={
'user_name': user_name,
'token': token,
'time': time_s,
'requests': 1
})
payload = {'ok': 1}
logger.error(f"Responding {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "get_node_red_port":
#this will:
# 1.look up this user's node_red_status record in Redis
# 2.return the port of the user's node-red instance (0 if none is running)
hash_data = GetRedisMap(f'node_red_status_{user_name}')
port = 0
if hash_data != {}:
port = hash_data['port']
#date_s = form_data.get('date')
#date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
#deployment_id = form_data.get('deployment_id')
payload = {'port': port}
logger.debug(f"get_node_red_port: {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "activity_detected":
#this will:
# 1.store the time of the last activity in Redis
time_s = form_data.get('time')
hash_data = GetRedisMap(f'node_red_status_{user_name}')
port = 0
if hash_data != {}:
port = hash_data['port']
redis_conn.hset(f'node_red_status_{user_name}', mapping={
'port': port,
'last_activity': time_s
})
payload = {'ok': 1}
logger.debug(f"activity_detected: {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "store_flow":
#this will:
# 1.store flow into DB
time_s = form_data.get('time')
flow_json = form_data.get('flow')
logger.debug(f"store_flow: {flow_json}")
StoreFlow2DB(user_name, time_s, flow_json)
payload = {'ok': 1}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "store_alarms":
#this will:
# 1.store deployment and device alarm settings into the DB, cache them in Redis and queue a 'new_alarms' record
deployment_id = form_data.get('deployment_id')
device_id = form_data.get('device_id')
deployment_alarms_json = form_data.get('deployment_alarms')
device_alarms_json = form_data.get('device_alarms')
logger.debug(f"store_alarms: {deployment_alarms_json}")
if privileges == "-1" or deployment_id in privileges:
ok = StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json)
redis_conn.set('alarm_device_settings_'+device_id, device_alarms_json)
redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
# Create record dictionary
record = {
'user_name': user_name,
'deployment_id': deployment_id,
'device_id': device_id
}
# Convert dictionary to JSON string for storage in Redis list
record_json = json.dumps(record)
# Add to queue (list) - lpush adds to the left/front of the list
redis_conn.lpush('new_alarms', record_json)
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
else:
payload = {'ok': 0, 'error': "not allowed"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
elif function == "send_walarm":
# Extract data from form
deployment_id = form_data.get('deployment_id')
device_id = form_data.get('device_id')
location = form_data.get('location')
method = form_data.get('method')
feature = form_data.get('feature')
currentAlertTableMode = form_data.get('currentAlertTableMode')
time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
content = form_data.get('content')
enabledCellContent = form_data.get('enabledCellContent')
currentUnits = form_data.get('currentUnits')
test_only = form_data.get('test_only')
action = form_data.get('action')
logger.debug(f"send_requests: {user_name}")
# Create record dictionary
record = {
'user_name': user_name,
'deployment_id': deployment_id,
'location': location,
'method': method,
'feature': feature,
'currentAlertTableMode': currentAlertTableMode,
'time': time_s,
'content': content,
'currentUnits': currentUnits,
'test_only': test_only,
'action': action,
'enabledCellContent': enabledCellContent
}
# Convert dictionary to JSON string for storage in Redis list
record_json = json.dumps(record)
# Add to queue (list) - lpush adds to the left/front of the list
redis_conn.lpush('send_requests', record_json)
payload = {'ok': 1}
logger.error(f"Responding {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
elif function == "node-red_deployed":
#this will:
# 1.record in Redis that this user's node-red flow was deployed
time_s = form_data.get('time')
logger.debug(f"node-red_deployed: {user_name}")
redis_conn.hset('node-red_deployed', mapping={
'user_name': user_name,
'token': token,
'time': time_s,
'requests': 1
})
payload = {'ok': 1}
logger.error(f"Responding {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
else:
debug_string = "Error: function not recognized!"
AddToLog(debug_string)
payload = {'ok': 0, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
except Exception as e:
print(traceback.format_exc())
resp.media = package_response(f"Error: {str(e)} {traceback.format_exc()}", HTTP_500)
except Exception as e:
logger.error(f"Error in on_post: {e}")
resp.status = falcon.HTTP_500
resp.media = {"error": "Internal server error"}
def on_put(self, req, resp, path=""):
"""Handle PUT requests"""
if path == "users":
logger.info("PUT request to users endpoint")
try:
# Parse the request body
request_data = json.loads(req.stream.read().decode('utf-8'))
# TODO: Implement user update logic
resp.status = HTTP_200
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"id": request_data.get("id"), "message": "User updated"})
except json.JSONDecodeError:
resp.status = HTTP_400
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"error": "Invalid JSON"})
else:
resp.media = package_response(f"PUT to /{path} not implemented", HTTP_400)
def on_delete(self, req, resp, path=""):
"""Handle DELETE requests"""
if path == "users":
logger.info("DELETE request to users endpoint")
resp.status = HTTP_200
resp.content_type = falcon.MEDIA_JSON
resp.text = json.dumps({"message": "User deleted"})
else:
resp.media = package_response(f"DELETE to /{path} not implemented", HTTP_400)
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
def parse_devices_field(devices_str):
"""
Parse the devices field which may be a stringified JSON array.
"""
if not devices_str:
return []
try:
# Try direct parse
return ast.literal_eval(devices_str)
except (ValueError, SyntaxError):
try:
# Try parsing as JSON (json is already imported at module level)
return json.loads(devices_str)
except (ValueError, TypeError):
# Last resort: manually unescape
unescaped = devices_str.replace('\\"', '"')
return ast.literal_eval(unescaped)
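#Illustrative usage sketch (never called anywhere): the two input shapes parse_devices_field
#is meant to accept. The MAC strings below are made-up sample values.
def _demo_parse_devices_field():
    plain = parse_devices_field('["64B70888F6F0", "64B70888F6F1"]')            # direct literal parse
    escaped = parse_devices_field('[\\"64B70888F6F0\\", \\"64B70888F6F1\\"]')  # falls through to the unescape path
    print(plain, escaped)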
def normalize_input(data):
"""
Normalize input to a list of rows (each row is a list).
Handles:
- Single row as list: [215,510,"Bedroom",...]
- Single row as string: '215,510,"Bedroom",...'
- Multiple rows: [[215,510,...], [218,521,...]]
"""
# If it's a string, parse it as a single row
if isinstance(data, str):
# Simple CSV parsing (good enough for this format); re is imported at module level
# Match quoted strings or non-comma sequences
parts = re.findall(r'"[^"]*"|\[.*?\]|[^,]+', data)
parts = [p.strip().strip('"') for p in parts if p.strip()]
return [parts]
# If it's a list
if isinstance(data, list):
if not data:
return []
# Check if it's a single row (first element is not a list)
if not isinstance(data[0], list):
return [data]
# It's already multiple rows
return data
return []
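#Illustrative usage sketch (never called anywhere): the three input shapes normalize_input handles.
def _demo_normalize_input():
    print(normalize_input([215, 510, "Bedroom"]))                              # single row as list -> [[215, 510, 'Bedroom']]
    print(normalize_input('215,510,"Bedroom"'))                                # single row as string -> [['215', '510', 'Bedroom']]
    print(normalize_input([[215, 510, "Bedroom"], [218, 521, "Bathroom"]]))    # multiple rows, returned unchanged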
def extract_device_and_well_ids(devices_list):
"""
Extract device_ids_list, well_ids_list, and devices from various input formats.
Returns:
tuple: (device_ids_list, well_ids_list, devices)
- devices: list of the full normalized rows (the per-row index-5 parsing is currently disabled)
"""
# Normalize input to list of rows
rows = normalize_input(devices_list)
device_ids_list = []
well_ids_list = []
devices = []
for row in rows:
# Ensure we have enough elements
if len(row) < 3:
continue
# Extract well_id (index 0) and device_id (index 1)
well_id = int(row[0]) if isinstance(row[0], (int, str)) and str(row[0]).isdigit() else row[0]
device_id = int(row[1]) if isinstance(row[1], (int, str)) and str(row[1]).isdigit() else row[1]
well_ids_list.append(str(well_id))
device_ids_list.append(str(device_id))
# Parse the devices field (index 5)
#devices_field = row[5] if len(row) > 5 else ""
#parsed_devices = parse_devices_field(devices_field)
devices.append(row)
return device_ids_list, well_ids_list, devices
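#Illustrative usage sketch (never called anywhere): extracting the id lists from one
#proximity-style row; the row layout mirrors the sample tuples used elsewhere in this file.
def _demo_extract_device_and_well_ids():
    rows = [[215, 510, "Bedroom", None, "64B70888F6F0", '["s3_max",12]']]
    device_ids, well_ids, devices = extract_device_and_well_ids(rows)
    print(device_ids, well_ids)  # -> ['510'] ['215']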
def ExtractTempOffset(calib_str):
parts = calib_str.split(",")
if len(parts) > 2:
return parts[2]
else:
return "-10"
def read_bounded_stream_data(request):
"""Read data from bounded stream using stream-specific properties"""
stream = request.bounded_stream
logger.debug(f"Stream properties: eof={getattr(stream, 'eof', None)}, "
f"stream_len={getattr(stream, 'stream_len', None)}, "
f"bytes_remaining={getattr(stream, '_bytes_remaining', None)}")
try:
# Method 1: Check if stream has length info
if hasattr(stream, 'stream_len') and stream.stream_len:
logger.debug(f"Stream has length: {stream.stream_len}")
data = stream.read(stream.stream_len)
logger.debug(f"Read {len(data)} bytes using stream_len")
return data
# Method 2: Check bytes remaining
if hasattr(stream, '_bytes_remaining') and stream._bytes_remaining:
logger.debug(f"Bytes remaining: {stream._bytes_remaining}")
data = stream.read(stream._bytes_remaining)
logger.debug(f"Read {len(data)} bytes using bytes_remaining")
return data
# Method 3: Try to exhaust the stream
if hasattr(stream, 'exhaust'):
logger.debug("Trying stream.exhaust()")
data = stream.exhaust()
logger.debug(f"Exhausted stream: {len(data) if data else 0} bytes")
return data or b''
# Method 4: Check if we can access underlying stream
if hasattr(stream, 'stream'):
logger.debug("Trying underlying stream")
underlying = stream.stream
data = underlying.read()
logger.debug(f"Read from underlying stream: {len(data)} bytes")
return data
logger.error("No viable method to read from bounded stream")
return b''
except Exception as e:
logger.error(f"Failed to read bounded stream: {e}")
return b''
def read_chunked_data(request):
"""Read chunked transfer data safely"""
try:
# Try different methods to read the data
data = b''
# Method 1: Try reading in small chunks with timeout
chunk_size = 8192
max_chunks = 1000 # Prevent infinite loops
chunks_read = 0
while chunks_read < max_chunks:
try:
chunk = request.bounded_stream.read(chunk_size)
if not chunk:
break
data += chunk
chunks_read += 1
# Log progress for large uploads
if len(data) % (chunk_size * 10) == 0:
logger.debug(f"Read {len(data)} bytes so far...")
except Exception as e:
logger.debug(f"Chunk read error after {len(data)} bytes: {e}")
break
return data
except Exception as e:
logger.error(f"Failed to read chunked data: {e}")
return b''
def FixDeploymentHistorySequence(connection) -> bool:
"""
Fix the sequence for deployment_history table if it's out of sync.
Args:
connection: Database connection object
Returns:
bool: True if sequence was fixed successfully, False otherwise
"""
try:
cursor = connection.cursor()
# Reset sequence to the correct value
cursor.execute("""
SELECT setval('deployment_history_id_seq',
(SELECT COALESCE(MAX(id), 0) FROM deployment_history));
""")
connection.commit()
cursor.close()
print("Deployment history sequence has been fixed")
return True
except psycopg2.Error as e:
print(f"Error fixing sequence: {e}")
connection.rollback()
return False
def StoreToDeploymentHistory(deployment_id: int, proximity: str) -> bool:
"""
Store a new entry to the deployment_history table with auto-generated ID and current epoch time.
Args:
deployment_id: The deployment ID to store
proximity: The proximity data to store
Returns:
bool: True if the record was successfully inserted, False otherwise
"""
try:
connection = get_db_connection()
cursor = connection.cursor()
# Get current epoch time
current_epoch_time = time.time()
# Insert query - id is auto-generated by the sequence, we provide deployment_id, time, and proximity
query = """
INSERT INTO deployment_history (deployment_id, time, proximity)
VALUES (%s, %s, %s)
"""
# Execute the query with parameters
cursor.execute(query, (deployment_id, current_epoch_time, proximity))
# Commit the transaction
connection.commit()
cursor.close()
print(f"Successfully inserted record: deployment_id={deployment_id}, time={current_epoch_time}, proximity='{proximity}'")
return True
except psycopg2.IntegrityError as e:
if "duplicate key value violates unique constraint" in str(e):
print("Sequence appears to be out of sync. Attempting to fix...")
connection.rollback()
# Try to fix the sequence
if FixDeploymentHistorySequence(connection):
# Retry the insertion after fixing sequence
try:
cursor = connection.cursor()
current_epoch_time = time.time()
cursor.execute(query, (deployment_id, current_epoch_time, proximity))
connection.commit()
cursor.close()
print(f"Successfully inserted record after sequence fix: deployment_id={deployment_id}")
return True
except Exception as retry_error:
print(f"Failed to insert even after sequence fix: {retry_error}")
connection.rollback()
return False
else:
print("Failed to fix sequence")
return False
else:
print(f"Database integrity error in StoreToDeploymentHistory: {e}")
connection.rollback()
return False
except psycopg2.Error as e:
print(f"Database error in StoreToDeploymentHistory: {e}")
connection.rollback()
return False
except Exception as e:
print(f"Unexpected error in StoreToDeploymentHistory: {e}")
connection.rollback()
return False
def ListsSame(s1: str, s2: str) -> bool:
"""
Compare two JSON strings containing MAC address lists and return True if they contain
the same MAC addresses regardless of order, False if they differ.
Args:
s1: JSON string containing list of MAC addresses
s2: JSON string containing list of MAC addresses
Returns:
bool: True if both lists contain the same MAC addresses (ignoring order),
False if they contain different MAC addresses or if there's an error parsing
"""
try:
# Parse both JSON strings into Python lists
list1 = json.loads(s1)
list2 = json.loads(s2)
# Convert lists to sets for order-independent comparison
set1 = set(list1)
set2 = set(list2)
# Return True if sets are equal (same elements regardless of order)
return set1 == set2
except (json.JSONDecodeError, TypeError) as e:
print(f"Error parsing JSON strings: {e}")
return False
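#Illustrative usage sketch (never called anywhere): ListsSame ignores ordering but not membership.
def _demo_ListsSame():
    a = '["64B70888F6F0", "64B70888F6F1"]'
    b = '["64B70888F6F1", "64B70888F6F0"]'
    c = '["64B70888F6F1"]'
    print(ListsSame(a, b))  # True  - same MACs, different order
    print(ListsSame(a, c))  # False - membership differs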
def WellIDs2MAC(devices_details_str: str) -> str:
"""
Extract well_ids from devices_details_str and return corresponding device_mac values as JSON string.
Args:
devices_details_str: JSON string containing device details in format ["215:Bedroom ", "218:Bathroom ", ...]
connection: Database connection object (psycopg2 connection)
Returns:
JSON string containing list of device_mac values like ["64B7088908BC", "64B70889043C", ...]
"""
try:
# Parse the JSON string to get the list
devices_list = json.loads(devices_details_str)
# Extract well_ids from each entry (everything before the colon)
well_ids = []
for device_entry in devices_list:
# Split by colon and take the first part, then convert to int
well_id = int(device_entry.split(':')[0])
well_ids.append(well_id)
# Query database for device_mac values
with get_db_connection() as conn:
with conn.cursor() as cur:
# Use parameterized query to avoid SQL injection
placeholders = ','.join(['%s'] * len(well_ids))
query = f"SELECT device_mac FROM devices WHERE well_id IN ({placeholders}) ORDER BY well_id"
cur.execute(query, well_ids)
results = cur.fetchall()
# Extract device_mac values from results
device_macs = [row[0] for row in results]
cur.close()
# Return as JSON string
return json.dumps(device_macs)
except (json.JSONDecodeError, ValueError, psycopg2.Error) as e:
print(f"Error in WellIDs2MAC: {e}")
return "[]"
def ConvertToMapString(my_string):
array = json.loads(my_string)
# Split each element and create dictionary
result_dict = {}
for item in array:
key, value = item.split('|', 1) # Split on first pipe only
result_dict[key] = value
# Convert back to JSON string
return json.dumps(result_dict)
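#Illustrative usage sketch (never called anywhere): ConvertToMapString turns a JSON list of
#"key|value" entries into a JSON object keyed on the part before the first pipe.
def _demo_ConvertToMapString():
    print(ConvertToMapString('["street|Main St", "city|Springfield"]'))
    # -> '{"street": "Main St", "city": "Springfield"}'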
def GetIfThere(address_map, key):
address_parts = address_map["parsed_components"]
if key in address_parts:
return address_parts[key]
else:
return ""
def StoreFile2Blob(source_file_name, destination_file_name, path_to_use):
"""
Store any file from local disk to MinIO blob storage
Args:
source_file_name (str): Path to the source file on disk
destination_file_name (str): Name to store the file as in blob storage
path_to_use (str): Bucket name/path in MinIO (e.g., "user-pictures")
Returns:
bool: True if successful, False otherwise
"""
try:
# Check if source file exists
if not os.path.exists(source_file_name):
raise Exception(f"Source file does not exist: {source_file_name}")
# Get file size
file_size = os.path.getsize(source_file_name)
AddToLog(f"File size: {file_size} bytes")
# Open and read the file in binary mode
with open(source_file_name, 'rb') as file:
# Store in MinIO blob storage
miniIO_blob_client.put_object(
path_to_use, # Bucket name (e.g., "user-pictures")
destination_file_name, # Object name in bucket
file, # File object
file_size # File size
)
AddToLog(f"Successfully stored {source_file_name} as {destination_file_name} in {path_to_use}")
return True
except Exception as e:
AddToLog(f"{traceback.format_exc()}")
logger.error(f"{traceback.format_exc()}")
return False
def parse_multipart_data_manual(body_data, boundary):
"""Manually parse multipart form data"""
form_data = {}
files = {}
# Split on boundary
parts = body_data.split(b'--' + boundary)
for part in parts[1:-1]: # Skip first empty and last closing parts
if not part.strip():
continue
# Split headers from content
header_end = part.find(b'\r\n\r\n')
if header_end == -1:
continue
headers = part[:header_end].decode('utf-8', errors='ignore')
content = part[header_end + 4:]
# Remove trailing CRLF
if content.endswith(b'\r\n'):
content = content[:-2]
# Parse Content-Disposition header
if 'Content-Disposition: form-data;' in headers:
# Extract field name
name_start = headers.find('name="') + 6
name_end = headers.find('"', name_start)
if name_start < 6 or name_end == -1:
continue
field_name = headers[name_start:name_end]
# Check if it's a file
if 'filename=' in headers:
# Extract filename
filename_start = headers.find('filename="') + 10
filename_end = headers.find('"', filename_start)
filename = headers[filename_start:filename_end] if filename_start >= 10 and filename_end != -1 else 'unknown'
files[field_name] = {
'filename': filename,
'data': content,
'size': len(content)
}
else:
# Text field
form_data[field_name] = content.decode('utf-8', errors='ignore')
return form_data, files
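#Illustrative usage sketch (never called anywhere): feeding a hand-built multipart body through
#the manual parser; the boundary and field value are arbitrary sample data.
def _demo_parse_multipart_data_manual():
    boundary = b'XBOUNDARY'
    body = (b'--XBOUNDARY\r\n'
            b'Content-Disposition: form-data; name="email"\r\n\r\n'
            b'user@example.com\r\n'
            b'--XBOUNDARY--\r\n')
    fields, files = parse_multipart_data_manual(body, boundary)
    print(fields)  # {'email': 'user@example.com'}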
def debug_multipart_info(request):
"""Debug multipart request info"""
info = {
"content_type": getattr(request, 'content_type', 'None'),
"content_length": getattr(request, 'content_length', 'None'),
"headers": dict(request.headers) if hasattr(request.headers, 'items') else str(request.headers),
"method": getattr(request, 'method', 'None')
}
logger.debug(f"Debug multipart info: {info}")
return info
def handle_multipart_request(request):
"""Handle multipart form data request for your framework"""
debug_multipart_info(request)
try:
# Get the boundary from content type - try different ways to access headers
content_type = ''
# Try multiple ways to get the content type
if hasattr(request, 'content_type') and request.content_type:
content_type = request.content_type
elif hasattr(request.headers, 'get'):
content_type = request.headers.get('CONTENT-TYPE', '')
elif 'CONTENT-TYPE' in request.headers:
content_type = request.headers['CONTENT-TYPE']
print(f"Content-Type: {content_type}")
if 'boundary=' not in content_type:
print("No boundary found, trying alternative header access...")
# Debug: print all headers to see the structure
print(f"Headers type: {type(request.headers)}")
print(f"Headers: {request.headers}")
return {"error": "No boundary found in multipart request"}
boundary = content_type.split('boundary=')[1]
print(f"Found boundary: {boundary}")
# Convert to bytes
boundary = boundary.encode()
# Get content length
content_length = request.content_length
if content_length is None:
# Try different ways to get content length
if hasattr(request.headers, 'get'):
content_length = int(request.headers.get('CONTENT-LENGTH', 0))
elif 'CONTENT-LENGTH' in request.headers:
content_length = int(request.headers['CONTENT-LENGTH'])
else:
content_length = 0
print(f"Content length: {content_length}")
if content_length > 50 * 1024 * 1024: # 50MB limit
return {"error": "Request too large"}
if content_length == 0:
return {"error": "No content"}
# Read the raw body data safely
body_data = b''
bytes_read = 0
chunk_size = 8192
print("Starting to read body data...")
while bytes_read < content_length:
remaining = content_length - bytes_read
to_read = min(chunk_size, remaining)
chunk = request.bounded_stream.read(to_read)
if not chunk:
print(f"No more data at {bytes_read} bytes")
break
body_data += chunk
bytes_read += len(chunk)
if bytes_read % (chunk_size * 10) == 0: # Log every 80KB
print(f"Read {bytes_read}/{content_length} bytes")
print(f"Finished reading {len(body_data)} bytes of multipart data")
# Parse multipart data
form_data, files = parse_multipart_data_manual(body_data, boundary)
print(f"Parsed form fields: {list(form_data.keys())}")
print(f"Parsed files: {list(files.keys())}")
# Handle photo file
if 'beneficiary_photo' in files:
photo = files['beneficiary_photo']
photo_data = photo['data']
print(f"Received photo: {photo['filename']}, {len(photo_data)} bytes")
# Validate and save JPEG
if photo_data[:3] == b'\xff\xd8\xff' and photo_data.endswith(b'\xff\xd9'):
with open(photo['filename'], 'wb') as f:
f.write(photo_data)
print("✅ Multipart photo saved successfully")
else:
print(f"❌ Invalid JPEG data - starts with: {photo_data[:10].hex()}")
return {"status": "success", "form_data": form_data, "files": files, "filename": photo['filename']}
except Exception as e:
print(f"Error processing multipart request: {e}")
traceback.print_exc()
return {"error": str(e)}
def parse_multipart_data(request_body, content_type):
"""Parse multipart data - returns (form_data, files)"""
# Extract boundary
boundary = content_type.split('boundary=')[1].encode()
# Split on boundary
parts = request_body.split(b'--' + boundary)
form_data = {}
files = {}
for part in parts[1:-1]: # Skip first empty and last closing parts
if not part.strip():
continue
# Split headers from content
header_end = part.find(b'\r\n\r\n')
if header_end == -1:
continue
headers = part[:header_end].decode('utf-8')
content = part[header_end + 4:]
# Parse Content-Disposition header
if 'Content-Disposition: form-data;' in headers:
# Extract field name
name_start = headers.find('name="') + 6
name_end = headers.find('"', name_start)
field_name = headers[name_start:name_end]
# Check if it's a file
if 'filename=' in headers:
# Extract filename
filename_start = headers.find('filename="') + 10
filename_end = headers.find('"', filename_start)
filename = headers[filename_start:filename_end]
# Remove trailing CRLF
if content.endswith(b'\r\n'):
content = content[:-2]
files[field_name] = {
'filename': filename,
'data': content,
'size': len(content)
}
else:
# Text field - remove trailing CRLF
if content.endswith(b'\r\n'):
content = content[:-2]
form_data[field_name] = content.decode('utf-8')
return form_data, files
def quick_fix_base64_photo(form_data, file_name):
"""Fix corrupted Base64 photo from URL-encoded form"""
photo_b64 = form_data.get('beneficiary_photo_data')
if not photo_b64:
return False
try:
# Remove whitespace corruption
clean = re.sub(r'\s+', '', photo_b64)
# Fix padding
missing_padding = len(clean) % 4
if missing_padding:
clean += '=' * (4 - missing_padding)
# Decode and validate
image_data = base64.b64decode(clean)
if image_data[:3] == b'\xff\xd8\xff':
with open(file_name, 'wb') as f:
f.write(image_data)
print(f"Base64 photo {file_name} fixed and saved: {len(image_data)} bytes")
return True
else:
logger.error("Invalid JPEG header in Base64 data")
return False
except Exception as e:
logger.error(f"Base64 photo fix failed: {e}")
return False
#def debug_received_data(form_data):
#"""Debug what we actually received"""
#beneficiary_photo_b64 = form_data.get('beneficiary_photo')
#if not beneficiary_photo_b64:
#print("❌ No photo data received")
#return None
#print("=== PYTHON RECEIVE DEBUGGING ===")
#print(f"Received base64 length: {len(beneficiary_photo_b64)}")
#print(f"First 50 chars: {beneficiary_photo_b64[:50]}")
#print(f"Last 50 chars: {beneficiary_photo_b64[-50:]}")
## Check for whitespace
#whitespace_count = len(beneficiary_photo_b64) - len(beneficiary_photo_b64.replace(' ', '').replace('\n', '').replace('\r', '').replace('\t', ''))
#print(f"Whitespace characters: {whitespace_count}")
## Clean and check length
#clean_b64 = beneficiary_photo_b64.replace(' ', '').replace('\n', '').replace('\r', '').replace('\t', '')
#print(f"Clean base64 length: {len(clean_b64)}")
#print(f"Clean mod 4: {len(clean_b64) % 4}")
## Calculate expected original size
#expected_bytes = (len(clean_b64) * 3) // 4
#print(f"Expected decoded size: {expected_bytes} bytes")
## Try to decode with minimal processing
#try:
## Just fix padding without removing characters
#missing_padding = len(clean_b64) % 4
#if missing_padding:
#padded_b64 = clean_b64 + '=' * (4 - missing_padding)
#else:
#padded_b64 = clean_b64
#print(f"After padding: {len(padded_b64)} chars")
#decoded_bytes = base64.b64decode(padded_b64)
#print(f"✅ Decoded successfully: {len(decoded_bytes)} bytes")
## Check first and last bytes
#if len(decoded_bytes) > 10:
#first_bytes = ' '.join(f'{b:02x}' for b in decoded_bytes[:10])
#last_bytes = ' '.join(f'{b:02x}' for b in decoded_bytes[-10:])
#print(f"First 10 bytes: {first_bytes}")
#print(f"Last 10 bytes: {last_bytes}")
## Calculate hash for comparison
#data_hash = hashlib.md5(decoded_bytes).hexdigest()
#print(f"MD5 hash: {data_hash}")
## Save raw decoded data
#with open('raw_decoded.jpg', 'wb') as f:
#f.write(decoded_bytes)
#print("Saved raw decoded data to raw_decoded.jpg")
## Try to analyze the structure
#if decoded_bytes[:3] == b'\xff\xd8\xff':
#print("✅ Valid JPEG header")
## Look for JPEG end marker
#if decoded_bytes.endswith(b'\xff\xd9'):
#print("✅ Valid JPEG end marker")
#else:
## Find where JPEG data actually ends
#last_ffd9 = decoded_bytes.rfind(b'\xff\xd9')
#if last_ffd9 != -1:
#print(f"⚠️ JPEG end marker found at position {last_ffd9}, but file continues for {len(decoded_bytes) - last_ffd9 - 2} more bytes")
## Try extracting just the JPEG part
#jpeg_only = decoded_bytes[:last_ffd9 + 2]
#with open('jpeg_extracted.jpg', 'wb') as f:
#f.write(jpeg_only)
#print(f"Extracted JPEG part ({len(jpeg_only)} bytes) to jpeg_extracted.jpg")
#else:
#print("❌ No JPEG end marker found anywhere")
#else:
#print("❌ Invalid JPEG header")
#return decoded_bytes
#except Exception as e:
#print(f"❌ Decode failed: {e}")
#return None
def fix_incomplete_jpeg(image_bytes):
"""Add missing JPEG end marker if needed"""
print(f"Original image size: {len(image_bytes)} bytes")
print(f"Ends with: {image_bytes[-10:].hex()}")
# Check if JPEG end marker is present
if not image_bytes.endswith(b'\xff\xd9'):
print("❌ Missing JPEG end marker, adding it...")
fixed_bytes = image_bytes + b'\xff\xd9'
print(f"Fixed image size: {len(fixed_bytes)} bytes")
# Test if this fixes the image
try:
with Image.open(io.BytesIO(fixed_bytes)) as img:
img.load() # Force load to verify
print(f"✅ Successfully repaired JPEG: {img.format} {img.size}")
return fixed_bytes
except Exception as e:
print(f"Adding end marker didn't work: {e}")
return image_bytes
def robust_base64_decode_v3(base64_string):
"""Fixed version that handles the padding correctly"""
print(f"Original string length: {len(base64_string)}")
# Clean the string
clean_string = re.sub(r'\s+', '', base64_string)
print(f"After cleaning: {len(clean_string)}")
# The issue is with padding calculation
# Let's try different approaches
strategies = []
# Strategy 1: Remove characters until we get valid length
for remove_count in range(10):
test_string = clean_string[:-remove_count] if remove_count > 0 else clean_string
# Calculate proper padding
missing_padding = len(test_string) % 4
if missing_padding:
padded_string = test_string + '=' * (4 - missing_padding)
else:
padded_string = test_string
strategies.append((f"Remove {remove_count}, pad to {len(padded_string)}", padded_string))
for strategy_name, test_string in strategies:
try:
print(f"Trying {strategy_name}")
# Decode
image_bytes = base64.b64decode(test_string)
print(f" Decoded to {len(image_bytes)} bytes")
# Check JPEG header
if image_bytes[:3] == b'\xff\xd8\xff':
print(f" ✅ Valid JPEG header")
# Try to fix missing end marker
fixed_bytes = fix_incomplete_jpeg(image_bytes)
# Final validation
try:
with Image.open(io.BytesIO(fixed_bytes)) as img:
img.load()
print(f" ✅ Valid complete JPEG: {img.format} {img.size}")
return fixed_bytes
except Exception as e:
print(f" ❌ Still invalid: {e}")
continue
else:
print(f" ❌ Invalid JPEG header: {image_bytes[:10].hex()}")
except Exception as e:
print(f" {strategy_name} failed: {e}")
return None
def fix_base64_padding(base64_string):
"""
Fix base64 padding issues
"""
# Remove whitespace
base64_string = re.sub(r'\s+', '', base64_string)
# Calculate missing padding
missing_padding = len(base64_string) % 4
if missing_padding == 1:
# If 1 character short, this is likely corrupted data
# Try removing the last character and then pad
base64_string = base64_string[:-1]
missing_padding = len(base64_string) % 4
if missing_padding:
base64_string += '=' * (4 - missing_padding)
return base64_string
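#Illustrative usage sketch (never called anywhere): repairing a base64 string that lost its
#trailing "=" padding in transit.
def _demo_fix_base64_padding():
    truncated = base64.b64encode(b"wellness").decode().rstrip("=")  # 'd2VsbG5lc3M'
    repaired = fix_base64_padding(truncated)
    print(base64.b64decode(repaired))  # b'wellness'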
def GenerateUserNameWithContext(user_name, first_name: str, last_name: str, user_id) -> str:
"""
Generate a unique username with automatic cursor management.
Same functionality as GenerateUserName but with context manager for cursor.
If a potential username is found but matches the provided user_id, it can be used.
"""
connection = get_db_connection()
if not first_name or not last_name:
raise ValueError("Both first_name and last_name must be provided and non-empty")
first_letter = first_name.strip().lower()[0]
clean_last_name = last_name.strip().lower().replace(' ', '')
if user_name == None or user_name.strip() == "":
base_username = f"{first_letter}{clean_last_name}"
base_username = ''.join(c for c in base_username if c.isalnum())
else:
base_username = user_name
def is_username_available(cursor, username):
"""Check if username is available or belongs to the current user_id"""
cursor.execute(
"SELECT user_id FROM public.person_details WHERE LOWER(user_name) = %s",
(username,)
)
result = cursor.fetchone()
if result is None:
# Username doesn't exist, it's available
return True
elif result[0] == user_id:
# Username exists but belongs to current user, it's available
return True
else:
# Username exists and belongs to different user, not available
return False
try:
with connection.cursor() as cursor:
# Check base username
if is_username_available(cursor, base_username):
return base_username
# Find next available username
counter = 1
while counter <= 9999:
candidate_username = f"{base_username}{counter}"
if is_username_available(cursor, candidate_username):
return candidate_username
counter += 1
raise RuntimeError("Unable to generate unique username after 9999 attempts")
except psycopg2.Error as e:
raise RuntimeError(f"Database error occurred: {e}")
def SendWelcomeCaretakerEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature):
logger.error(f"Requesting welcome email to {email}")
queue_data = {
'function': "new_caretaker",
'email': email,
'user_name': user_name,
'first_name': first_name,
'last_name': last_name,
'devices': devices,
'phone_number': phone_number,
'password': password,
'signature': signature,
'requests': 1,
'timestamp': time.time() # Optional: add timestamp
}
redis_conn.xadd('messaging_requests_stream', queue_data)
def SendWelcomeBeneficiaryEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature):
logger.error(f"Requesting welcome beneficiary email to {email}")
queue_data = {
'function': "new_beneficiary",
'email': email,
'user_name': user_name,
'first_name': first_name,
'last_name': last_name,
'devices': devices,
'phone_number': phone_number,
'password': password,
'signature': signature,
'requests': 1,
'timestamp': time.time() # Optional: add timestamp
}
redis_conn.xadd('messaging_requests_stream', queue_data)
def SendCredentialsChangedEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature):
logger.error(f"Requesting credentials changed of beneficiary email to {email}")
queue_data = {
'function': "credentials_updated",
'email': email,
'user_name': user_name,
'first_name': first_name,
'last_name': last_name,
'devices': devices,
'phone_number': phone_number,
'password': password,
'signature': signature,
'requests': 1,
'timestamp': time.time() # Optional: add timestamp
}
redis_conn.xadd('messaging_requests_stream', queue_data)
def CallICLUpdate():
logger.error(f"Requesting ACL update")
queue_data = {
'function': "update_acl",
'requests': 1,
'timestamp': time.time() # Optional: add timestamp
}
redis_conn.xadd('messaging_requests_stream', queue_data)
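#Illustrative sketch (never called here): the messaging service that consumes these requests is
#assumed to be a separate process; a minimal reader over the same stream, using the redis-py
#client configured above, might look like this.
def _demo_read_messaging_requests():
    entries = redis_conn.xread({'messaging_requests_stream': '0'}, count=10)
    for stream_name, messages in entries:
        for message_id, fields in messages:
            print(stream_name, message_id, fields.get('function', fields.get(b'function')))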
def DevicesNotUsed(devices, user_name):
"""
Check if devices exist in DB and if they are already deployed (excluding user's own deployments).
Args:
devices (str): Comma-separated string of well_id or device_mac values
user_name (str): Username to check access permissions
Returns:
tuple: (success_flag, result)
- If success: (1, {"deployed": [well_ids], "not_found": [device_identifiers]})
- If error: (0, error_string)
"""
if not devices:
return 1, {"deployed": [], "not_found": []}
# Clean and split the devices string
#device_list = [CleanObject(device.strip()) for device in devices.split(',') if device.strip()]
devices_clean = devices.strip('[]"').replace('"', '')
device_list = [device.split(':')[0].strip() for device in devices_clean.split(',') if device.strip()]
if not device_list:
return 1, {"deployed": [], "not_found": []}
conn = get_db_connection()
cur = conn.cursor()
error_string = ""
try:
# First, get user's accessible deployments
user_deployments = []
is_superuser = False
if user_name:
user_sql = f"""
SELECT access_to_deployments
FROM public.person_details
WHERE user_name = '{CleanObject(user_name)}'
"""
logger.debug(f"user_sql= {user_sql}")
cur.execute(user_sql)
user_result = cur.fetchone()
if user_result and user_result[0]:
access_deployments = user_result[0].strip()
if access_deployments == "-1":
# Superuser has access to all deployments
is_superuser = True
logger.debug("User is superuser - has access to all deployments")
else:
# Parse comma-separated deployment IDs
user_deployments = [int(dep_id.strip()) for dep_id in access_deployments.split(',') if dep_id.strip().isdigit()]
logger.debug(f"User has access to deployments: {user_deployments}")
deployed_well_ids = []
not_found_devices = []
for device in device_list:
# Check if this device is a well_id or device_mac by string length
if len(device) == 12:
# Exactly 12 characters = device_mac
device_mac = device
# First check if device exists in devices table
check_exists_sql = f"""
SELECT well_id FROM public.devices WHERE device_mac = '{device_mac}'
"""
logger.debug(f"check_exists_sql= {check_exists_sql}")
cur.execute(check_exists_sql)
device_result = cur.fetchone()
if not device_result:
# Device not found in DB
not_found_devices.append(device)
continue
device_well_id = device_result[0]
# Query to check if this device_mac is deployed (excluding user's accessible deployments)
if is_superuser:
# Superuser has access to all deployments, so no devices are "deployed elsewhere"
sql = "SELECT NULL WHERE FALSE" # Returns no results
elif user_deployments:
deployment_exclusion = f"AND dd.deployment_id NOT IN ({','.join(map(str, user_deployments))})"
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.device_mac = '{device_mac}' {deployment_exclusion}
"""
else:
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.device_mac = '{device_mac}'
"""
else:
# Any other length = well_id
well_id = device
# First check if device exists in devices table
check_exists_sql = f"""
SELECT well_id FROM public.devices WHERE well_id = {well_id}
"""
logger.debug(f"check_exists_sql= {check_exists_sql}")
cur.execute(check_exists_sql)
device_result = cur.fetchone()
if not device_result:
# Device not found in DB
not_found_devices.append(device)
continue
# Query to check if any device with this well_id is deployed (excluding user's accessible deployments)
if is_superuser:
# Superuser has access to all deployments, so no devices are "deployed elsewhere"
sql = "SELECT NULL WHERE FALSE" # Returns no results
elif user_deployments:
deployment_exclusion = f"AND dd.deployment_id NOT IN ({','.join(map(str, user_deployments))})"
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.well_id = {well_id} {deployment_exclusion}
"""
else:
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.well_id = {well_id}
"""
logger.debug(f"sql= {sql}")
cur.execute(sql)
results = cur.fetchall()
# Add any found well_id's to our list
for row in results:
if row[0] is not None and row[0] not in deployed_well_ids:
deployed_well_ids.append(row[0])
# Close the cursor and connection
cur.close()
conn.close()
AddToLog(f"DevicesNotUsed check completed. Found {len(deployed_well_ids)} deployed devices, {len(not_found_devices)} not found.")
return 1, {"deployed": deployed_well_ids, "not_found": not_found_devices}
except Exception as err:
error_string = traceback.format_exc()
AddToLog(error_string)
if cur:
cur.close()
if conn:
conn.close()
return 0, error_string
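# Illustrative usage (hypothetical identifiers): DevicesNotUsed returns a (flag, payload)
# tuple, so callers should branch on the flag before touching the payload.
# ok, result = DevicesNotUsed("AABBCCDDEEFF,1234", "some_user")
# if ok:
#     logger.debug(f"deployed elsewhere: {result['deployed']}, unknown: {result['not_found']}")
# else:
#     logger.error(result)  # on failure the payload is the formatted traceback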
def CreatePassword(chat_len):
"""
Generate a reasonable password of specified length using common words
with character substitutions (leet speak style).
Args:
chat_len (int): Desired password length
Returns:
str: Generated password
"""
    # Re-seed the PRNG from the current time so repeated calls yield different sequences.
    # Note: the standard `random` module is not cryptographically secure (see the sketch below).
    random.seed(int(time.time() * 1000000) % 2**32)
# Common words to use as base
words = [
"password", "secure", "strong", "safe", "guard", "shield", "lock",
"key", "code", "access", "login", "enter", "open", "door",
"house", "home", "family", "friend", "happy", "sunny", "bright",
"quick", "fast", "smart", "clever", "power", "energy", "magic",
"super", "ultra", "mega", "cool", "awesome", "great", "best",
"gold", "silver", "diamond", "star", "moon", "sun", "fire",
"water", "earth", "wind", "storm", "thunder", "lightning",
"mountain", "ocean", "forest", "river", "bridge", "castle",
"knight", "dragon", "wizard", "hero", "champion", "winner"
]
# Character substitution mapping (leet speak)
substitutions = {
'a': '@', 'A': '@',
'e': '3', 'E': '3',
'i': '1', 'I': '1',
'o': '0', 'O': '0',
's': '$', 'S': '$',
't': '7', 'T': '7',
'b': '8', 'B': '8',
'g': '9', 'G': '9',
'l': '!', 'L': '!',
'z': '2', 'Z': '2'
}
password = ""
while len(password) < chat_len:
# Pick a random word
word = random.choice(words)
# Apply random substitutions (not all characters, to keep it readable)
modified_word = ""
for char in word:
# 40% chance to substitute if substitution exists
if char in substitutions and random.random() < 0.4:
modified_word += substitutions[char]
else:
modified_word += char
# Randomly capitalize some letters for variety
if random.random() < 0.3:
modified_word = modified_word.capitalize()
# Add the word to password
if len(password) + len(modified_word) <= chat_len:
password += modified_word
else:
# If word is too long, take only what we need
remaining = chat_len - len(password)
password += modified_word[:remaining]
break
# Add a random number or symbol between words (if space allows)
if len(password) < chat_len and random.random() < 0.5:
separators = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '#', '*', '+', '=']
separator = random.choice(separators)
if len(password) + 1 <= chat_len:
password += separator
return password
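# Sketch of a cryptographically stronger alternative (assumption, not wired in anywhere):
# CreatePassword above favours memorability via the standard `random` module, which is not
# suitable for security-sensitive secrets. A `secrets`-based variant could look like this.
import secrets
import string

def CreateSecurePasswordExample(length):
    # Hypothetical helper for illustration only; not called by this service.
    alphabet = string.ascii_letters + string.digits + "!@#$%*"
    return ''.join(secrets.choice(alphabet) for _ in range(length))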
def DevicesNotUsedNoOwnershipCheck(devices):
"""
Check if devices exist in DB and if they are already deployed.
Args:
devices (str): Comma-separated string of well_id or device_mac values
Returns:
tuple: (success_flag, result)
- If success: (1, {"deployed": [well_ids], "not_found": [device_identifiers]})
- If error: (0, error_string)
"""
if not devices:
return 1, {"deployed": [], "not_found": []}
# Clean and split the devices string
device_list = [CleanObject(device.strip()) for device in devices.split(',') if device.strip()]
if not device_list:
return 1, {"deployed": [], "not_found": []}
conn = get_db_connection()
cur = conn.cursor()
error_string = ""
try:
deployed_well_ids = []
not_found_devices = []
for device in device_list:
# Check if this device is a well_id or device_mac by string length
if len(device) == 12:
# Exactly 12 characters = device_mac
device_mac = device
# First check if device exists in devices table
check_exists_sql = f"""
SELECT well_id FROM public.devices WHERE device_mac = '{device_mac}'
"""
logger.debug(f"check_exists_sql= {check_exists_sql}")
cur.execute(check_exists_sql)
device_result = cur.fetchone()
if not device_result:
# Device not found in DB
not_found_devices.append(device)
continue
#device_well_id = device_result[0]
# Query to check if this device_mac is deployed
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.device_mac = '{device_mac}'
"""
else:
# Any other length = well_id
well_id = device
# First check if device exists in devices table
check_exists_sql = f"""
SELECT well_id FROM public.devices WHERE well_id = {well_id}
"""
logger.debug(f"check_exists_sql= {check_exists_sql}")
cur.execute(check_exists_sql)
device_result = cur.fetchone()
if not device_result:
# Device not found in DB
not_found_devices.append(device)
continue
# Query to check if any device with this well_id is deployed
sql = f"""
SELECT DISTINCT d.well_id
FROM public.devices d
JOIN public.deployment_details dd ON dd.devices LIKE '%' || d.device_mac || '%'
WHERE d.well_id = {well_id}
"""
logger.debug(f"sql= {sql}")
cur.execute(sql)
results = cur.fetchall()
# Add any found well_id's to our list
for row in results:
if row[0] is not None and row[0] not in deployed_well_ids:
deployed_well_ids.append(row[0])
# Close the cursor and connection
cur.close()
conn.close()
AddToLog(f"DevicesNotUsed check completed. Found {len(deployed_well_ids)} deployed devices, {len(not_found_devices)} not found.")
return 1, {"deployed": deployed_well_ids, "not_found": not_found_devices}
except Exception as err:
error_string = traceback.format_exc()
AddToLog(error_string)
if cur:
cur.close()
if conn:
conn.close()
return 0, error_string
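# Sketch (assumption, not what the two check functions above currently do): the per-device
# existence lookups could use psycopg2 parameter binding instead of f-string interpolation,
# which removes the need to escape device identifiers by hand.
def _device_exists_example(cur, device_mac):
    # Hypothetical helper for illustration only; `cur` is a psycopg2 cursor.
    cur.execute("SELECT well_id FROM public.devices WHERE device_mac = %s", (device_mac,))
    row = cur.fetchone()
    return row[0] if row else None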
def ExtractAddress(deployment_map):
address_map = {}
'''
{
"city": "mountain view",
"country": "usa",
"house_number": "760",
"postcode": "95070",
"road": "hope st.",
"state": "ca"
}
'''
print(deployment_map) #{'deployment_id': 24, 'time_edit': 1753129300.0, 'user_edit': 32, 'persons': 2, 'gender': 1, 'race': 1, 'born': 1972, 'pets': 0,
# 'address_street': '', 'address_city': '', 'address_zip': '95070', 'address_state': '', 'address_country': '', 'wifis': '{"CBX_F": "69696969", "CBX": "69696969"}', 'lat': 37.267117, 'lng': -121.99548, 'gps_age': 0, 'note': 'me', 'overlapps': None}'
address_map["city"] = deployment_map["address_city"]
address_map["country"] = deployment_map["address_country"]
address_map["road"] = deployment_map["address_street"]
address_map["postcode"] = deployment_map["address_zip"]
address_map["state"] = deployment_map["address_state"]
#address_map["city"] = "San Francisco"
#address_map["country"] = "USA"
#address_map["road"] = "230 Hope str."
#address_map["postcode"] = "95070"
#address_map["state"] = "CA"
    address_info = JoinAddress(address_map)
    return address_info["formatted_address"].strip()
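# Illustrative usage (hypothetical deployment row; JoinAddress does the actual formatting):
# formatted = ExtractAddress({"address_street": "760 hope st.", "address_city": "mountain view",
#                             "address_zip": "95070", "address_state": "ca", "address_country": "usa"})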
def PurgeDeployment(deployment):
deployment.pop('floor_plan', None)
deployment.pop('context', None)
deployment.pop('alarm_details', None)
deployment.pop('devices', None)
deployment.pop('time_zone_s', None)
deployment.pop('beneficiary_id', None)
deployment.pop('caretaker_id', None)
deployment.pop('owner_id', None)
deployment.pop('installer_id', None)
#print(deployment)
deployment.pop('address_street', None)
deployment.pop('address_city', None)
deployment.pop('address_zip', None)
deployment.pop('address_state', None)
deployment.pop('address_country', None)
deployment.pop('gps_age', None)
deployment.pop('note', None)
deployment.pop('overlapps', None)
deployment.pop('time_edit', None)
deployment.pop('user_edit', None)
return deployment
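# Note (illustrative): PurgeDeployment mutates and returns the same dict, so callers that
# still need the full record should copy it first, e.g.:
# public_view = PurgeDeployment(dict(deployment_row))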
def save_list_to_csv_method1(data_list, filename):
"""Save list data to CSV with one item per row using csv module"""
with open(filename, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
# Optional: Add header
writer.writerow(['Value'])
# Write each item in a separate row
for item in data_list:
writer.writerow([item])
print(f"Data saved to {filename} using csv module")
logger.error(f"------------------------------- STARTED ------------------------------------------")
# Initialize data files
try:
searches_text = read_file("searches.json")
searches_dict = json.loads(searches_text) if searches_text else {}
dialogs_data = read_file("dialog.json")
dialog_dict = json.loads(dialogs_data) if dialogs_data else {"utterances": {}, "intents": {}}
intent_map = dialog_dict.get("utterances", {})
utterances = {}
for key in intent_map:
logger.debug(key)
list_of_utterances = intent_map[key]
for utterance in list_of_utterances:
utterances[utterance] = key
intents = dialog_dict.get("intents", {})
except Exception as e:
logger.error(f"Error initializing data files: {str(e)}")
searches_dict = {}
utterances = {}
intents = {}
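# Expected dialog.json shape (inferred from the parsing above):
# {"utterances": {"some_intent": ["phrase one", "phrase two"]}, "intents": {"some_intent": {...}}}
# The loop above inverts "utterances" so each phrase maps back to its intent key:
# utterances == {"phrase one": "some_intent", "phrase two": "some_intent"}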
# Create Falcon application with middleware
middlewares = [CORSMiddleware(), RequestParser(), StripPathMiddleware()]
try:
# For newer Falcon versions
app = falcon.App(middleware=middlewares)
except AttributeError:
# For older Falcon versions
app = falcon.API(middleware=middlewares)
#logger.error(f"@1")
# Add routes for well-api
well_api_instance = WellApi()
# New routes for well_api with multiple access paths
app.add_route('/function/well-api', well_api_instance)
app.add_route('/function/well-api/{path}', well_api_instance)
app.add_route('/api/well_api', well_api_instance)
app.add_route('/api/well_api/{path}', well_api_instance)
app.add_route('/healthz', well_api_instance, suffix='healthz')
# Add routes for the standard API paths
app.add_route('/health', well_api_instance)
app.add_route('/users', well_api_instance)
app.add_route('/items', well_api_instance)
# Keep the original routes for backward compatibility
app.add_route('/', well_api_instance)
app.add_route('/{path}', well_api_instance)
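# Illustrative smoke test (assuming the default port 8000 used by the dev server below):
# curl http://localhost:8000/healthz
# curl http://localhost:8000/api/well_api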
MQTTSERVERL = "eluxnetworks.net"
MQTT_PortL = 443
MyName = "well-api"
clientL = mqtt.Client(
client_id=MyName+str(time.time()),
transport="websockets",
callback_api_version=mqtt.CallbackAPIVersion.VERSION2
)
clientL.tls_set(cert_reqs=ssl.CERT_NONE) # For self-signed certs, use proper CA in production
clientL.ws_set_options(path="/mqtt") # Important! Same path as in your JS code
clientL.username_pw_set(MQTT_USER, MQTT_PASS)
clientL.on_connect = on_connectL
clientL.on_message = on_messageL
#clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
#clientL.loop_start()
#logger.error(f"@2")
# This code runs when executed directly (for development/debugging)
if __name__ == "__main__":
from wsgiref.simple_server import make_server
redis_conn = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
# Use port 8000 for local debugging
port = int(os.environ.get('PORT', 8000))
#port = int(os.environ.get('PORT', 1998))
# Create a WSGI server
with make_server('', port, app) as httpd:
print(f'Serving on port {port}...')
# Serve until process is killed
httpd.serve_forever()
else:
redis_conn = redis.Redis(host=redis_host, port=6379, db=0)
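# Illustrative production invocation under a WSGI server (assumption; the module name
# "well_api" is hypothetical):
# gunicorn --bind 0.0.0.0:8000 well_api:app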