#!/usr/bin/env python3

# Version 2.0.1

import os
import sys
import ast
from ast import literal_eval
import falcon
from falcon import HTTP_200, HTTP_400, HTTP_401, HTTP_500
import json
import logging
from dotenv import load_dotenv
import calendar
import io
import datetime
from datetime import timedelta, timezone
import jwt
import psycopg2
import html
import re
import fnmatch
import traceback
import time
import pytz
from PIL import Image, ImageDraw, ImageFont
import paho.mqtt.client as mqtt
import ssl
import hashlib
import itertools
from collections import defaultdict, deque
import warnings
from io import BytesIO
import zipfile
from minio import Minio
from minio.error import S3Error
import numpy as np
import cv2
from sklearn.mixture import GaussianMixture
import openai
from openai import OpenAI
from typing import List, Tuple
import redis
import base64
import requests

# Try to import the module
try:
    from filter_short_groups import filter_short_groups_c
    print("Successfully imported filter_short_groups_c")
except ImportError as e:
    print(f"Error importing module: {e}")
    exit(1)

device_lookup_cache = {}
threshold_cache = {}

st = 0
if True:

    #from scipy import interpolate
    from scipy.optimize import curve_fit
    from scipy import stats
    import pandas as pd
    #from scipy.signal import savgol_filter

EnablePlot = False  #True

if EnablePlot:
    import matplotlib
    matplotlib.use('Agg')  # Set the backend before importing pyplot
    import matplotlib.pyplot as plt
    from matplotlib.colors import LinearSegmentedColormap
    import matplotlib.dates as mdates

# Configure logging
logging.basicConfig(
    level=logging.DEBUG,  # .ERROR,
    format='%(asctime)s [%(levelname)s] %(message)s'
)
logger = logging.getLogger(__name__)

location_names = {-1:"All",0:"?",5:"Office",6:"Hallway",7:"Garage",8:"Outside",9:"Conference Room",10:"Room",34:"Kitchen",
                  56:"Bedroom",78:"Living Room",102:"Bathroom",103:"Dining Room",104:"Bathroom Main",105:"Bathroom Guest",
                  106:"Bedroom Master", 107:"Bedroom Guest", 108:"Conference Room", 109:"Basement", 110:"Attic", 200:"Other"}

#Loc2Color = {"?":(0,0,0),"Office":(255,255,0),"Hallway":(128,128,128),"Garage":(128,0,0),"Outside":(0,0,0),"Conference Room":(0,0,128),
#"Room":(64,64,64),"Kitchen":(255,0,0),"Bedroom":(16,255,16),"Living Room":(160,32,240),"Bathroom":(0,0,255),
#"Dining Room":(255,128,0),"Bathroom Main":(16,16,255), "Bedroom Master":(0,255,0),"Bathroom Guest":(32,32,255),
#"Bedroom Guest":(32,255,32), "Basement":(64,64,64), "Attic":(255,165,0), "Other":(192,192,192)}

Loc2Color = {"Bedroom":((16,255,16),0),"Bedroom Master":((0,255,0),0),"Bedroom Guest":((32,255,32),0),"Bathroom":((0,0,255),1),
             "Bathroom Main":((16,16,255),1),"Bathroom Guest":((32,32,255),1),"Kitchen":((255,0,0),2),"Dining Room":((255,128,0),3),"Dining":((255,128,0),3),
             "Office":((255,255,0),4),"Conference Room":((0,0,128),5),"Conference":((0,0,128),5),"Room":((64,64,64),6),"Living Room":((160,32,240),7),"Living":((160,32,240),7),"Hallway":((128,128,128),8),
             "Garage":((128,0,0),9),"Basement":((64,64,64),10),"Attic":((255,165,0),11),"Other":((192,192,192),12),"?":((0,0,0),13),"Outside":((0,0,0),14)}


s_table = ["temperature", "humidity", "pressure", "light", "radar", "voc0", "voc1", "voc2", "voc3", "voc4", "voc5", "voc6", "voc7", "voc8", "voc9"]  # derived
smells_table = ["s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9"]  # derived

s_table_temp = []

Consolidataed_locations = {"?":"Room","Office":"Office","Hallway":"Hallway","Garage":"Garage","Outside":"Outside","Conference Room":"Office",
                           "Room":"Room","Kitchen":"Kitchen","Bedroom":"Bedroom","Living Room":"Living Room","Bathroom Guest":"Bathroom",
                           "Dining Room":"Dining Room","Bathroom":"Bathroom", "Bathroom Main":"Bathroom","Bedroom Master":"Bedroom",
                           "Bedroom Guest":"Bedroom", "Basement":"Basement", "Attic":"Attic", "Other":"Room"}

AveragePercentPerLocation = {"Bedroom":[29, 37.5], "Bathroom":[2, 4], "Office":[10, 40],"Hallway":[0.1, 0.2],"Garage":[2, 3],"Outside":[5, 10],
                             "Room":[5, 10],"Kitchen":[5, 12.5], "Living Room":[5, 10],
                             "Dining Room":[5, 10], "Basement":[0, 0.2], "Attic":[0, 0.2]}

location_indexes = {}

for i in location_names:
    location_indexes[location_names[i]] = i


# HTTP Status codes
HTTP_200 = falcon.HTTP_200
HTTP_201 = falcon.HTTP_201
HTTP_400 = falcon.HTTP_400
HTTP_401 = falcon.HTTP_401
HTTP_404 = falcon.HTTP_404
HTTP_500 = falcon.HTTP_500

load_dotenv()

DB_NAME = os.getenv('DB_NAME')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')
DB_PORT = os.getenv('DB_PORT')
MINIO_ACCESS_KEY = os.getenv('MINIO_ACCESS_KEY')
MINIO_SECRET_KEY = os.getenv('MINIO_SECRET_KEY')
MINIO_HOST = os.getenv('MINIO_HOST')
MINIO_PORT = os.getenv('MINIO_PORT')
DAILY_MAPS_BUCKET_NAME = os.getenv('DAILY_MAPS_BUCKET_NAME')
JWT_SECRET = os.getenv('JWT_SECRET')
MASTER_ADMIN = os.getenv('MASTER_ADMIN')
MASTER_PS = os.getenv('MASTER_PS')

OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
model_engine = os.getenv('OPENAI_API_MODEL_ENGINE')

# Redis Configuration
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = int(os.getenv('REDIS_PORT'))
REDIS_DB = int(os.getenv('REDIS_DB', 0))
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', None)

ENABLE_AUDIO_PLAYBACK = True
OPT_IN_KEYWORD = "WELLNUOJOIN"
DEFAULT_TTS_VOICE = "female"
DEFAULT_TTS_LANGUAGE = "en-US"
CLIENT_STATE_PREFIX = "app_state"
TELNYX_API_KEY = os.getenv('TELNYX_API_KEY')
TELNYX_API_BASE_URL = os.getenv("TELNYX_API_BASE_URL")

logger.debug(f"REDIS_PORT: {REDIS_PORT}")
logger.debug(f"TELNYX_API_KEY: {TELNYX_API_KEY}")
logger.debug(f"TELNYX_API_BASE_URL: {TELNYX_API_BASE_URL}")

redis_host = os.getenv('REDIS_HOST', '192.168.68.70')
redis_host = '192.168.68.70'

use_pdb = True
debug = False
debug_string = ""
logger.debug(f"Environment variables: {os.environ}")
filesDir = "/home/app/well_web_storage"  # os.path.dirname(os.path.realpath(__file__))
min_io_address = MINIO_HOST + ":" + MINIO_PORT

miniIO_blob_client = Minio(min_io_address, access_key=MINIO_ACCESS_KEY, secret_key=MINIO_SECRET_KEY, secure=False)

user_id_2_user = {}
smell_min = 1
no_smell = 102400000
smell_max = no_smell - 1
sensor_legal_values = {"radar": (0, 1000, 1), "co2": (smell_min, smell_max, 31), "humidity": (1, 99, 31), "light": (0, 4095, 1),
                       "pressure": (0, 10000, 5), "temperature": (1, 60, 31), "voc": (smell_min, smell_max, 31), "voc0": (smell_min, smell_max, 31),
                       "voc1": (smell_min, smell_max, 31), "voc2": (smell_min, smell_max, 31), "voc3": (smell_min, smell_max, 31), "voc4": (smell_min, smell_max, 31),
                       "voc5": (smell_min, smell_max, 31), "voc6": (smell_min, smell_max, 31), "voc7": (smell_min, smell_max, 31), "voc8": (smell_min, smell_max, 31), "voc9": (smell_min, smell_max, 31),
                       "s0": (smell_min, smell_max, 31), "s1": (smell_min, smell_max, 31), "s2": (smell_min, smell_max, 31), "s3": (smell_min, smell_max, 31), "s4": (smell_min, smell_max, 31),
                       "s5": (smell_min, smell_max, 31), "s6": (smell_min, smell_max, 31), "s7": (smell_min, smell_max, 31), "s8": (smell_min, smell_max, 31), "s9": (smell_min, smell_max, 31)}

smell_legal_values = {"s0": (smell_min, smell_max, 31), "s1": (smell_min, smell_max, 31), "s2": (smell_min, smell_max, 31), "s3": (smell_min, smell_max, 31), "s4": (smell_min, smell_max, 31),
                      "s5": (smell_min, smell_max, 31), "s6": (smell_min, smell_max, 31), "s7": (smell_min, smell_max, 31), "s8": (smell_min, smell_max, 31), "s9": (smell_min, smell_max, 31)}


def GetRedisInt(key_name):
    try:
        result = int(redis_conn.get(key_name).decode('utf-8'))
    except:
        result = None
    return result


def GetRedisFloat(key_name):
    try:
        result = float(redis_conn.get(key_name).decode('utf-8'))
    except:
        result = None
    return result


def GetRedisString(key_name):
    try:
        result = redis_conn.get(key_name).decode('utf-8')
    except:
        result = None
    return result


def GetRedisMap(key_name):
    try:
        result_bytes = redis_conn.hgetall(key_name)
        result = {k.decode('utf-8'): v.decode('utf-8') for k, v in result_bytes.items()}
    except:
        result = {}
    return result


def read_file(file_name, source="LOCAL", type_="TEXT", bucket_name="daily-maps"):

    blob_data = ""
    if source == "MINIO":
        blob_data = ReadObjectMinIO(bucket_name, file_name)
    elif source == "LOCAL":
        login_file = os.path.join(filesDir, file_name)
        login_file = login_file.replace("\\", "/")
        logger.debug(f"Full file path: {login_file}")
        logger.debug(f"File exists: {os.path.exists(login_file)}")
        #print(login_file)
        if type_ == "TEXT":
            with open(login_file, encoding="utf8") as f:
                blob_data = f.read()
        else:
            with open(login_file, 'rb') as f:
                blob_data = f.read()

    elif source == "AZURE":
        try:
            blob_data = ""  # container_client.download_blob(file_name).readall()
        except Exception as err:
            logger.error("Not reading Azure blob " + str(err))
            blob_data = ""
            return blob_data
    else:
        pass
    return blob_data


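# Illustrative usage sketch for read_file (the file and bucket names below are
# hypothetical and only show the intended call patterns):
#
#   page_html = read_file("login.html", source="LOCAL", type_="TEXT")
#   daily_map = read_file("2024-01-01_map.json", source="MINIO", bucket_name="daily-maps")

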
def match_with_wildcard(string, pattern):
    return fnmatch.fnmatchcase(string, pattern)


def extract_differing_part(string, pattern):
    regex_pattern = re.escape(pattern).replace(r'\*', r'(.+)')
    match = re.match(regex_pattern, string)
    if match:
        return match.group(1)
    else:
        return None


def get_db_connection():
    return psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=DB_PORT)


def generate_token(username):
    expiration = datetime.datetime.now(timezone.utc) + timedelta(hours=24)
    token = jwt.encode({"username": username, "exp": expiration}, JWT_SECRET, algorithm="HS256")
    return token


def verify_token(token):
    try:
        payload = jwt.decode(token, JWT_SECRET, algorithms=["HS256"])
        return payload
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError:
        return None


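# Illustrative round trip of the two helpers above, assuming JWT_SECRET is set in
# the environment:
#
#   token = generate_token("alice")     # HS256 token that expires in 24 hours
#   claims = verify_token(token)        # {"username": "alice", "exp": <epoch seconds>}
#   verify_token(token + "x")           # None (invalid signature)

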
def SmartSplit(data_string):
    """
    Splits a comma-separated string into a list, properly handling nested structures
    and converting values to appropriate Python types using only the ast library.
    """
    if not data_string:
        return []

    # Remove trailing comma if present
    data_string = data_string.rstrip(',')

    items = []
    current_item = ""
    bracket_count = 0
    in_quotes = False
    quote_char = None

    i = 0
    while i < len(data_string):
        char = data_string[i]

        # Handle quotes
        if char in ('"', "'") and (i == 0 or data_string[i-1] != '\\'):
            if not in_quotes:
                in_quotes = True
                quote_char = char
            elif char == quote_char:
                in_quotes = False
                quote_char = None

        # Track brackets only when not in quotes
        if not in_quotes:
            if char in '[{(':
                bracket_count += 1
            elif char in ']})':  # closing brackets, braces and parentheses
                bracket_count -= 1

        # Split on comma only when not inside brackets/quotes
        if char == ',' and bracket_count == 0 and not in_quotes:
            items.append(current_item.strip())
            current_item = ""
        else:
            current_item += char

        i += 1

    # Add the last item
    if current_item.strip():
        items.append(current_item.strip())

    # Convert each item using ast.literal_eval when possible
    result = []
    for item in items:
        if item == '':
            result.append(None)
        else:
            try:
                # Try to evaluate as a Python literal
                converted = ast.literal_eval(item)
                result.append(converted)
            except (ValueError, SyntaxError):
                # If it fails, keep as string
                result.append(item)

    return result


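# Illustrative example: SmartSplit keeps nested literals intact and converts each
# top-level item with ast.literal_eval.
#
#   SmartSplit("1,2.5,'abc',[1, 2, 3],{'a': 1},")
#   -> [1, 2.5, 'abc', [1, 2, 3], {'a': 1}]

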
def SaveObjectInBlob(file_name, obj):
    """
    Saves a Python object to MinIO blob storage using JSON serialization.

    Args:
        file_name (str): Name of the file to save in blob storage
        obj: Python object to serialize and save
    """
    try:
        # Convert object to JSON string
        json_str = json.dumps(obj)
        # Convert string to bytes
        json_bytes = json_str.encode('utf-8')

        # Save to MinIO
        miniIO_blob_client.put_object(
            DAILY_MAPS_BUCKET_NAME,
            file_name,
            io.BytesIO(json_bytes),
            len(json_bytes)
        )
        return True
    except Exception as e:
        logger.error(f"Error saving object to blob: {traceback.format_exc()}")
        return False


def SaveGenericObjectInBlob(bucket_name, file_name, obj):
    """
    Saves a Python object to an arbitrary MinIO bucket using JSON serialization.

    Args:
        bucket_name (str): Name of the target bucket
        file_name (str): Name of the file to save in blob storage
        obj: Python object to serialize and save
    """
    try:
        # Convert object to JSON string
        json_str = json.dumps(obj)
        # Convert string to bytes
        json_bytes = json_str.encode('utf-8')

        # Save to MinIO
        miniIO_blob_client.put_object(
            bucket_name,
            file_name,
            io.BytesIO(json_bytes),
            len(json_bytes)
        )
        return True
    except Exception as e:
        logger.error(f"Error saving object to blob: {traceback.format_exc()}")
        return False


def ReadObjectMinIO(bucket_name, file_name, filter_date=None):
    """
    Read an object from MinIO with optional date filtering.

    Args:
        bucket_name (str): Name of the MinIO bucket
        file_name (str): Name of the file/object
        filter_date (str, optional): Date string in format "YYYY-MM-DD".
                                     If provided, None is returned when the object
                                     was last modified on or before this date.

    Returns:
        str: Object content as a string, or None if the object is filtered out
             or an error occurs.
    """
    try:
        # If date filtering is requested, check the object's last modified date first
        if filter_date:
            try:
                # Get object metadata to check last modified date
                stat = miniIO_blob_client.stat_object(bucket_name, file_name)
                last_modified = stat.last_modified

                # Parse filter date (expected format YYYY-MM-DD)
                target_date = datetime.datetime.strptime(filter_date, "%Y-%m-%d").date()

                # If the object was modified on or before the target date, skip it
                if last_modified.date() <= target_date:
                    return None

            except S3Error as e:
                logger.error(f"Error getting metadata for {file_name}: {e}")
                return None
            except ValueError as e:
                logger.error(f"Invalid date format '{filter_date}': {e}")
                return None

        # Retrieve the object data
        response = miniIO_blob_client.get_object(bucket_name, file_name)
        # Read the data from the response
        data_bytes = response.read()
        # Convert bytes to string
        data_string = data_bytes.decode('utf-8')
        # Don't forget to close the response
        response.close()
        response.release_conn()
        return data_string

    except S3Error as e:
        logger.error(f"An error occurred while reading {file_name}: {e}")
        return None
    except:
        logger.error(f"An error occurred while decoding {file_name}")
        return None


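# Illustrative save/read round trip with the helpers above (the object name is
# hypothetical):
#
#   SaveObjectInBlob("daily/2024-01-01.json", {"steps": 120})
#   raw = ReadObjectMinIO(DAILY_MAPS_BUCKET_NAME, "daily/2024-01-01.json")
#   data = json.loads(raw) if raw is not None else None
#   # With filter_date, objects last modified on or before that day come back as None:
#   ReadObjectMinIO(DAILY_MAPS_BUCKET_NAME, "daily/2024-01-01.json", filter_date="2030-01-01")

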
def package_response_C(payload, status_code=HTTP_200):
    """Package a response in a standard format."""
    if status_code == HTTP_200:
        return {"status": "success", "data": payload}
    else:
        return {"status": "error", "message": payload, "code": status_code}


def package_response(content, status=falcon.HTTP_200):
    """
    Format the HTTP response.

    :param content: The content to be returned in the response.
    :param status: HTTP status code (default is 200 OK).
    :return: A dictionary containing the formatted response.
    """
    if isinstance(content, str):
        # If content is a string, try to parse it as JSON
        try:
            response = json.loads(content)
        except json.JSONDecodeError:
            # If it's not valid JSON, use it as a message
            response = {"message": content}
    elif isinstance(content, dict):
        # If content is a dictionary, serialize it with datetime handling
        try:
            # First serialize to a JSON string with datetime handling
            json_str = json.dumps(content, default=datetime_handler)
            # Then parse back to a dict
            response = json.loads(json_str)
        except TypeError as e:
            response = {"message": f"Serialization error: {str(e)}"}
    else:
        # For any other type, convert to string and use as a message
        response = {"message": str(content)}

    # Add the status code to the response
    response["status"] = status

    # Handle specific status codes
    if status == falcon.HTTP_400:
        response["error"] = "Bad Request"
    elif status == falcon.HTTP_401:
        response["error"] = "Unauthorized"
    elif status == falcon.HTTP_500:
        response["error"] = "Internal Server Error"

    return response


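# Illustrative examples of what the formatter returns:
#
#   package_response({"rows": []})
#   -> {"rows": [], "status": "200 OK"}
#   package_response("not found", falcon.HTTP_400)
#   -> {"message": "not found", "status": "400 Bad Request", "error": "Bad Request"}

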
def GetPriviledges(conn, user_name, password):
    sql = "SELECT key, access_to_deployments, user_id FROM public.person_details WHERE user_name = '" + user_name + "'"

    with conn.cursor() as cur:
        cur.execute(sql)
        result = cur.fetchall()  # cur.fetchone()
        if result != None and result != []:
            if result[0][0] == password:
                return result[0][1], result[0][2]
            else:
                return "0", "0"
        else:
            return "0", "0"


def GetPriviledgesOnly(user):
    with get_db_connection() as conn:
        if isinstance(user, int) or user.isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = " + str(user)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = '" + user + "'"

        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()  # cur.fetchone()
            if result:
                return result[0][0]
            else:
                return "0"


def GetUserId(user_name):
    with get_db_connection() as conn:
        sql = "SELECT user_id FROM public.person_details WHERE user_name = '" + user_name + "'"

        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()  # cur.fetchone()
            if result:
                return result[0][0]
            else:
                return "0"


def GetNameFromUserId(user_id):
    with get_db_connection() as conn:
        sql = f"SELECT user_name, first_name, last_name FROM public.person_details WHERE user_id = {user_id}"

        with conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchall()  # cur.fetchone()
            if result:
                return result[0]
            else:
                return None


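# Illustrative sketch only: the same lookup as GetUserId, written with a psycopg2
# parameterized query so the user name is passed as a bound parameter instead of
# being interpolated into the SQL string.
#
#   with get_db_connection() as conn:
#       with conn.cursor() as cur:
#           cur.execute("SELECT user_id FROM public.person_details WHERE user_name = %s", (user_name,))
#           row = cur.fetchone()
#           user_id = row[0] if row else "0"

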
def ListDeployments(priviledges, user_id):
|
|
|
|
global user_id_2_user
|
|
|
|
conn = get_db_connection()
|
|
|
|
if priviledges == "-1":
|
|
sql = "SELECT * FROM public.deployments ORDER BY deployment_id ASC;"
|
|
else:
|
|
sql = f"SELECT * FROM public.deployments WHERE deployment_id IN ({priviledges}) OR user_edit = {user_id} ORDER BY deployment_id ASC;"
|
|
|
|
try:
|
|
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if result == None:
|
|
complete_result = []
|
|
else:
|
|
deployment_ids = []
|
|
deployment_records_dict = {}
|
|
for record in result:
|
|
deployment_id = record[0]
|
|
deployment_ids.append(deployment_id)
|
|
deployment_records_dict[deployment_id] = record
|
|
|
|
sql = f"SELECT * FROM public.deployment_details WHERE deployment_id IN ({','.join(map(str, deployment_ids))}) ORDER BY deployment_id ASC;"
|
|
cur.execute(sql)
|
|
details_result = cur.fetchall()
|
|
|
|
beneficiary_ids = []
|
|
|
|
for record_details in details_result:
|
|
if record_details[1] != None and record_details[1] not in beneficiary_ids:
|
|
beneficiary_ids.append(record_details[1])
|
|
|
|
sql = f"SELECT * FROM public.person_details WHERE user_id IN ({','.join(map(str, beneficiary_ids))});"
|
|
|
|
|
|
cur.execute(sql)
|
|
user_id_2_user = {}
|
|
users = cur.fetchall()#cur.fetchone()
|
|
for usr_record in users:
|
|
user_id_2_user[usr_record[0]] = usr_record
|
|
|
|
complete_result = []
|
|
if details_result != None:
|
|
for record_details in details_result:
|
|
deployment_record = deployment_records_dict[record_details[0]]
|
|
complete_record = {'deployment_id': record_details[0], 'beneficiary_id': record_details[1], 'caretaker_id': record_details[2],
|
|
'owner_id': record_details[3], 'installer_id': record_details[4],
|
|
'address_street': record_details[6], 'address_city': record_details[7], 'address_zip': record_details[8],
|
|
'address_state': record_details[9], 'address_country': record_details[10],
|
|
'devices': record_details[5], 'wifis': record_details[11], 'persons': deployment_record[4], 'gender': deployment_record[5],
|
|
'race': deployment_record[6], 'born': deployment_record[7], 'pets': deployment_record[8], 'time_zone': deployment_record[3]
|
|
}
|
|
complete_result.append(complete_record)
|
|
except:
|
|
logger.debug(f"Error: {traceback.format_exc()}")
|
|
return complete_result
|
|
|
|
def ListCaretakers(privileges, user_name):
|
|
|
|
conn = get_db_connection()
|
|
if privileges == "-1":
|
|
sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%2%' ORDER BY last_name;" #2 is caretaker
|
|
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if result == None:
|
|
result = []
|
|
else:
|
|
#we need to check if
|
|
sql = f"SELECT * FROM public.person_details WHERE user_name = '{user_name}';" #2 is caretaker
|
|
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if result == None:
|
|
result = []
|
|
|
|
pass
|
|
return result
|
|
|
|
def ListBeneficiaries(privilidges, user_info):
|
|
|
|
conn = get_db_connection()
|
|
with conn.cursor() as cur:
|
|
|
|
if (privilidges == "-1"):
|
|
sql = "SELECT * FROM public.person_details WHERE role_ids LIKE '%1%' ORDER BY last_name;" #1 is beneficiary
|
|
else:
|
|
#we need to find beneficiaries from list of deployments
|
|
sql = f"SELECT beneficiary_id FROM public.deployment_details WHERE deployment_id IN ({privilidges}) ORDER BY deployment_id ASC;"
|
|
cur.execute(sql)
|
|
result1 = cur.fetchall()#cur.fetchone()
|
|
if result1 == None:
|
|
result = []
|
|
return result
|
|
beneficiaries = ",".join(str(x[0]) for x in result1)
|
|
sql = f"SELECT * FROM public.person_details WHERE user_id IN ({beneficiaries}) OR user_edit = {user_info} AND role_ids LIKE '%1%' ORDER BY last_name;" #1 is beneficiary
|
|
logger.debug(f"sql= {sql}")
|
|
|
|
cur.execute(sql)
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if result == None:
|
|
result = []
|
|
|
|
return result
|
|
|
|
def UserDetails(user_id):
|
|
|
|
conn = get_db_connection()
|
|
|
|
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'person_details';"
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
columns_names = cur.fetchall()
|
|
|
|
sql = "SELECT * FROM public.person_details WHERE user_id = "+user_id
|
|
|
|
caretaker_record = {}
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchone() #cur.fetchall()
|
|
if result != None:
|
|
cnt = 0
|
|
for field in columns_names:
|
|
caretaker_record[field[0]] = result[cnt]
|
|
cnt += 1
|
|
|
|
return caretaker_record
|
|
|
|
def DeviceDetails(mac):
|
|
|
|
conn = get_db_connection()
|
|
|
|
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
columns_names = cur.fetchall()
|
|
|
|
sql = "SELECT * FROM public.devices WHERE device_mac = '" + mac + "'"
|
|
|
|
device_record = {}
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchone() #cur.fetchall()
|
|
if result != None:
|
|
cnt = 0
|
|
for field in columns_names:
|
|
device_record[field[0]] = result[cnt]
|
|
cnt += 1
|
|
|
|
return device_record
|
|
|
|
def GetDeviceDetailsSingle(device_id):
|
|
|
|
conn = get_db_connection()
|
|
|
|
sql = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'devices';"
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
columns_names = cur.fetchall()
|
|
|
|
sql = "SELECT * FROM public.devices WHERE device_id = '" + device_id + "'"
|
|
|
|
device_record = {}
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchone() #cur.fetchall()
|
|
if result != None:
|
|
cnt = 0
|
|
for field in columns_names:
|
|
device_record[field[0]] = result[cnt]
|
|
cnt += 1
|
|
|
|
return device_record
|
|
|
|
|
|
def DeploymentDetails(deployment_id):
|
|
|
|
deployment_record = {}
|
|
|
|
conn = get_db_connection()
|
|
|
|
with conn.cursor() as cur:
|
|
|
|
sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployments';"
|
|
cur.execute(sql)
|
|
columns_names = cur.fetchall()
|
|
|
|
sql = "SELECT * FROM public.deployments WHERE deployment_id = '" + deployment_id + "'"
|
|
|
|
cur.execute(sql)
|
|
result = cur.fetchone() #cur.fetchall()
|
|
if result != None:
|
|
cnt = 0
|
|
for field in columns_names:
|
|
deployment_record[field[3]] = result[cnt]
|
|
cnt += 1
|
|
|
|
sql = "SELECT * FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'deployment_details';"
|
|
cur.execute(sql)
|
|
columns_names = cur.fetchall()
|
|
|
|
sql = "SELECT * FROM public.deployment_details WHERE deployment_id = '" + deployment_id + "'"
|
|
|
|
cur.execute(sql)
|
|
result = cur.fetchone() #cur.fetchall()
|
|
if result != None:
|
|
cnt = 0
|
|
for field in columns_names:
|
|
deployment_record[field[3]] = result[cnt]
|
|
cnt += 1
|
|
|
|
|
|
return deployment_record
|
|
|
|
def ValidUser(user_name, password):
|
|
|
|
|
|
if use_pdb:
|
|
with get_db_connection() as db_conn:
|
|
priviledges, user_id= GetPriviledges(db_conn, user_name, password)
|
|
return priviledges, user_id
|
|
|
|
else:
|
|
pass
|
|
#container = GetReference("/MAC")
|
|
#try:
|
|
## We can do an efficient point read lookup on partition key and id
|
|
##response = container.read_item(item="64B708896BD8_temperature_2024-01-01_00", partition_key="64B708896BD8") #OK
|
|
##items = query_items(container, '64B708896BD8') #Too slow
|
|
##AddToLog("1!")
|
|
#privileges = GetCaretakers(container, email, password)
|
|
#return privileges
|
|
|
|
#except Exception as err:
|
|
#AddToLog("Error !1 "+str(err))
|
|
|
|
|
|
def GetMaxRole(user_name):
|
|
|
|
with get_db_connection() as db_conn:
|
|
|
|
sql = "SELECT role_ids FROM public.person_details WHERE user_name = '" + user_name + "'"
|
|
|
|
with db_conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if result != None and result != []:
|
|
return str(result[0])
|
|
return ""
|
|
|
|
|
|
def SelectOption(html_code, select_id, selected_item):
|
|
"""
|
|
Modifies HTML code to set the selected attribute for a specific option in a select element.
|
|
|
|
Args:
|
|
html_code (str): Original HTML code
|
|
select_id (str): ID of the select element to modify
|
|
selected_item (str or int): Value of the option to be selected
|
|
|
|
Returns:
|
|
str: Modified HTML code with the selected attribute added
|
|
"""
|
|
# Convert selected_item to string for comparison
|
|
selected_item = str(selected_item)
|
|
|
|
# Find the select element with the given ID
|
|
select_pattern = rf'<select[^>]*id=[\'"]?{select_id}[\'"]?[^>]*>(.*?)</select>'
|
|
select_match = re.search(select_pattern, html_code, re.IGNORECASE | re.DOTALL)
|
|
|
|
if not select_match:
|
|
return html_code # Return unchanged if select element not found
|
|
|
|
select_content = select_match.group(0)
|
|
select_content_orig = select_content
|
|
# Remove any existing selected attributes
|
|
select_content = re.sub(r'\s+selected(?=[>\s])', '', select_content, flags=re.IGNORECASE)
|
|
|
|
# Add selected attribute to the matching option
|
|
def replace_option(match):
|
|
value = re.search(r'value=[\'"]?([^\'">\s]+)', match.group(0))
|
|
if value and value.group(1) == selected_item:
|
|
# Add selected attribute before the closing >
|
|
return match.group(0).rstrip('>') + ' selected>'
|
|
return match.group(0)
|
|
|
|
modified_select = re.sub(
|
|
r'<option[^>]*>',
|
|
replace_option,
|
|
select_content
|
|
)
|
|
|
|
# Replace the original select element with the modified one
|
|
return html_code.replace(select_content_orig, modified_select)
|
|
|
|
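# Illustrative example for SelectOption above: given
#   <select id="gender"><option value="1">Male</option><option value="2">Female</option></select>
# SelectOption(html_code, 'gender', 2) clears any existing "selected" attribute and
# rewrites the matching option as <option value="2" selected>Female</option>.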
def FillFields(blob_data, record, form_type):
|
|
"""
|
|
Fill in the input fields in the HTML blob_data with values from the caretaker dictionary.
|
|
|
|
:param blob_data: str - The initial HTML string containing empty or placeholder input fields.
|
|
:param caretaker: dict - The dictionary containing values to populate the fields.
|
|
:return: str - The HTML string with the input fields filled with the appropriate values.
|
|
"""
|
|
# Ensure blob_data is a string
|
|
#blob_data = str(blob_data)
|
|
|
|
# Populate the fields
|
|
for field in record:
|
|
logger.debug(f"field= {field}")
|
|
if field == "user_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_user_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "deployment_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_deployment_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "device_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_device_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "user_name":
|
|
if record[field] != None:
|
|
escaped_string = html.escape(record[field])
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
elif field == "location":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'location', record[field])
|
|
|
|
elif field == "gender":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'gender', record[field])
|
|
|
|
elif field == "race":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'race', record[field])
|
|
|
|
elif field == "time_zone_s":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'time_zone_s', record[field])
|
|
|
|
elif field == "time_edit" or field == "user_edit":
|
|
pass
|
|
else:
|
|
if record[field] != None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
return blob_data
|
|
|
|
def StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE):
|
|
|
|
#print('\nCreating create_caretaker\n')
|
|
# Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
|
|
# This can be saved as JSON as is without converting into rows/columns.
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
if device_id == None or device_id == 0:
|
|
return 1
|
|
|
|
try:
|
|
|
|
|
|
sql = f"""
|
|
UPDATE public.devices
|
|
SET
|
|
radar_threshold = '[{TR},{BR},{TLIFE},{BLIFE}]'
|
|
WHERE device_id = {device_id};
|
|
"""
|
|
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
print(sql)
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
logger.debug("Written/updated!")
|
|
return 1
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
return 0
|
|
|
|
|
|
def StoreBeneficiary2DB(parameters, editing_user_id, user_id):
|
|
|
|
#print('\nCreating create_caretaker\n')
|
|
# Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
|
|
# This can be saved as JSON as is without converting into rows/columns.
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
error_string = ""
|
|
if editing_user_id == None or editing_user_id == "":
|
|
editing_user_id = "0"
|
|
|
|
try:
|
|
|
|
current_utc_time = datetime.datetime.now(timezone.utc)
|
|
|
|
# Convert to epoch time
|
|
current_epoch_time = current_utc_time.timestamp()
|
|
|
|
if editing_user_id != "0":
|
|
sql = f"""
|
|
UPDATE public.person_details
|
|
SET
|
|
email = '{CleanObject(parameters.get('email'))}',
|
|
user_name = '{CleanObject(parameters.get('new_user_name'))}',
|
|
first_name = '{CleanObject(parameters.get('first_name'))}',
|
|
last_name = '{CleanObject(parameters.get('last_name'))}',
|
|
address_street = '{CleanObject(parameters.get('address_street'))}',
|
|
address_city = '{CleanObject(parameters.get('address_city'))}',
|
|
address_zip = '{CleanObject(parameters.get('address_zip'))}',
|
|
address_state = '{CleanObject(parameters.get('address_state'))}',
|
|
address_country = '{CleanObject(parameters.get('address_country'))}',
|
|
time_edit = {current_epoch_time},
|
|
user_edit = {user_id},
|
|
role_ids = '{CleanObject(parameters.get('role_ids'))}',
|
|
phone_number = '{CleanObject(parameters.get('phone_number'))}',
|
|
picture = '{CleanObject(parameters.get('picture'))}',
|
|
key = '{CleanObject(parameters.get('key'))}'
|
|
WHERE user_id = {editing_user_id}; -- replace 34 with the actual person_id you want to update
|
|
"""
|
|
|
|
else:
|
|
sql = f"""
|
|
INSERT INTO public.person_details
|
|
(role_ids, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
|
|
VALUES
|
|
('{CleanObject(parameters.get('role_ids'))}', '{CleanObject(parameters.get('email'))}', '{CleanObject(parameters.get('new_user_name'))}',
|
|
'{CleanObject(parameters.get('first_name'))}', '{CleanObject(parameters.get('last_name'))}', '{CleanObject(parameters.get('address_street'))}',
|
|
'{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
|
|
'{CleanObject(parameters.get('address_country'))}', {current_epoch_time}, {user_id}, '{CleanObject(parameters.get('phone_number'))}',
|
|
'{CleanObject(parameters.get('picture'))}', '{CleanObject(parameters.get('key'))}');
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1, error_string
|
|
except Exception as err:
|
|
error_string = traceback.format_exc()
|
|
AddToLog(error_string)
|
|
return 0, error_string
|
|
|
|
|
|
def DeleteRecordFromDB(form_data):
|
|
|
|
caretaker = form_data['user_name']
|
|
privileges = GetPriviledgesOnly(caretaker)
|
|
|
|
if privileges != "-1":
|
|
AddToLog("Forbidden!")
|
|
return 0
|
|
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
function = form_data.get('function')
|
|
if function == "deployment_delete":
|
|
user_id = form_data['user_id']
|
|
editing_deployment_id = form_data['editing_deployment_id']
|
|
priviledges = form_data['priviledges']
|
|
if editing_deployment_id == None or editing_deployment_id == "" or editing_deployment_id == "0":
|
|
AddToLog("deployment_id is not defined")
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
try:
|
|
if user_id == "-1":
|
|
sql = f"""
|
|
DELETE FROM public.deployments WHERE deployment_id = {editing_deployment_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
sql = f"""
|
|
DELETE FROM public.deployment_details WHERE deployment_id = {editing_deployment_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Deleted!")
|
|
return 1
|
|
else:
|
|
#lets check if user_edit = user_id
|
|
sql = f"SELECT user_edit FROM public.deployments WHERE deployment_id = '{editing_deployment_id}'"
|
|
cur.execute(sql)
|
|
result = cur.fetchone()
|
|
if priviledges != "-1":
|
|
if result[0] != int(user_id):
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
|
|
sql = f"""
|
|
DELETE FROM public.deployments WHERE deployment_id = {editing_deployment_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
sql = f"""
|
|
DELETE FROM public.deployment_details WHERE deployment_id = {editing_deployment_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Deleted!")
|
|
return 1
|
|
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
|
|
elif function == "device_delete":
|
|
user_id = form_data['user_id']
|
|
editing_device_id = form_data['editing_device_id']
|
|
priviledges = form_data['priviledges']
|
|
if editing_device_id == None or editing_device_id == "" or editing_device_id == "0":
|
|
AddToLog("editing_device_id is not defined")
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
try:
|
|
if user_id == "-1":
|
|
sql = f"""
|
|
DELETE FROM public.deployments WHERE device_id = {editing_device_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Deleted!")
|
|
return 1
|
|
#else:
|
|
##lets check if user_edit = user_id
|
|
#sql = f"SELECT user_edit FROM public.deployments WHERE deployment_id = '{editing_deployment_id}'"
|
|
#cur.execute(sql)
|
|
#result = cur.fetchone()
|
|
#if priviledges != "-1":
|
|
#if result[0] != int(user_id):
|
|
#cur.close()
|
|
#conn.close()
|
|
#return 0
|
|
|
|
#sql = f"""
|
|
#DELETE FROM public.deployments WHERE device_id = {editing_device_id}
|
|
#"""
|
|
#logger.debug(f"sql= {sql}")
|
|
## Execute update query
|
|
#cur.execute(sql)
|
|
|
|
#conn.commit()
|
|
|
|
## Close the cursor and connection
|
|
#cur.close()
|
|
#conn.close()
|
|
|
|
#AddToLog("Deleted!")
|
|
#return 1
|
|
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
else:
|
|
|
|
#user_id = form_data['user_id']
|
|
editing_user_id = form_data['delete_user_id']
|
|
if editing_user_id == None or editing_user_id == "" or editing_user_id == "0":
|
|
AddToLog("user_id is not defined")
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
try:
|
|
if privileges == "-1": #user_id == "-1":
|
|
sql = f"""
|
|
DELETE FROM public.person_details WHERE user_id = {editing_user_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Deleted!")
|
|
return 1
|
|
else:
|
|
#lets check if user_edit = user_id
|
|
sql = f"SELECT user_edit FROM public.person_details WHERE user_id = '{editing_user_id}'"
|
|
cur.execute(sql)
|
|
result = cur.fetchone()
|
|
if result[0] != int(user_id):
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
|
|
sql = f"""
|
|
DELETE FROM public.person_details WHERE user_id = {editing_user_id}
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Deleted!")
|
|
return 1
|
|
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
cur.close()
|
|
conn.close()
|
|
return 0
|
|
|
|
def StoreCaretaker2DB(parameters, editing_user_id, user_id):
|
|
|
|
#print('\nCreating create_caretaker\n')
|
|
# Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
|
|
# This can be saved as JSON as is without converting into rows/columns.
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
if editing_user_id == None or editing_user_id == "":
|
|
editing_user_id = "0"
|
|
|
|
try:
|
|
|
|
current_utc_time = datetime.datetime.now(timezone.utc)
|
|
|
|
# Convert to epoch time
|
|
current_epoch_time = current_utc_time.timestamp()
|
|
|
|
if editing_user_id != "0":
|
|
sql = f"""
|
|
UPDATE public.person_details
|
|
SET
|
|
role_ids = '{parameters.get('role_ids')}',
|
|
access_to_deployments = '{parameters.get('access_to_deployments')}',
|
|
email = '{parameters.get('email')}',
|
|
user_name = '{parameters.get('new_user_name')}',
|
|
first_name = '{parameters.get('first_name')}',
|
|
last_name = '{parameters.get('last_name')}',
|
|
address_street = '{parameters.get('address_street')}',
|
|
address_city = '{parameters.get('address_city')}',
|
|
address_zip = '{parameters.get('address_zip')}',
|
|
address_state = '{parameters.get('address_state')}',
|
|
address_country = '{parameters.get('address_country')}',
|
|
time_edit = {current_epoch_time},
|
|
user_edit = {user_id},
|
|
phone_number = '{parameters.get('phone_number')}',
|
|
picture = '{parameters.get('picture')}',
|
|
key = '{parameters.get('key')}'
|
|
WHERE user_id = {editing_user_id}; -- replace 34 with the actual person_id you want to update
|
|
"""
|
|
|
|
else:
|
|
sql = f"""
|
|
INSERT INTO public.person_details
|
|
(role_ids, access_to_deployments, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
|
|
VALUES
|
|
('{parameters.get('role_ids')}', '{parameters.get('access_to_deployments')}', '{parameters.get('email')}', '{parameters.get('new_user_name')}', '{parameters.get('first_name')}',
|
|
'{parameters.get('last_name')}', '{parameters.get('address_street')}', '{parameters.get('address_city')}', '{parameters.get('address_zip')}', '{parameters.get('address_state')}',
|
|
'{parameters.get('address_country')}', {current_epoch_time}, {user_id}, '{parameters.get('phone_number')}', '{parameters.get('picture')}', '{parameters.get('key')}');
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
return 0
|
|
|
|
|
|
def StoreFlow2DB(user_name, time_s, flow_json):
|
|
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
query = f"""
|
|
INSERT INTO public.node_reds (user_name, last_activity, flow)
|
|
VALUES ('{user_name}', {time_s}, '{flow_json}')
|
|
ON CONFLICT (user_name)
|
|
DO UPDATE SET
|
|
last_activity = EXCLUDED.last_activity,
|
|
flow = EXCLUDED.flow
|
|
"""
|
|
logger.debug(f"sql= {query}")
|
|
|
|
try:
|
|
#cur.execute(query, (user_name, time_s, flow_json))
|
|
cur.execute(query)
|
|
conn.commit()
|
|
logger.debug(f"OK!")
|
|
return True
|
|
except Exception as e:
|
|
conn.rollback()
|
|
print(f"Error storing flow: {e}")
|
|
logger.debug(f"Error storing flow: {e}")
|
|
return False
|
|
finally:
|
|
cur.close()
|
|
conn.close()
|
|
logger.debug(f"Closing!")
|
|
|
|
def StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json):
|
|
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Extract the overlapping list
|
|
|
|
try:
|
|
sql = f"""
|
|
UPDATE public.deployments SET alarm_details = '{CleanObject(deployment_alarms_json)}' WHERE deployment_id = {deployment_id};
|
|
"""
|
|
|
|
logger.debug(f"sql= {sql}")
|
|
cur.execute(sql)
|
|
conn.commit()
|
|
|
|
sql1 = f"""
|
|
UPDATE public.devices SET alert_details = '{CleanObject(device_alarms_json)}' WHERE device_id = {device_id};
|
|
"""
|
|
|
|
logger.debug(f"sql= {sql1}")
|
|
cur.execute(sql1)
|
|
conn.commit()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1
|
|
except Exception as err:
|
|
return 0
|
|
|
|
def CleanObject(object_in, typee = "s"):
|
|
|
|
if typee == "n":
|
|
res = 0
|
|
if object_in == None or object_in == "":
|
|
return 0
|
|
if isinstance(object_in, str):
|
|
try:
|
|
res = object_in.replace("'", '"')
|
|
except:
|
|
pass
|
|
return res
|
|
else:
|
|
res = ""
|
|
if object_in == None:
|
|
return ""
|
|
if isinstance(object_in, str):
|
|
try:
|
|
res = object_in.replace("'", '"')
|
|
except:
|
|
pass
|
|
return res
|
|
return object_in
|
|
|
|
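# Illustrative examples for CleanObject above, which is used to keep values from
# breaking the single-quoted SQL literals built in the Store* functions:
#
#   CleanObject("O'Brien")   -> 'O"Brien'
#   CleanObject(None)        -> ""
#   CleanObject("", "n")     -> 0
#   CleanObject(42, "n")     -> 42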
def StoreDeployment2DB(parameters, editing_deployment_id):
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
if editing_deployment_id == None or editing_deployment_id == "":
|
|
editing_deployment_id = "0"
|
|
|
|
try:
|
|
|
|
current_utc_time = datetime.datetime.now(timezone.utc)
|
|
|
|
# Convert to epoch time
|
|
current_epoch_time = current_utc_time.timestamp()
|
|
|
|
if editing_deployment_id != "0":
|
|
sql = f"""
|
|
UPDATE public.deployments
|
|
SET
|
|
persons = {CleanObject(parameters.get('persons'), "n")},
|
|
gender = {CleanObject(parameters.get('gender'), "n")},
|
|
race = {CleanObject(parameters.get('race'), "n")},
|
|
born = {CleanObject(parameters.get('born'), "n")},
|
|
pets = {CleanObject(parameters.get('pets'), "n")},
|
|
time_zone_s = '{CleanObject(parameters.get('time_zone_s'))}',
|
|
user_edit = {CleanObject(parameters.get('user_id'), "n")},
|
|
time_edit = {current_epoch_time}
|
|
WHERE deployment_id = {CleanObject(editing_deployment_id, "n")};
|
|
"""
|
|
|
|
else:
|
|
sql = f"""
|
|
INSERT INTO public.deployments
|
|
(persons, gender, race, born, pets, time_zone_s, user_edit, time_edit)
|
|
VALUES
|
|
({CleanObject(parameters.get('persons'), "n")}, {CleanObject(parameters.get('gender'), "n")}, {CleanObject(parameters.get('race'), "n")},
|
|
{CleanObject(parameters.get('born'), "n")}, {CleanObject(parameters.get('pets'), "n")}, '{CleanObject(parameters.get('time_zone_s'))}',
|
|
{CleanObject(parameters.get('user_id'), "n")}, {current_epoch_time})
|
|
RETURNING deployment_id;
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
if editing_deployment_id == "0":
|
|
new_deployment_id = cur.fetchone()[0]
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
if editing_deployment_id != "0":
|
|
sql = f"""
|
|
UPDATE public.deployment_details
|
|
SET
|
|
beneficiary_id = {CleanObject(parameters.get('beneficiary_id'), "n")},
|
|
caretaker_id = {CleanObject(parameters.get('caretaker_id'), "n")},
|
|
owner_id = {CleanObject(parameters.get('owner_id'), "n")},
|
|
installer_id = {CleanObject(parameters.get('installer_id'), "n")},
|
|
address_street = '{CleanObject(parameters.get('address_street'))}',
|
|
address_city = '{CleanObject(parameters.get('address_city'))}',
|
|
address_zip = '{CleanObject(parameters.get('address_zip'))}',
|
|
address_state = '{CleanObject(parameters.get('address_state'))}',
|
|
address_country = '{CleanObject(parameters.get('address_country'))}',
|
|
|
|
wifis = '{CleanObject(parameters.get('wifis'))}',
|
|
devices = '{CleanObject(parameters.get('devices'))}',
|
|
lat = {CleanObject(parameters.get('lat'), "n")},
|
|
lng = {CleanObject(parameters.get('lng'), "n")},
|
|
gps_age = {CleanObject(parameters.get('gps_age'), "n")}
|
|
|
|
WHERE deployment_id = {editing_deployment_id};
|
|
"""
|
|
|
|
else:
|
|
sql = f"""
|
|
INSERT INTO public.deployment_details
|
|
(deployment_id, beneficiary_id, caretaker_id, owner_id, installer_id, address_street, address_city, address_zip, address_state, address_country)
|
|
VALUES
|
|
({new_deployment_id}, {CleanObject(parameters.get('beneficiary_id'), "n")}, {CleanObject(parameters.get('caretaker_id'), "n")}, {CleanObject(parameters.get('owner_id'), "n")}, {CleanObject(parameters.get('installer_id'), "n")},
|
|
'{CleanObject(parameters.get('address_street'))}', '{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
|
|
'{CleanObject(parameters.get('address_country'))}');
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
|
|
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
return 0
|
|
|
|
|
|
def StoreDevice2DB(parameters, editing_device_id):
|
|
|
|
# Create a device object. This object has nested properties and various types including numbers, DateTimes and strings.
|
|
# This can be saved as JSON as is without converting into rows/columns.
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
|
|
if editing_device_id == None or editing_device_id == "":
|
|
editing_device_id = "0"
|
|
|
|
try:
|
|
|
|
current_utc_time = datetime.datetime.now(timezone.utc)
|
|
|
|
# Convert to epoch time
|
|
current_epoch_time = current_utc_time.timestamp()
|
|
|
|
if editing_device_id != "0":
|
|
sql = f"""
|
|
UPDATE public.devices
|
|
SET
|
|
device_mac = '{CleanObject(parameters.get('device_mac'))}',
|
|
well_id = '{CleanObject(parameters.get('well_id'))}',
|
|
description = '{CleanObject(parameters.get('description'))}',
|
|
location = '{CleanObject(parameters.get('location'))}',
|
|
close_to = '{CleanObject(parameters.get('close_to'))}',
|
|
radar_threshold = '{CleanObject(parameters.get('radar_threshold'))}',
|
|
temperature_calib = '{CleanObject(parameters.get('temperature_calib'))}',
|
|
humidity_calib = '{CleanObject(parameters.get('humidity_calib'))}'
|
|
WHERE device_id = {editing_device_id};
|
|
"""
|
|
|
|
else:
|
|
sql = f"""
|
|
INSERT INTO public.devices
|
|
(device_mac, well_id, description, location, close_to, radar_threshold, temperature_calib, humidity_calib)
|
|
VALUES
|
|
('{CleanObject(parameters.get('device_mac'))}', '{CleanObject(parameters.get('well_id'))}', '{CleanObject(parameters.get('description'))}',
|
|
'{CleanObject(parameters.get('location'))}', '{CleanObject(parameters.get('close_to'))}', '{CleanObject(parameters.get('radar_threshold'))}',
|
|
'{CleanObject(parameters.get('temperature_calib'))}', '{CleanObject(parameters.get('humidity_calib'))}');
|
|
"""
|
|
logger.debug(f"sql= {sql}")
|
|
# Execute update query
|
|
cur.execute(sql)
|
|
|
|
# Commit the changes to the database
|
|
conn.commit()
|
|
|
|
# Close the cursor and connection
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1
|
|
except Exception as err:
|
|
AddToLog(traceback.format_exc())
|
|
return 0
|
|
|
|
|
|
|
|
def ShowAge(seconds):
|
|
minutes = int(seconds // 60)
|
|
hours = int(minutes // 60)
|
|
days = int(hours // 24)
|
|
|
|
if seconds >= 0:
|
|
hours = int(hours - (days * 24))
|
|
minutes = int(minutes - (days * 24 * 60) - (hours * 60))
|
|
seconds = int(seconds - (days * 24 * 60 * 60) - (hours * 60 * 60) - (minutes * 60))
|
|
|
|
if days > 0:
|
|
report = f"{int(days)} d {int(hours)} h {int(minutes)} m {int(seconds)} s"
|
|
elif hours > 0:
|
|
report = f"{int(hours)} h {int(minutes)} m {int(seconds)} s"
|
|
elif minutes > 0:
|
|
report = f"{int(minutes)} m {int(seconds)} s"
|
|
else:
|
|
report = f"{int(seconds)} s"
|
|
|
|
return report
|
|
else:
|
|
return "0 s"
|
|
|
|
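# Illustrative examples for ShowAge above, which formats an age in seconds as a
# short "d/h/m/s" string:
#
#   ShowAge(5)      -> "5 s"
#   ShowAge(3665)   -> "1 h 1 m 5 s"
#   ShowAge(90061)  -> "1 d 1 h 1 m 1 s"
#   ShowAge(-3)     -> "0 s"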
def UpdateDevicesTable(html_string, devices, users):
|
|
#ID, Well id, MAC, Last_Message, Location, Description, Deployment
|
|
table_rows_string = ""
|
|
for device in devices:
|
|
#result = next(item for item in users if item[0] == device[6])
|
|
|
|
deployment_name = "?"
|
|
for item in users:
|
|
if item[0] == device[6]:
|
|
result = item
|
|
deployment_name = result[1]
|
|
if result[2] != None:
|
|
deployment_name = deployment_name + " " + result[2]
|
|
break
|
|
|
|
|
|
|
|
|
|
mac = device[2]
|
|
if mac == "64B7088903B4":
|
|
print("stop")
|
|
mac_row_string = f' <td style="text-align:center"><a href="#" onclick="OpenDevice(\'{mac}\'); return false;">{mac}</a></td>\n'
|
|
age = time.time() - device[3]
|
|
|
|
if (age < 300):
|
|
row_sting = f' <tr style="background-color: #90FFD7;">\n <th scope="row" style="text-align:center">{device[0]}</th>\n'
|
|
else:
|
|
row_sting = f' <tr>\n <th scope="row" style="text-align:center">{device[0]}</th>\n'
|
|
|
|
row_ending = f' <td style="text-align:center"><input type="checkbox" onchange="IsItSingle();"></td>\n </tr>\n'
|
|
for col_cnt in range(1, len(device)):
|
|
column_value = device[col_cnt]
|
|
#print(column_value)
|
|
if col_cnt == 2:
|
|
col_string_template = mac_row_string
|
|
elif col_cnt == 3:
|
|
if column_value > 0:
|
|
col_string_template = f' <td style="text-align:center">{ShowAge(age)}</td>\n'
|
|
else:
|
|
col_string_template = f' <td style="text-align:center">No data</td>\n'
|
|
elif col_cnt == 4:
|
|
col_string_template = f' <td style="text-align:center">{column_value}</td>\n'
|
|
elif col_cnt == 5:
|
|
if column_value == None:
|
|
col_string_template = f' <td style="text-align:center"></td>\n'
|
|
else:
|
|
col_string_template = f' <td style="text-align:center">{column_value}</td>\n'
|
|
elif col_cnt == 6:
|
|
col_string_template = f' <td style="text-align:center"><a href="#" onclick="OpenDeployment(\'{column_value}\')" title="{deployment_name}">{column_value}</a></td>\n';
|
|
else:
|
|
if column_value == None:
|
|
column_value = ""
|
|
col_string_template = f' <td style="text-align:center">{column_value}</td>\n'
|
|
row_sting = row_sting + col_string_template
|
|
row_sting = row_sting + row_ending
|
|
table_rows_string = table_rows_string + row_sting
|
|
#print(table_rows_string)
|
|
|
|
html_string = html_string.replace("###ROWS###",table_rows_string)
|
|
return html_string
|
|
|
|
def UpdateDeploymentsSelector(html_string, deployments, include_all=True, selected="1"):
|
|
# <option value="All" selected>All</option>
|
|
if include_all:
|
|
selector_string = f' <option value="0">All</option>\n'
|
|
else:
|
|
selector_string = ''
|
|
|
|
for deployment in deployments:
|
|
first_name = ""
|
|
last_name = ""
|
|
if deployment[1] != None:
|
|
first_name = deployment[1]
|
|
if deployment[2] != None:
|
|
last_name = deployment[2]
|
|
if deployment[0] == int(selected):
|
|
choice_string = f' <option value="{deployment[0]}" selected>{deployment[0]} {first_name} {last_name}</option>\n'
|
|
else:
|
|
choice_string = f' <option value="{deployment[0]}">{deployment[0]} {first_name} {last_name}</option>\n'
|
|
selector_string = selector_string + choice_string
|
|
#print(selector_string)
|
|
|
|
html_string = html_string.replace("###INSTALLS###",selector_string)
|
|
return html_string
|
|
|
|
def GetDeviceDetails(cur, deployment_ids, location_id):
|
|
|
|
#ID, Well id, MAC, Last_Message, Location, Description, Deployment
|
|
macs = [mac for _, mac in deployment_ids]
|
|
#macs = list(deployment_ids.keys())
|
|
macs_string_nq = ",".join(macs)
|
|
macs_string = "'" + "','".join(macs) + "'"
|
|
|
|
if location_id == -1:
|
|
sql = f"""
|
|
WITH ordered_macs AS (
|
|
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
|
|
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
|
|
)
|
|
SELECT d.*
|
|
FROM public.devices d
|
|
JOIN ordered_macs om ON d.device_mac = om.mac::text
|
|
WHERE device_mac IN ({macs_string})
|
|
ORDER BY om.position;
|
|
"""
|
|
else:
|
|
sql = f"""
|
|
WITH ordered_macs AS (
|
|
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
|
|
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
|
|
)
|
|
SELECT d.*
|
|
FROM public.devices d
|
|
JOIN ordered_macs om ON d.device_mac = om.mac::text
|
|
WHERE device_mac IN ({macs_string}) AND location = {location_id}
|
|
ORDER BY om.position;
|
|
"""
|
|
|
|
cur.execute(sql)
|
|
print(sql)
|
|
devices_ids_records = cur.fetchall()
|
|
all_details = []
|
|
|
|
|
|
devices_ids_list = [x[0] for x in devices_ids_records]
|
|
device_ids_string = ",".join(map(str, devices_ids_list))
|
|
#sql = f"SELECT device_id, MAX(time) as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) GROUP BY device_id" #to slow
|
|
sql = f"SELECT DISTINCT ON (device_id) device_id, time as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) AND time > now() - INTERVAL '1 day' ORDER BY device_id, time DESC"
|
|
cur.execute(sql)
|
|
print(sql)
|
|
devices_times = cur.fetchall()#cur.fetchone()
|
|
found_device_details = {}
|
|
for device_record in devices_times:
|
|
device_id, last_message_time = device_record
|
|
found_device_details[device_id] = last_message_time
|
|
cnt = 0
|
|
for device_table_record in devices_ids_records:
|
|
if len(devices_times) > 0:
|
|
device_id = device_table_record[0]
|
|
if device_id in found_device_details:
|
|
last_message_time = found_device_details[device_id]
|
|
last_message_epoch = int(last_message_time.timestamp())
|
|
else:
|
|
try:
|
|
last_message_time = int(device_table_record[14])
|
|
except:
|
|
last_message_time = 0
|
|
last_message_epoch = last_message_time
|
|
else:
|
|
last_message_time = 0
|
|
last_message_epoch = 0
|
|
|
|
#print(last_message_epoch)
|
|
#print(type(last_message_epoch))
|
|
device_id = device_table_record[0]
|
|
mac = device_table_record[1]
|
|
well_id = device_table_record[2]
|
|
description = device_table_record[3]
|
|
if description == None:
|
|
description = ""
|
|
if device_table_record[5] != None:
|
|
if device_table_record[5] != "":
|
|
description = description + " Close to " + device_table_record[5]
|
|
location_id = device_table_record[4]
|
|
if location_id == None:
|
|
location_id = 0
|
|
row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
|
|
cnt += 1
|
|
all_details.append(row_data)
|
|
|
|
return all_details
|
|
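# Side note on the ordered_macs CTE used above: pairing unnest() with
# generate_series() preserves the caller's MAC order. PostgreSQL can express the
# same ordering more directly with WITH ORDINALITY; a hedged sketch using the same
# placeholders and tables as above:
#
#   WITH ordered_macs AS (
#       SELECT mac, ordinality AS position
#       FROM unnest(string_to_array('{macs_string_nq}', ',')) WITH ORDINALITY AS t(mac, ordinality)
#   )
#   SELECT d.*
#   FROM public.devices d
#   JOIN ordered_macs om ON d.device_mac = om.mac::text
#   ORDER BY om.position;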
|
|
def GetDeviceDetailsComplete(cur, deployment_ids, location_id):
|
|
|
|
#ID, Well id, MAC, Last_Message, Location, Description, Deployment
|
|
macs = [mac for _, mac in deployment_ids]
|
|
#macs = list(deployment_ids.keys())
|
|
macs_string_nq = ",".join(macs)
|
|
macs_string = "'" + "','".join(macs) + "'"
|
|
|
|
if location_id == -1:
|
|
sql = f"""
|
|
WITH ordered_macs AS (
|
|
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
|
|
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
|
|
)
|
|
SELECT d.*
|
|
FROM public.devices d
|
|
JOIN ordered_macs om ON d.device_mac = om.mac::text
|
|
WHERE device_mac IN ({macs_string})
|
|
ORDER BY om.position;
|
|
"""
|
|
else:
|
|
sql = f"""
|
|
WITH ordered_macs AS (
|
|
SELECT unnest(string_to_array('{macs_string_nq}', ',')) as mac,
|
|
generate_series(1, array_length(string_to_array('{macs_string_nq}', ','), 1)) as position
|
|
)
|
|
SELECT d.*
|
|
FROM public.devices d
|
|
JOIN ordered_macs om ON d.device_mac = om.mac::text
|
|
WHERE device_mac IN ({macs_string}) AND location = {location_id}
|
|
ORDER BY om.position;
|
|
"""
|
|
|
|
cur.execute(sql)
|
|
print(sql)
|
|
devices_ids_records = cur.fetchall()
|
|
all_details = []
|
|
|
|
|
|
devices_ids_list = [x[0] for x in devices_ids_records]
|
|
device_ids_string = ",".join(map(str, devices_ids_list))
|
|
#sql = f"SELECT device_id, MAX(time) as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) GROUP BY device_id" #to slow
|
|
sql = f"SELECT DISTINCT ON (device_id) device_id, time as last_reading_time FROM sensor_readings WHERE device_id IN ({device_ids_string}) AND time > now() - INTERVAL '1 day' ORDER BY device_id, time DESC"
|
|
cur.execute(sql)
|
|
print(sql)
|
|
devices_times = cur.fetchall()#cur.fetchone()
|
|
found_device_details = {}
|
|
for device_record in devices_times:
|
|
device_id, last_message_time = device_record
|
|
found_device_details[device_id] = last_message_time
|
|
cnt = 0
|
|
for device_table_record in devices_ids_records:
|
|
if len(devices_times) > 0:

device_id = device_table_record[0]

if device_id in found_device_details:
|
|
last_message_time = found_device_details[device_id]
|
|
last_message_epoch = int(last_message_time.timestamp())
|
|
else:
|
|
try:
|
|
last_message_time = int(device_table_record[14])
|
|
except:
|
|
last_message_time = 0
|
|
last_message_epoch = last_message_time
|
|
else:
|
|
last_message_time = 0
|
|
last_message_epoch = 0
|
|
|
|
#print(last_message_epoch)
|
|
#print(type(last_message_epoch))
|
|
device_id = device_table_record[0]
|
|
mac = device_table_record[1]
|
|
well_id = device_table_record[2]
|
|
description = device_table_record[3]
|
|
alarm_details = device_table_record[16]
|
|
if description == None:
|
|
description = ""
|
|
if device_table_record[5] != None:
|
|
if device_table_record[5] != "":
|
|
description = description + " Close to " + device_table_record[5]
|
|
location_id = device_table_record[4]
|
|
if location_id == None:
|
|
location_id = 0
|
|
row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0], alarm_details]
|
|
cnt += 1
|
|
all_details.append(row_data)
|
|
|
|
return all_details
|
|
|
|
def GetVisibleDevices(deployments):
|
|
|
|
devices_details = []
|
|
stt = time.time()
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
#list all devices that user has access to
|
|
if deployments == "-1":
|
|
sql = "SELECT device_mac FROM public.devices ORDER BY device_id ASC"# SELECT deployment_id, devices FROM public.deployment_details"
|
|
macs_group = []
|
|
deployment_ids = []
|
|
print(sql)
|
|
cur.execute(sql)
|
|
macs_records = cur.fetchall()#cur.fetchone()
|
|
for record in macs_records:
|
|
deployment_ids.append((0, record[0]))
|
|
devices_details = GetDeviceDetails(cur, deployment_ids, -1)
|
|
else:
|
|
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
|
|
|
|
print(sql)
|
|
cur.execute(sql)
|
|
devices_groups = cur.fetchall()#cur.fetchone()
|
|
deployment_ids = []
|
|
for deployment_id, dev_group in devices_groups:
|
|
if dev_group != None:
|
|
if len(dev_group) > 10:
|
|
if "[" not in dev_group:
|
|
if "," not in dev_group:
|
|
dev_group = '["' + dev_group + '"]'
|
|
else:
|
|
dev_group = dev_group.replace(" ", "")
|
|
dev_group = dev_group.replace(",", '","')
|
|
dev_group = '["' + dev_group + '"]'
|
|
|
|
macs_group = literal_eval(dev_group)
|
|
|
|
for mac in macs_group:
|
|
deployment_ids.append((deployment_id, mac))
|
|
else:
|
|
print(f"Deployment {deployment_id} has dev_group empty")
|
|
devices_details = GetDeviceDetails(cur, deployment_ids, -1)
|
|
#devices_details.append(devices_detail)
|
|
|
|
return devices_details
|
|
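# The branch above normalizes the deployment 'devices' column, which may arrive as
# a JSON-style list literal or as a bare comma-separated string of MACs. The same
# normalization as a standalone helper (hypothetical name, not used elsewhere):
def _parse_dev_group(dev_group):
    if dev_group is None or len(dev_group) <= 10:
        return []
    if "[" in dev_group:
        return literal_eval(dev_group)                    # already a list literal
    return [m.strip() for m in dev_group.split(",")]      # bare "MAC1,MAC2,..." string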
|
|
def GetVisibleDevicesPerLocation(deployments, location):
|
|
|
|
devices_details = []
|
|
|
|
with get_db_connection() as conn:
|
|
#list all devices that user has access to
|
|
if deployments == "-1" or deployments == "0":
|
|
sql = "SELECT deployment_id, devices FROM public.deployment_details"
|
|
else:
|
|
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
|
|
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
devices_groups = cur.fetchall()#cur.fetchone()
|
|
deployment_ids = []
|
|
for deployment_id, dev_group in devices_groups:
|
|
if dev_group != None:
|
|
if len(dev_group) > 10:
|
|
if dev_group[0] == "[":
|
|
macs_group = literal_eval(dev_group)
|
|
else:
|
|
macs_group = dev_group.split(',')
|
|
for mac in macs_group:
|
|
deployment_ids.append((deployment_id, mac))
|
|
|
|
devices_details = GetDeviceDetails(cur, deployment_ids, location_indexes[location])
|
|
#devices_details.append(devices_detail)
|
|
|
|
return devices_details
|
|
|
|
def GetVisibleDevicesPerLocationComplete(deployments, location):
|
|
|
|
devices_details = []
|
|
|
|
with get_db_connection() as conn:
|
|
#list all devices that user has access to
|
|
if deployments == "-1" or deployments == "0":
|
|
sql = "SELECT deployment_id, devices FROM public.deployment_details"
|
|
else:
|
|
sql = f"SELECT deployment_id, devices FROM public.deployment_details WHERE deployment_id IN ({deployments})"
|
|
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
devices_groups = cur.fetchall()#cur.fetchone()
|
|
deployment_ids = []
|
|
for deployment_id, dev_group in devices_groups:
|
|
if dev_group != None:
|
|
if len(dev_group) > 10:
|
|
if dev_group[0] == "[":
|
|
macs_group = literal_eval(dev_group)
|
|
else:
|
|
macs_group = dev_group.split(',')
|
|
for mac in macs_group:
|
|
deployment_ids.append((deployment_id, mac))
|
|
|
|
devices_details = GetDeviceDetailsComplete(cur, deployment_ids, location_indexes[location])
|
|
#devices_details.append(devices_detail)
|
|
|
|
return devices_details
|
|
|
|
def GetUsersFromDeployments(deployments):
|
|
#list all devices that user has access to
|
|
deployments_dets = []
|
|
with get_db_connection() as conn:
|
|
try:
|
|
if deployments == "-1":
|
|
sql = f"""
|
|
SELECT dd.deployment_id, pd.first_name, pd.last_name
|
|
FROM deployment_details dd
|
|
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
|
|
ORDER BY dd.deployment_id;
|
|
"""
|
|
else:
|
|
sql = f"""
|
|
SELECT dd.deployment_id, pd.first_name, pd.last_name
|
|
FROM deployment_details dd
|
|
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
|
|
WHERE dd.deployment_id IN ({deployments})
|
|
ORDER BY dd.deployment_id;
|
|
"""
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
deployments_dets = cur.fetchall()#cur.fetchone()
|
|
except Exception as err:
|
|
logger.error("GetUsersFromDeployments "+str(err) +" "+sql)
|
|
|
|
return deployments_dets
|
|
|
|
def GetPreviousDate(current_date):
|
|
date_obj = datetime.datetime.strptime(current_date, "%Y-%m-%d")
|
|
# Subtract one day
|
|
previous_date = date_obj - timedelta(days=1)
|
|
# Convert back to string format
|
|
previous_date_str = previous_date.strftime("%Y-%m-%d")
|
|
return(previous_date_str)
|
|
|
|
def CovertToIsoTime(date_s, n_minute):
|
|
|
|
hours = n_minute // 60 # Integer division
|
|
minutes = n_minute % 60
|
|
|
|
base_date = datetime.datetime.strptime(date_s, "%Y-%m-%d")
|
|
final_datetime = base_date + timedelta(hours=hours, minutes=minutes)
|
|
iso_timestamp = final_datetime.isoformat()
|
|
return iso_timestamp
|
|
|
|
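# Quick illustration (comments only) of the two date helpers above:
#   GetPreviousDate("2024-05-10")      -> "2024-05-09"
#   CovertToIsoTime("2024-05-10", 75)  -> "2024-05-10T01:15:00"   (75 minutes past midnight)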
|
|
def sleep_length(presence_list, short_absence_threshold=15):
|
|
"""
|
|
Calculate the total sleep duration and wake time based on presence data.
|
|
|
|
This function correctly interprets the presence_list to determine sleep duration by:
|
|
1. Properly aggregating the total sleep time from all significant in-bed periods
|
|
2. Considering short absences as part of the same sleep session
|
|
3. Determining the wake time when the main sleep session ended
|
|
|
|
Args:
|
|
presence_list (list): List of tuples indicating bed presence/absence
|
|
short_absence_threshold (int, optional): Maximum duration in decas to consider
|
|
an absence "short" and still count as sleep.
|
|
Default is 15 (2.5 minutes)
|
|
|
|
Returns:
|
|
tuple: (sleep_duration_minutes, wake_time_minutes)
|
|
sleep_duration_minutes: Total sleep duration in minutes
|
|
wake_time_minutes: Minute in the day when person was determined to be
|
|
done sleeping (minutes since midnight)
|
|
"""
|
|
# Extract in-bed periods and out-of-bed periods
|
|
in_bed_periods = []
|
|
out_bed_periods = []
|
|
|
|
# First process the raw data into periods
|
|
for i in range(len(presence_list)):
|
|
deca_index, deca_count = presence_list[i]
|
|
|
|
# Skip separator tuples where deca_count is 0
|
|
if deca_count == 0:
|
|
continue
|
|
|
|
if deca_count > 0: # In bed
|
|
# Special case for the midnight (first) tuple
|
|
if i == 0 and deca_index == 0:
|
|
# This is time in bed before midnight
|
|
start_deca = -deca_count # Negative because it's before midnight
|
|
end_deca = 0 # Midnight
|
|
else:
|
|
start_deca = deca_index
|
|
end_deca = deca_index + deca_count
|
|
|
|
in_bed_periods.append({
|
|
'start': start_deca,
|
|
'end': end_deca,
|
|
'duration': deca_count
|
|
})
|
|
else: # Out of bed
|
|
out_bed_periods.append({
|
|
'start': deca_index,
|
|
'end': deca_index + abs(deca_count),
|
|
'duration': abs(deca_count)
|
|
})
|
|
|
|
# Sort periods to ensure chronological order
|
|
in_bed_periods.sort(key=lambda p: p['start'])
|
|
out_bed_periods.sort(key=lambda p: p['start'])
|
|
|
|
# Now determine which periods are part of the main night's sleep
|
|
# For this, we need to identify consecutive in-bed periods separated by short absences
|
|
|
|
# Start by finding the key sleep segments - significant periods in bed during night time
|
|
night_time_end_deca = 4320 # 12 hours after midnight
|
|
sleep_segments = []
|
|
|
|
# Merge in-bed periods that are separated by short absences
|
|
merged_periods = []
|
|
current_period = None
|
|
|
|
for period in in_bed_periods:
|
|
# If we're at the start or after a long break, begin a new period
|
|
if current_period is None:
|
|
current_period = period.copy()
|
|
else:
|
|
# Check if this period starts shortly after the previous one ends
|
|
gap = period['start'] - current_period['end']
|
|
|
|
# If the gap is negative, the periods overlap (data error), treat as continuous
|
|
if gap < 0:
|
|
gap = 0
|
|
|
|
# If the gap is short enough, merge the periods
|
|
if gap <= short_absence_threshold:
|
|
# Extend the current period
|
|
current_period['end'] = period['end']
|
|
current_period['duration'] += period['duration'] - gap
|
|
else:
|
|
# Gap too long, add the completed period and start a new one
|
|
merged_periods.append(current_period)
|
|
current_period = period.copy()
|
|
|
|
# Add the last period if there is one
|
|
if current_period is not None:
|
|
merged_periods.append(current_period)
|
|
|
|
# Find the main sleep period - prioritize periods that span midnight or early morning
|
|
# and have significant duration
|
|
significant_sleep_threshold = 180 # 30 minutes (180 decas)
|
|
night_periods = [p for p in merged_periods if
|
|
(p['start'] <= 0 or p['start'] <= night_time_end_deca) and
|
|
p['duration'] >= significant_sleep_threshold]
|
|
|
|
if night_periods:
|
|
# Find the period with the largest duration
|
|
main_sleep_period = max(night_periods, key=lambda p: p['duration'])
|
|
|
|
# Calculate total sleep duration
|
|
sleep_duration_minutes = round(main_sleep_period['duration'] / 6) # Convert to minutes
|
|
|
|
# Wake time is when this period ended
|
|
wake_time_minutes = max(0, round(main_sleep_period['end'] / 6)) # Ensure it's not negative
|
|
|
|
return (sleep_duration_minutes, wake_time_minutes)
|
|
|
|
# No significant sleep periods found
|
|
return (0, 0)
|
|
|
|
|
|
# Example usage (sleep_length returns a (sleep_minutes, wake_minute) tuple):
# sleep_minutes, wake_minute = sleep_length(presence_list)                              # default threshold
# sleep_minutes, wake_minute = sleep_length(presence_list, short_absence_threshold=30)  # allow 5-minute absences
#
# presence_list = [
#     [0, 554], [3303, 3857], [3303, 0], [3387, -84], [3387, 0], [3388, 1], [3388, 0],
#     [3668, -280], [3668, 0], [3669, 1], [3669, 0], [3699, -30], [3699, 0], [3700, 1],
#     [3700, 0], [3863, -163], [3863, 0], [3864, 1], [3864, 0], [4418, -554], [4418, 0],
#     [4419, 1], [4419, 0], [4547, -128], [4547, 0], [4548, 1], [4548, 0], [4603, -55],
#     [4603, 0], [4604, 1], [4604, 0], [4965, -361], [4965, 0], [4966, 1], [4966, 0],
#     [4984, -18], [4984, 0], [4985, 1], [4985, 0], [8639, -3654]
# ]
# print(f"Sleep duration: {sleep_length(presence_list)[0]} minutes")
|
def filter_short_groups_c_wc_old(presence_list, filter_size, device_id_str, from_date, to_date, time_zone_s, refresh = False):
|
|
|
|
#days = presence_list
|
|
#for from_date, to_date
|
|
|
|
tz = pytz.timezone(time_zone_s)
|
|
# Get current time in that timezone
|
|
current_time = datetime.datetime.now(tz)
|
|
# Return just the date part as string
|
|
now_date_str = current_time.strftime("%Y-%m-%d")
|
|
|
|
start_date = datetime.datetime.strptime(from_date, "%Y-%m-%d")
|
|
end_date = datetime.datetime.strptime(to_date, "%Y-%m-%d")
|
|
|
|
last_offset = 0
|
|
#if to_date == now_date_str:
|
|
# last_offset = 1
|
|
|
|
# Loop through each date (including end_date)
|
|
current_date = start_date
|
|
dates_list = []
|
|
days_difference = 1 + (end_date - start_date).days
|
|
whole_result = [0] * 6 * 1440 * (days_difference)
|
|
|
|
is_long = False
|
|
if len(presence_list)/(6 * 1440) > (days_difference): #long version
|
|
is_long = True
|
|
|
|
while current_date <= end_date:
|
|
current_date_str = current_date.strftime("%Y-%m-%d")
|
|
print(current_date_str)
|
|
dates_list.append(current_date_str)
|
|
current_date += timedelta(days=1)
|
|
|
|
for day in range(1, days_difference-last_offset+1):
|
|
print(day)
|
|
end_index = (1 + day) * 6 * 1440
|
|
if end_index > len(presence_list):
|
|
end_index = len(presence_list)
|
|
|
|
if is_long:
|
|
start_index = end_index - 2 * 6 * 1440
|
|
else:
|
|
start_index = end_index - 6 * 1440
|
|
|
|
current_date_str = dates_list[day-1]
|
|
filename_day_presence = f"/{device_id_str}/{device_id_str}_{current_date_str}_{filter_size}_presence.bin"
|
|
filtered_day_str = None
|
|
if refresh == False:
|
|
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
|
|
|
|
if filtered_day_str == None:
|
|
filtered_day = filter_short_groups_c(presence_list[start_index:end_index], filter_size, device_id_str, from_date)
|
|
SaveGenericObjectInBlob("filtered-presence", filename_day_presence, filtered_day)
|
|
else:
|
|
filtered_day = json.loads(filtered_day_str)
|
|
whole_result[start_index:end_index] = filtered_day
|
|
|
|
if current_date_str != to_date:
|
|
end_index = len(presence_list)
|
|
start_index = end_index - 2 * 6 * 1440
|
|
filtered_day = filter_short_groups_c(presence_list[start_index:end_index], filter_size, device_id_str, from_date)
|
|
whole_result[start_index:end_index] = filtered_day
|
|
|
|
return whole_result
|
|
|
|
def filter_short_groups_c_wc(presence_list, filter_size, device_id_str, from_date, to_date, time_zone_s, refresh=False):
|
|
"""
|
|
Filter out short groups across multiple days.
|
|
Preserves original logic of using previous day's data for midnight boundary handling.
|
|
"""
|
|
# Setup timezone and current time
|
|
tz = pytz.timezone(time_zone_s)
|
|
current_time = datetime.datetime.now(tz)
|
|
now_date_str = current_time.strftime("%Y-%m-%d")
|
|
|
|
start_date = datetime.datetime.strptime(from_date, "%Y-%m-%d")
|
|
end_date = datetime.datetime.strptime(to_date, "%Y-%m-%d")
|
|
|
|
# Build dates list
|
|
current_date = start_date
|
|
dates_list = []
|
|
while current_date <= end_date:
|
|
current_date_str = current_date.strftime("%Y-%m-%d")
|
|
print(current_date_str)
|
|
dates_list.append(current_date_str)
|
|
current_date += timedelta(days=1)
|
|
|
|
days_difference = len(dates_list)
|
|
|
|
# Handle current day limitation
|
|
samples_per_day = 6 * 1440
|
|
total_samples = samples_per_day * days_difference
|
|
|
|
# If today is the last day, limit the data
|
|
effective_total_samples = total_samples
|
|
if to_date == now_date_str:
|
|
current_minute_of_day = current_time.hour * 60 + current_time.minute
|
|
current_sample_of_day = min(current_minute_of_day * 6, samples_per_day)
|
|
effective_total_samples = (days_difference - 1) * samples_per_day + current_sample_of_day
|
|
print(f"Today detected: limiting to {current_sample_of_day} samples for last day")
|
|
|
|
# Initialize result - use effective total samples
|
|
whole_result = [0] * effective_total_samples
|
|
|
|
# Determine if we have "long" data (more than expected days)
|
|
is_long = len(presence_list) > (days_difference * samples_per_day)
|
|
|
|
# Process each day (0-indexed to avoid confusion)
|
|
for day_idx in range(days_difference):
|
|
current_date_str = dates_list[day_idx]
|
|
print(f"Processing day {day_idx + 1}: {current_date_str}")
|
|
|
|
# Calculate result array indices for this day
|
|
result_start_idx = day_idx * samples_per_day
|
|
result_end_idx = (day_idx + 1) * samples_per_day
|
|
|
|
# For the last day, if it's today, limit the end index
|
|
if day_idx == days_difference - 1 and to_date == now_date_str:
|
|
result_end_idx = result_start_idx + current_sample_of_day
|
|
|
|
# Skip if this day's range is beyond our result array
|
|
if result_start_idx >= len(whole_result):
|
|
break
|
|
|
|
# Ensure we don't exceed result array bounds
|
|
result_end_idx = min(result_end_idx, len(whole_result))
|
|
|
|
# Calculate input data range
|
|
if is_long:
|
|
# Use 2 days of context (previous day + current day)
|
|
input_end_idx = min(len(presence_list), result_end_idx)
|
|
input_start_idx = max(0, input_end_idx - 2 * samples_per_day)
|
|
else:
|
|
# Use 1 day of data
|
|
input_end_idx = min(len(presence_list), result_end_idx)
|
|
input_start_idx = max(0, input_end_idx - samples_per_day)
|
|
|
|
# Skip if no input data available
|
|
if input_start_idx >= input_end_idx:
|
|
print(f"No input data available for {current_date_str}")
|
|
continue
|
|
|
|
# Try to load cached data
|
|
filename_day_presence = f"/{device_id_str}/{device_id_str}_{current_date_str}_{filter_size}_presence.bin"
|
|
filtered_day_str = None
|
|
|
|
if not refresh:
|
|
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, current_date_str)
|
|
|
|
if filtered_day_str is not None and filtered_day_str != "":
|
|
has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
|
|
if has_larger:
|
|
filtered_day_str = None
|
|
|
|
if filtered_day_str is None or filtered_day_str == "":
|
|
# Filter the input data
|
|
input_data = presence_list[input_start_idx:input_end_idx]
|
|
print(input_start_idx, input_end_idx, filter_size, device_id_str, from_date, len(input_data))
|
|
filtered_data = filter_short_groups_c(input_data, filter_size, device_id_str, from_date)
|
|
|
|
# Extract the portion corresponding to this day
|
|
if is_long:
|
|
# We have 2 days of data, take the second day
|
|
day_data_start = samples_per_day
|
|
else:
|
|
# We have 1 day of data, take it all
|
|
day_data_start = 0
|
|
|
|
# Calculate how much data we need for this day
|
|
needed_samples = result_end_idx - result_start_idx
|
|
day_data_end = day_data_start + needed_samples
|
|
|
|
# Extract the day's portion, ensuring we don't exceed bounds
|
|
if day_data_start < len(filtered_data):
|
|
filtered_day = filtered_data[day_data_start:min(day_data_end, len(filtered_data))]
|
|
else:
|
|
filtered_day = []
|
|
|
|
# Cache the result
|
|
SaveGenericObjectInBlob("filtered-presence", filename_day_presence, filtered_day)
|
|
else:
|
|
filtered_day = json.loads(filtered_day_str)
|
|
|
|
# Copy to result array
|
|
copy_length = min(len(filtered_day), result_end_idx - result_start_idx)
|
|
if copy_length > 0:
|
|
whole_result[result_start_idx:result_start_idx + copy_length] = filtered_day[:copy_length]
|
|
|
|
print(f"Completed {current_date_str}: copied {copy_length} samples")
|
|
|
|
return whole_result
|
|
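# Hedged usage sketch for filter_short_groups_c_wc (values below are hypothetical).
# The function expects 10-second presence samples (6 * 1440 per day) and caches each
# filtered day in the "filtered-presence" bucket.
def _example_filter_short_groups_c_wc(presence_list, device_id_str):
    # presence_list is assumed to span 2024-05-09..2024-05-10 for the given device
    return filter_short_groups_c_wc(
        presence_list,
        filter_size=5,                    # same value callers in this module pass; exact units depend on filter_short_groups_c
        device_id_str=device_id_str,
        from_date="2024-05-09",
        to_date="2024-05-10",
        time_zone_s="America/Los_Angeles",
        refresh=False,                    # True bypasses the cached per-day result
    )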
|
|
def GetLastDurationMinutes(deployment_id, selected_devices, filter, ddate):
|
|
|
|
global threshold_cache, device_lookup_cache
|
|
|
|
max_sleep = 0
|
|
max_device_id = 0
|
|
max_woke_up = 0
|
|
presence_list = []
|
|
to_date = ddate
|
|
|
|
|
|
date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
|
|
# Subtract one day
|
|
previous_day = date_obj - timedelta(days=1)
|
|
# Convert back to string
|
|
prev_date = previous_day.strftime("%Y-%m-%d")
|
|
|
|
data_type = "z-graph"
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
|
|
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
#Lets filter bedrooms only
|
|
just_selected_devices = []
|
|
for device_details in devices_list:
|
|
if device_details[1] in selected_devices:
|
|
just_selected_devices.append(device_details)
|
|
|
|
devices_list = just_selected_devices
|
|
|
|
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
|
|
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
|
|
|
|
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
|
|
epoch_time = calendar.timegm(time_from.utctimetuple())
|
|
|
|
presence_map = {}
|
|
presence_map["time_start"] = epoch_time
|
|
presence_map["time_zone"] = time_zone_s
|
|
|
|
# Calculate the difference in days
|
|
days_difference = (time_to - time_from).days
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
|
|
# Convert string to datetime object
|
|
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
# Subtract one day
|
|
previous_day = date_obj - timedelta(days=1)
|
|
|
|
# Format back to string in the same format
|
|
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",12]
|
|
|
|
print(well_id, radar_threshold_group)
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
ids_list = []
|
|
well_ids = []
|
|
id2well_id = {}
|
|
radar_fields_of_interest = []
|
|
device_field_indexes = {}
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except:
|
|
threshold_lst = ["s3",12]
|
|
#threshold_lst = ["s3_max",12]
|
|
|
|
radar_field = threshold_lst[0]
|
|
#since we are getting 10-second data, there is no more need for min or max...
|
|
radar_field = radar_field.split("_")[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
device_field_indexes[radar_field] = len(radar_fields_of_interest)
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
ids_list.append(details[1])
|
|
id2well_id[details[1]] = details[0]
|
|
well_ids.append(details[0])
|
|
presence_map["well_ids"] = well_ids
|
|
|
|
if len(devices_list) > 0:
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
#zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(zsql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = None
|
|
myz_data = None
|
|
|
|
my_data = cur.fetchall()
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
cur.execute(zsql)
|
|
myz_data = cur.fetchall()
|
|
|
|
if my_data != None:
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
|
|
#presence_map['longpresence'] and temporary_map_day_plus are similar, except one is used for Z-graph, and another for multiple persons detection
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
presence_map['presence'] = {}
|
|
presence_map['longpresence'] = {}
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
presence_map['raw'] = {}
|
|
|
|
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['raw'][well_id] = zeros_list
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['presence'][well_id] = zeros_list
|
|
|
|
|
|
#presence_map[][well_id] = zeros_list
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = well_id
|
|
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3",12]
|
|
|
|
device_id_2_location[well_id] = location_name
|
|
device_id_2_threshold[well_id] = radar_threshold_group
|
|
if len(my_data) > 1:
|
|
|
|
start_time_ = my_data[0][0]
|
|
parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
|
|
)
|
|
|
|
presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
|
|
|
|
|
|
if myz_data != None:
|
|
temporary_map_day_plus = {}
|
|
presence_map['z_graph'] = {}
|
|
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * (days_difference + 1) #+1 is for previous day
|
|
presence_map['z_graph'][well_id] = [] #just place holder
|
|
temporary_map_day_plus[well_id] = zeros_list
|
|
presence_map['longpresence'][well_id] = zeros_list #just place holder
|
|
|
|
print(deployment_id)
|
|
print(time_from_z_str)
|
|
print(devices_list)
|
|
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
|
|
)
|
|
|
|
#start_time_ = myz_data[0][0]
|
|
st = time.time()
|
|
device_lookup_cache = {}
|
|
threshold_cache = {}
|
|
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
|
|
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
|
|
for device_id in ids_list:
|
|
device_id_str = str(device_id)
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph":
|
|
if filter > 1:
|
|
#presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
|
|
presence_map["presence"][id2well_id[device_id]] = presence_list
|
|
#longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
|
|
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
|
|
|
|
else: #straight decas
|
|
presence_list = presence_map["presence"][id2well_id[device_id]]
|
|
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
|
|
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
max_sleep = 0
|
|
max_device_id = 0
|
|
max_woke_up = 0
|
|
for device_id in ids_list:
|
|
#print(device_id_2_threshold[id2well_id[device_id]])
|
|
presence_list = CreateZGraph(id2well_id[device_id], presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
|
|
sleep_minutes, woke_up = sleep_length(presence_list)
|
|
if sleep_minutes > max_sleep:
|
|
max_sleep = sleep_minutes
|
|
max_device_id = device_id
|
|
max_woke_up = woke_up
|
|
presence_map = {}
|
|
return max_sleep, max_device_id, max_woke_up, presence_list
|
|
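# Hedged sketch of calling GetLastDurationMinutes; the deployment and device ids
# below are hypothetical. It returns the longest detected sleep among the selected
# (bedroom) devices, the device that produced it, the wake-up minute, and the last
# z-graph presence list that was evaluated.
def _example_get_last_duration_minutes():
    deployment_id = 42                      # hypothetical deployment
    bedroom_device_ids = [559, 560]         # hypothetical bedroom device_ids
    sleep_minutes, device_id, woke_up, presence = GetLastDurationMinutes(
        deployment_id, bedroom_device_ids, 5, "2024-05-10"
    )
    return sleep_minutes, device_id, woke_up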
|
|
def GetTemperature(bedroom_device_id, ddate):
|
|
result = 0
|
|
sql = f"""
|
|
SELECT *
|
|
FROM public.sensor_readings
|
|
WHERE device_id = {bedroom_device_id}
|
|
ORDER BY "time" DESC
|
|
LIMIT 1;
|
|
"""
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
result = cur.fetchone()
|
|
return result
|
|
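# GetTemperature above interpolates device_id straight into the SQL string. Since
# device_id comes from our own devices table this works, but the same query can be
# written with a psycopg2 parameter instead; a sketch under that assumption:
def _get_last_reading_parameterized(cur, device_id):
    cur.execute(
        'SELECT * FROM public.sensor_readings WHERE device_id = %s ORDER BY "time" DESC LIMIT 1;',
        (device_id,),
    )
    return cur.fetchone()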
|
|
def GetSensorsDetailsFromDeployment(deployment_id, ddate, filter_minutes, fast=True):
|
|
#list all devices that user has access to
|
|
deployments_dets = []
|
|
with get_db_connection() as conn:
|
|
try:
|
|
sql = f"""
|
|
SELECT pd.user_id, pd.first_name, pd.last_name, pd.address_street, pd.picture
|
|
FROM deployment_details dd
|
|
JOIN person_details pd ON dd.beneficiary_id = pd.user_id
|
|
WHERE dd.deployment_id ={deployment_id};
|
|
"""
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
deployments_dets = cur.fetchone()
|
|
except Exception as err:
|
|
logger.error("GetSensorsDetailsFromDeployment "+str(err) +" "+sql)
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
#Which sensor is in: Bathroom, Kitchen, Bedroom ?
|
|
bathrooms = []
|
|
kitchens = []
|
|
bedrooms = []
|
|
dev_id_to_location = {0: "Outside/?"}
|
|
for device in devices_list:
|
|
dev_id_to_location[device[1]] = device[2]
|
|
if Consolidataed_locations[device[2]] == "Bathroom":
|
|
bathrooms.append(device[1])
|
|
elif Consolidataed_locations[device[2]] == "Kitchen":
|
|
kitchens.append(device[1])
|
|
elif Consolidataed_locations[device[2]] == "Bedroom":
|
|
bedrooms.append(device[1])
|
|
|
|
#we need to determine where the user was last seen, and use sensor data from there...
|
|
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
logger.debug(f"locations_file1 ={locations_file}")
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
force_recreate = False
|
|
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
|
|
if file_exists1:
|
|
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date_local = file_modified_local.date() #local date
|
|
file_modified_date_utc = file_modified_utc1.date()
|
|
file_date_utc = MapFileToDate(locations_file) #locations_file is UTC
|
|
#if file_modified_date_local < file_date_utc:
|
|
if file_modified_utc1.date() < file_date_utc:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - file_modified_local
|
|
if time_passed.seconds > 30: #recreate if older than 30 seconds
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
logger.debug(f"force_recreate={str(force_recreate)}")
|
|
|
|
if force_recreate:
|
|
CreateLocationsStripe(locations_file, time_zone_s)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
|
|
last_present_device = 0
|
|
last_present = 0
|
|
last_bathroom = 0
|
|
last_kitchen = 0
|
|
last_bedroom = 0
|
|
last_bathroom_date = ddate
|
|
last_kitchen_date = ddate
|
|
last_bedroom_date = ddate
|
|
|
|
if locations_list_s is not None:
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
if len(locations_list) > 1:
|
|
if locations_list[-1][0] > 0:
|
|
last_present_device = locations_list[-1][0]
|
|
last_present = locations_list[-1][1] + locations_list[-1][2]
|
|
else:
|
|
last_present_device = locations_list[-2][0]
|
|
last_present = locations_list[-2][1] + locations_list[-2][2]
|
|
elif len(locations_list) == 1:
|
|
last_present_device = locations_list[0][0]
|
|
|
|
|
|
#Lets find last bathroom presence time
|
|
|
|
if len(locations_list) > 0 and len(bathrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bathrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bathroom:
|
|
last_bathroom = loc_time[1] + loc_time[2]
|
|
last_bathroom_date = ddate
|
|
|
|
|
|
#Lets find last kitchen presence time
|
|
|
|
if len(locations_list) > 0 and len(kitchens) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in kitchens:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_kitchen:
|
|
last_kitchen = loc_time[1] + loc_time[2]
|
|
last_kitchen_date = ddate
|
|
|
|
|
|
#Lets find last bedroom presence time
|
|
|
|
if len(locations_list) > 0 and len(bedrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bedrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bedroom:
|
|
last_bedroom = loc_time[1] + loc_time[2]
|
|
last_bedroom_date = ddate
|
|
|
|
|
|
if last_bathroom == 0 or last_kitchen == 0 or last_bedroom == 0:
|
|
ddate = GetPreviousDate(ddate)
|
|
|
|
locations_file = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
logger.debug(f"locations_file2 ={locations_file}")
|
|
|
|
force_recreate = False
|
|
file_exists1, file_modified_utc1 = check_file_exists(locations_file+".bin")
|
|
logger.debug(f"file_exists1={str(file_exists1)}")
|
|
logger.debug(f"file_modified_utc1={str(file_modified_utc1)}")
|
|
#file_exists1, file_modified_utc1
|
|
if file_exists1:
|
|
file_modified_local = file_modified_utc1.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date_local = file_modified_local.date()
|
|
file_date_utc = MapFileToDate(locations_file)
|
|
logger.debug(f"file_modified_utc1={str(file_modified_utc1.date())} file_date_utc={str(file_date_utc)}")
|
|
if file_modified_utc1.date() < file_date_utc:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - file_modified_local
|
|
logger.debug(f"current_time={current_time} file_modified_local={file_modified_local} time_passed={time_passed}")
|
|
if time_passed.seconds > 30: #recreate if older than 30 seconds
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
CreateLocationsStripe(locations_file, time_zone_s)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", locations_file+".bin")
|
|
|
|
|
|
|
|
logger.debug(f"locations_list_s={locations_list_s}")
|
|
|
|
if (locations_list_s is not None):
|
|
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
if last_present_device == 0:
|
|
if len(locations_list) > 1:
|
|
if locations_list[-1][0] > 0:
|
|
last_present_device = locations_list[-1][0]
|
|
else:
|
|
last_present_device = locations_list[-2][0]
|
|
elif len(locations_list) == 1:
|
|
last_present_device = locations_list[0][0]
|
|
|
|
if last_bathroom == 0:
|
|
if len(locations_list) > 0 and len(bathrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bathrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bathroom:
|
|
last_bathroom = loc_time[1] + loc_time[2]
|
|
last_bathroom_date = ddate
|
|
|
|
if last_kitchen == 0:
|
|
if len(locations_list) > 0 and len(kitchens) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in kitchens:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_kitchen:
|
|
last_kitchen = loc_time[1] + loc_time[2]
|
|
last_kitchen_date = ddate
|
|
|
|
if last_bedroom == 0:
|
|
if len(locations_list) > 0 and len(bedrooms) > 0:
|
|
for loc_time in reversed(locations_list):
|
|
for device_id_temp in bedrooms:
|
|
if device_id_temp == loc_time[0]:
|
|
if (loc_time[1] + loc_time[2]) > last_bedroom:
|
|
last_bedroom = loc_time[1] + loc_time[2]
|
|
last_bedroom_date = ddate
|
|
|
|
|
|
|
|
last_bathroom_time = "2023-01-01T00:00:00"
|
|
if last_bathroom > 0:
|
|
last_bathroom_time = CovertToIsoTime(last_bathroom_date, last_bathroom)
|
|
|
|
last_kitchen_time = "2023-01-01T00:00:00"
|
|
if last_kitchen > 0:
|
|
last_kitchen_time = CovertToIsoTime(last_kitchen_date, last_kitchen)
|
|
|
|
last_bedroom_time = "2023-01-01T00:00:00"
|
|
if last_bedroom > 0:
|
|
last_bedroom_time = CovertToIsoTime(last_bedroom_date, last_bedroom)
|
|
|
|
last_present_time = "2023-01-01T00:00:00"
|
|
if last_present > 0:
|
|
last_present_time = CovertToIsoTime(ddate, last_present)
|
|
|
|
# debug for 48h bug
|
|
if last_bathroom_time == "2023-01-01T00:00:00" or last_kitchen_time == "2023-01-01T00:00:00" or last_bedroom_time == "2023-01-01T00:00:00":
|
|
#last_bathroom_time = "48h" if last_bathroom_time == "2023-01-01T00:00:00" else f"{last_bathroom-last_bathroom_time}"
|
|
#last_kitchen_time = "48h" if last_kitchen_time == "2023-01-01T00:00:00" else f"{last_kitchen-last_kitchen_time}"
|
|
#last_bedroom_time = "48h" if last_bedroom_time == "2023-01-01T00:00:00" else f"{last_bedroom-last_bedroom_time}"
|
|
|
|
logger.debug(f"48h-> deployment_id={str(deployment_id)}, ddate={str(ddate)}")
|
|
logger.debug(f"48h-> force_recreate={force_recreate}")
|
|
logger.debug(f"48h-> last_bathroom_time={last_bathroom_time}|last_kitchen_time={last_kitchen_time}|last_bedroom_time={last_bedroom_time}")
|
|
logger.debug(f"48h-> devices_list={str(devices_list)}")
|
|
logger.debug(f"48h-> bathrooms={str(bathrooms)}")
|
|
logger.debug(f"48h-> kitchens={str(kitchens)}")
|
|
logger.debug(f"48h-> bedrooms={str(bedrooms)}")
|
|
logger.debug(f"48h-> locations_list_s={str(locations_list_s)}")
|
|
|
|
try:
|
|
sql = f"""
|
|
SELECT * FROM sensor_readings
|
|
WHERE device_id = {last_present_device}
|
|
ORDER BY time DESC
|
|
LIMIT 1;
|
|
"""
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
sensor_dets = cur.fetchone()
|
|
except Exception as err:
|
|
logger.error("GetSensorsDetailsFromDeployment1 "+str(err) +" "+sql)
|
|
|
|
# wellness_score_percent
|
|
wellness_score_percent = 90
|
|
|
|
sleep_filter_minutes = 5
|
|
|
|
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
|
|
|
|
sleep_bathroom_visit_count = 0
|
|
# bedroom_co2
|
|
bedroom_co2 = 500
|
|
device_detail = None
|
|
bedroom_temperature = 0
|
|
sleep_hours = 0
|
|
if fast == False:
|
|
|
|
if len(bedrooms) > 0:
|
|
sleep_minutes, bedroom_device_id, woke_up, presence_list = GetLastDurationMinutes(deployment_id, bedrooms, sleep_filter_minutes, ddate)
|
|
sleep_hours = sleep_minutes/ 60
|
|
# bedroom_temperature
|
|
temp_offset = -16.0
|
|
device_detail = GetTemperature(bedroom_device_id, ddate)
|
|
|
|
# sleep_bathroom_visit_count
|
|
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
if sleep_minutes < woke_up: # went to sleep after midnight
|
|
date_sleep = ddate
|
|
to_sleep = woke_up - sleep_minutes
|
|
else:# went to sleep before midnight
|
|
to_sleep = 1440 + woke_up - sleep_minutes
|
|
|
|
# Convert string to datetime object
|
|
previous_day = date_obj - timedelta(days=1)
|
|
date_sleep = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
|
|
if device_detail != None:
|
|
bedroom_temperature = device_detail[2] + temp_offset
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# shower_detected_time
|
|
shower_detected_time = last_bathroom_time
|
|
|
|
# breakfast_detected_time
|
|
breakfast_detected_time = 0
|
|
|
|
# living_room_detected_time
|
|
living_room_time_spent = 0
|
|
|
|
# outside_hours
|
|
outside_hours = 0
|
|
|
|
#let's find the last time seen in Bathroom, Kitchen, Bedroom; deployments_dets holds (pd.user_id, pd.first_name, pd.last_name, pd.address_street, pd.picture)
|
|
|
|
picture_url = deployments_dets[4]
|
|
report = {}
|
|
try:
|
|
|
|
report = {"user_id":deployments_dets[0],
|
|
"name":deployments_dets[1] + " " + deployments_dets[2],
|
|
"address":deployments_dets[3],
|
|
"time_zone":time_zone_s,
|
|
"picture":picture_url,
|
|
"bathroom_at": last_bathroom_time,
|
|
"kitchen_at": last_kitchen_time,
|
|
"bedroom_at": last_bedroom_time,
|
|
"temperature": (sensor_dets[2] - 16) if sensor_dets != None else 0,
|
|
"smell": "clean",
|
|
"bathroom_delayed": [6, 12],
|
|
"kitchen_delayed": [6, 12],
|
|
"bedroom_delayed": [13, 16],
|
|
"last_location": dev_id_to_location[last_present_device],
|
|
"last_detected_time": last_present_time,
|
|
"wellness_score_percent": wellness_score_percent,
|
|
"wellness_descriptor_color": "bg-green-100 text-green-700",
|
|
"bedroom_temperature": round(bedroom_temperature, 2),
|
|
"sleep_bathroom_visit_count": sleep_bathroom_visit_count,
|
|
"bedroom_co2": bedroom_co2,
|
|
"shower_detected_time": shower_detected_time,
|
|
"breakfast_detected_time": breakfast_detected_time,
|
|
"living_room_time_spent": round(living_room_time_spent, 2),
|
|
"outside_hours": round(outside_hours, 2),
|
|
"wellness_descriptor": "Great!",
|
|
"last_seen_alert": "Alert = None",
|
|
"last_seen_alert_colors": "bg-green-100 text-green-700", #https://tailwindcss.com/docs/colors
|
|
"most_time_spent_in": "Bedroom",
|
|
"sleep_hours": round(sleep_hours, 2)
|
|
}
|
|
except Exception as e:
|
|
print(traceback.format_exc())
|
|
return report
|
|
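# Hedged usage sketch (comments only): the report dict built above is what API
# callers are expected to consume. Hypothetical deployment id and date:
#   report = GetSensorsDetailsFromDeployment(42, "2024-05-10", filter_minutes=5, fast=True)
#   report["bathroom_at"], report["sleep_hours"], report["last_location"]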
|
|
def ToList(input_data):
|
|
# If input is already a list
|
|
if isinstance(input_data, list):
|
|
return [str(x).strip() for x in input_data]
|
|
|
|
# If input is string
|
|
if isinstance(input_data, str):
|
|
# Remove outer brackets if present
|
|
cleaned = input_data.strip('()')
|
|
cleaned = cleaned.strip('[]')
|
|
# Remove extra quotes
|
|
cleaned = cleaned.replace('"', '').replace("'", '')
|
|
# Split by comma and clean each element
|
|
return [x.strip() for x in cleaned.split(',')]
|
|
|
|
raise ValueError(f"Unsupported input type: {type(input_data)}")
|
|
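# ToList accepts either a real list or several string encodings; a few illustrative
# inputs and their results:
#   ToList(["64B70888FA84", "64B70888F6F0"])        -> ['64B70888FA84', '64B70888F6F0']
#   ToList("64B70888FA84,64B70888F6F0")             -> ['64B70888FA84', '64B70888F6F0']
#   ToList("['64B70888FA84', '64B70888F6F0']")      -> ['64B70888FA84', '64B70888F6F0']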
|
|
def MACsToWellIds(cur, macs_list):
|
|
|
|
device_ids = []
|
|
device_list = []
|
|
|
|
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
|
|
if macs_string != "'None'":
|
|
sqlr = f"SELECT well_id, device_mac, device_id, location, description, radar_threshold, close_to FROM public.devices WHERE device_mac IN ({macs_string})"
|
|
|
|
#print (sqlr)
|
|
macs_map = {}
|
|
cur.execute(sqlr)
|
|
proximitys_list = cur.fetchall()
|
|
for well_id, mac, device_id, location, description, radar_threshold, close_to in proximitys_list:
|
|
macs_map[mac] = (well_id, device_id, location_names[location], description, mac, radar_threshold, close_to)
|
|
|
|
|
|
for mac in macs_list:
|
|
device_ids.append(macs_map[mac][1])
|
|
device_list.append(macs_map[mac])
|
|
|
|
return device_ids, device_list
|
|
|
|
def MACsStrToDevIds(cur, macs):
|
|
|
|
device_ids = []
|
|
#we need to repackage the string so each MAC is quoted with '
|
|
macs_list = ToList(macs)
|
|
macs_string = ",".join(f"'{mac}'" for mac in macs_list)
|
|
if macs_string != "'None'":
|
|
|
|
sqlr = f"SELECT device_mac, device_id FROM public.devices WHERE device_mac IN ({macs_string})"
|
|
print (sqlr)
|
|
macs_map = {}
|
|
cur.execute(sqlr)
|
|
proximitys_list = cur.fetchall()
|
|
for mac, device_id in proximitys_list:
|
|
device_ids.append((mac, device_id))
|
|
|
|
return device_ids
|
|
|
|
|
|
|
|
def ReadCleanStringDB(cur, sql):
|
|
cur.execute(sql)
|
|
temp_string = cur.fetchone()
|
|
if temp_string == None:
|
|
return ""
|
|
else:
|
|
return str(temp_string[0]).strip()
|
|
|
|
# Obtain device_list, device_ids for deployment_id at time epoch_from_file_s (usually today).
# It first checks for an overridden/newly-installed set (FROM public.deployment_history);
# if nothing is found there, it falls back to public.deployment_details.
|
|
def GetProximityList(deployment_id, epoch_from_file_s):
|
|
|
|
#both are valid:
|
|
#64B70888FA84,64B70888F6F0,64B70888F860,64B70889062C,64B70888FAB0,64B708896BDC,64B708897428
|
|
#['64B70888FA84', '64B70888F6F0', '64B70888F860', '64B70889062C', '64B70888FAB0', '64B708896BDC', '64B708897428']
|
|
|
|
#result_list = []
|
|
#well_ids = []
|
|
with get_db_connection() as conn:
|
|
|
|
sqlr = f"""
|
|
SELECT * FROM (
|
|
SELECT proximity
|
|
FROM public.deployment_history
|
|
WHERE deployment_id = {deployment_id}
|
|
AND time <= {epoch_from_file_s}
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) AS latest_deployment
|
|
"""
|
|
#print (sqlr)
|
|
with conn.cursor() as cur:
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
|
|
if devices_string == "":
|
|
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
|
|
#print (sqlr)
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
|
|
|
|
if devices_string == "":
|
|
return [], []
|
|
|
|
macs_list = ToList(devices_string)
|
|
device_ids, device_list = MACsToWellIds(cur, macs_list)
|
|
return device_list, device_ids
|
|
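# Hedged usage sketch for GetProximityList (comments only, hypothetical ids); note
# that it returns (device_list, device_ids) in that order:
#   device_list, device_ids = GetProximityList(42, int(time.time()))
#   # each device_list entry looks like (well_id, device_id, location_name, description, mac, radar_threshold, close_to)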
|
|
def FilterList(to_filter: str, allowed: str) -> str:
|
|
# Convert comma-separated strings to sets
|
|
filter_set = set(to_filter.split(','))
|
|
allowed_set = set(allowed.split(','))
|
|
|
|
# Find intersection and sort the result
|
|
filtered = sorted(filter_set.intersection(allowed_set), key=int)
|
|
|
|
# Join back to comma-separated string
|
|
return ','.join(filtered)
|
|
|
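# FilterList keeps only the deployment ids that are also in the allowed set, e.g.:
#   FilterList("3,1,7", "1,2,3,4") -> "1,3"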
|
|
|
def GetMatchingDevices(privileges, group, deployment, location):
|
|
|
|
global LocationsMap
|
|
|
|
results=[]
|
|
if privileges != "-1":
|
|
if deployment == "" or deployment == "0":
|
|
deployment = privileges
|
|
|
|
privileges_list = privileges.split(',')
|
|
if deployment != "0":
|
|
if "," in deployment:
|
|
deployment = FilterList(deployment, privileges)
|
|
else:
|
|
if deployment not in privileges_list:
|
|
return results
|
|
else:
|
|
if deployment == "0":
|
|
deployment = "-1"
|
|
|
|
devices = GetVisibleDevicesPerLocation(deployment, location)
|
|
return devices
|
|
|
|
def GetMatchingDevicesComplete(privileges, group, deployment, location):
|
|
|
|
global LocationsMap
|
|
|
|
results=[]
|
|
if privileges != "-1":
|
|
if deployment == "" or deployment == "0":
|
|
deployment = privileges
|
|
|
|
privileges_list = privileges.split(',')
|
|
if deployment != "0":
|
|
if "," in deployment:
|
|
deployment = FilterList(deployment, privileges)
|
|
else:
|
|
if deployment not in privileges_list:
|
|
return results
|
|
else:
|
|
if deployment == "0":
|
|
deployment = "-1"
|
|
|
|
devices = GetVisibleDevicesPerLocationComplete(deployment, location)
|
|
return devices
|
|
|
|
def getOldestDeploymentHistoryFromBeneficiary(deployment_id):
|
|
#this will return oldest entry as well as last proximity (devices)
|
|
st = time.time()
|
|
print(f"*0 ----{time.time() - st}")
|
|
results=[]
|
|
well_ids_last = [] #this needs to be list of tuples (well_id, Location_st, Description)
|
|
oldest_time = None
|
|
try:
|
|
print(f"*0a ----{time.time() - st}")
|
|
with get_db_connection() as conn:
|
|
sqlr = f"""
|
|
SELECT * FROM (
|
|
SELECT time, proximity
|
|
FROM public.deployment_history
|
|
WHERE deployment_id = {deployment_id}
|
|
ORDER BY time ASC
|
|
) AS latest_deployment
|
|
"""
|
|
print (sqlr)
|
|
print(f"*1 ----{time.time() - st}")
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
print(f"*2 ----{time.time() - st}")
|
|
results = cur.fetchall()
|
|
print(f"*3 ----{time.time() - st}")
|
|
#lets find which of historical sets has data in DB
|
|
if results == None or results == []: #look in deployment_details
|
|
sqlr = f"SELECT devices from public.deployment_details WHERE deployment_id ={deployment_id}"
|
|
#print (sqlr)
|
|
print(f"*4 ----{time.time() - st}")
|
|
devices_string = ReadCleanStringDB(cur, sqlr)
|
|
print(f"*5 ----{time.time() - st}")
|
|
macs_list = ToList(devices_string)
|
|
print(f"*6 ----{time.time() - st}")
|
|
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
|
|
sql_query = """
|
|
SELECT device_id, first_seen_at
|
|
FROM device_first_seen
|
|
WHERE device_id = ANY(%s)
|
|
GROUP BY device_id;
|
|
"""
|
|
print(f"*7 ----{time.time() - st}")
|
|
try:
|
|
cur.execute(sql_query, (device_ids_last,))
|
|
results1 = cur.fetchall()
|
|
if results1 == []:
|
|
pass
|
|
else:
|
|
oldest_time = results1[0][1]
|
|
except Exception as e:
|
|
AddToLog(traceback.format_exc())
|
|
AddToLog(str(e))
|
|
print(f"*8 ----{time.time() - st}")
|
|
else:
|
|
history_entry = results[-1]
|
|
macs_list = ToList(history_entry[1])
|
|
print(f"*9 ----{time.time() - st}")
|
|
device_ids_last, device_alls_last = MACsToWellIds(cur, macs_list)
|
|
|
|
|
|
for history_entry in results:
|
|
macs_list = ToList(history_entry[1])
|
|
print(f"*10 ----{time.time() - st}")
|
|
device_ids, device_alls = MACsToWellIds(cur, macs_list)
|
|
print(f"*11 ----{time.time() - st}")
|
|
sql_query = """
|
|
SELECT time as oldest_record_time
|
|
FROM sensor_readings
|
|
WHERE device_id = ANY(%s)
|
|
ORDER BY time ASC
|
|
LIMIT 1;
|
|
"""
|
|
print(f"*12 ----{time.time() - st}")
|
|
try:
|
|
cur.execute(sql_query, (device_ids_last,))
|
|
results1 = cur.fetchall()
|
|
oldest_time = results1[0][0]
|
|
if oldest_time != None:
|
|
break
|
|
|
|
|
|
except Exception as e:
|
|
print(str(e))
|
|
print(f"*13 ----{time.time() - st}")
|
|
|
|
except Exception as e:
|
|
print(f"*0b ----{time.time() - st}")
|
|
AddToLog(traceback.format_exc())
|
|
|
|
print(f"*14 ----{time.time() - st}")
|
|
return oldest_time, device_alls_last
|
|
|
|
|
|
def getLastEditedBeneficiary(beneficiary):
|
|
|
|
    #Generate the token here to eliminate issues with an outdated token
|
|
token = generate_token(beneficiary)
|
|
url = 'https://well-api.azurewebsites.net/api/well_api'
|
|
params = {
|
|
"name": "beneficiary_detail",
|
|
"beneficiary": beneficiary,
|
|
"token": token
|
|
}
|
|
#{"id": "user_beneficiary_bernhard@wellnuo.com", "MAC": "BENEFICIARY", "email": "bernhard@wellnuo.com", "edit_date": "Fri Aug 16 06:45:01 2024", "c_password": "bern1", "first_name": "Bernhard", "last_name": "Knigge", "address": "776 Dubanski Dr.", "address_city": "San Jose", "address_state": "CA", "address_zip": "95123", "address_country": "United States", "phone_number": "4087055709", "persons": "2", "gender": "M", "race": "W", "born": "1972", "pets": "1", "creds": "", "devs": "[[203, 'Living Room', '', '64B708890B14'], [251, 'Bathroom', '', '64B7088909E8'], [252, 'Bedroom', '', '64B708890734'], [204, 'Bathroom', 'Guest', '64B708890288'], [201, 'Kitchen', 'toaster', '64B708890584'], [202, 'Kitchen', 'stove', '64B7088906D8'], [205, 'Office', '', '64B708897018']]", "tzone": "America/Los_Angeles", "ttl": -1, "_rid": "R60hANIG-K+qTQIAAAAAAg==", "_self": "dbs/R60hAA==/colls/R60hANIG-K8=/docs/R60hANIG-K+qTQIAAAAAAg==/", "_etag": "\"3500a0ae-0000-0800-0000-66bef56d0000\"", "_attachments": "attachments/", "_ts": 1723790701}
|
|
response = requests.get(url, params=params)
|
|
if response.status_code == 200:
|
|
|
|
text = response.text
|
|
#print(text)
|
|
        if text == "Log-Out":
            return text, ""
|
|
if text[0] == "{":
|
|
data = json.loads(response.text)
|
|
date_string = data["edit_date"]
|
|
parsed_date = datetime.datetime.strptime(date_string, '%c')
|
|
# Convert the datetime object to a timestamp (epoch time)
|
|
epoch_str = str(time.mktime(parsed_date.timetuple()))
|
|
devices = data["devs"]
|
|
return(epoch_str, devices)
|
|
else:
|
|
return text,""
|
|
else:
|
|
logger.debug((f"Failed to retrieve the data, status code: {response.status_code}"))
|
|
|
|
return "",""
|
|
|
|
def GetDeploymentNameFromId(Id):

    import sqlite3  # imported locally: only this legacy helper uses SQLite, the rest of the module uses PostgreSQL

    con = sqlite3.connect(main_db)
    con.text_factory = str
    cur = con.cursor()
    # Parameterized query instead of string concatenation
    cur.execute("SELECT name FROM deployments WHERE id = ?", (Id,))
    results = cur.fetchall()
    con.close()
    if len(results) > 0:
        return results[0][0]
    else:
        return ""
|
|
|
|
def GetTimeZoneOfDeployment(deployment_id):
|
|
time_zone_st = 'America/Los_Angeles'
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
sqlr = f"SELECT time_zone_s from public.deployments WHERE deployment_id ={deployment_id}"
|
|
time_zone_st = ReadCleanStringDB(cur, sqlr)
|
|
return time_zone_st
|
|
|
|
def StringToEpoch(date_string, time_zone_s):
|
|
"""
|
|
Convert a date string to epoch timestamp for start of day (midnight) in specified timezone
|
|
|
|
Args:
|
|
date_string (str): Date in 'YYYY-MM-DD' format
|
|
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
|
|
|
|
Returns:
|
|
float: Epoch timestamp in seconds
|
|
"""
|
|
# Parse the date string
|
|
date_format = '%Y-%m-%d'
|
|
naive_date = datetime.datetime.strptime(date_string, date_format)
|
|
|
|
# Get the timezone
|
|
timezone = pytz.timezone(time_zone_s)
|
|
|
|
# Localize the date to midnight in the specified timezone
|
|
local_date = timezone.localize(naive_date)
|
|
|
|
# Convert to epoch timestamp
|
|
epoch_time = local_date.timestamp()
|
|
|
|
return epoch_time
|
|
|
|
def LocalDateToUTCEpoch(local_date_str, time_zone_s):
|
|
"""
|
|
Convert a date string to epoch timestamp for start of day (midnight) in UTC
|
|
|
|
Args:
|
|
local_date_str (str): Date in 'YYYY-MM-DD' format
|
|
time_zone_s (str): Timezone string (e.g. 'America/Los_Angeles')
|
|
|
|
Returns:
|
|
float: Epoch UTC timestamp in seconds
|
|
"""
|
|
timezone = pytz.timezone(time_zone_s)
|
|
# Parse the date string
|
|
date_format = '%Y-%m-%d'
|
|
local_datetime = datetime.datetime.strptime(local_date_str, date_format)
|
|
local_datetime = timezone.localize(local_datetime)
|
|
|
|
utc_datetime = local_datetime.astimezone(pytz.UTC)
|
|
epoch_time = int(utc_datetime.timestamp())
|
|
|
|
return epoch_time
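
# Illustrative sketch (comments only, not executed): StringToEpoch and LocalDateToUTCEpoch
# both map a local calendar date to the Unix epoch of local midnight; the date and timezone
# below are arbitrary example values.
#
#   StringToEpoch('2024-03-01', 'America/Los_Angeles')       -> 1709280000.0
#   LocalDateToUTCEpoch('2024-03-01', 'America/Los_Angeles') -> 1709280000
#   (midnight PST on 2024-03-01 is 08:00 UTC)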
|
|
|
|
def GetDeploymentDatesBoth(deployment_in):
|
|
|
|
#when looking at the date, date is defined in TZ where device is!
|
|
#Lets take oldest data from first member of deployment
|
|
st = time.time()
|
|
date_list = []
|
|
print(f"&0 ----{time.time() - st}")
|
|
time_zone_st = GetTimeZoneOfDeployment(deployment_in)
|
|
print(f"&1 ----{time.time() - st}")
|
|
oldest_date_dt_utc, devices_all = getOldestDeploymentHistoryFromBeneficiary(deployment_in)
|
|
print(f"&2 ----{time.time() - st}")
|
|
if oldest_date_dt_utc != None:
|
|
#get date in local time zone from UTC datetime
|
|
|
|
#oldest_date_dt
|
|
# Get today's date
|
|
local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
|
|
oldest_date_dt_local = oldest_date_dt_utc.astimezone(local_timezone)
|
|
today_date = datetime.datetime.now(local_timezone)
|
|
|
|
# Generate a list of date strings from oldest_date to today in inverted order
|
|
date_list = [(today_date - timedelta(days=x)).strftime('%Y-%m-%d') for x in range((today_date - oldest_date_dt_local).days + 1)]
|
|
print(f"&3 ----{time.time() - st}")
|
|
return date_list, devices_all, time_zone_st
|
|
|
|
def check_file_exists(file_name, bucket_name="daily-maps"):
|
|
try:
|
|
# Try to get the object's stats - this will raise an exception if the object doesn't exist
|
|
stat_result = miniIO_blob_client.stat_object(bucket_name, file_name)
|
|
last_modified_utc = stat_result.last_modified
|
|
return True, last_modified_utc
|
|
except S3Error as e:
|
|
if e.code == 'NoSuchKey':
|
|
return False, 0
|
|
# Re-raise if it's a different error
|
|
raise
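
# Minimal usage sketch for check_file_exists; the object key below is a made-up example,
# real daily-map keys are generated elsewhere in this module.
#
#   exists, modified_utc = check_file_exists("1/1_2024-03-01_daily_image.png")
#   if exists:
#       print(f"map already rendered at {modified_utc}")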
|
|
|
|
def get_text_dimensions(text, font, font_scale, thickness):
|
|
(width, height), baseline = cv2.getTextSize(text, font, font_scale, thickness)
|
|
return {
|
|
'width': width,
|
|
'height': height,
|
|
'baseline': baseline,
|
|
'total_height': height + baseline
|
|
}
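
# Example sketch: measuring a caption before drawing it with cv2.putText
# (the text, font, scale and thickness are arbitrary illustration values).
#
#   dims = get_text_dimensions("Kitchen", cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
#   # dims['width'] and dims['total_height'] can then be used to pad or right-align the label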
|
|
|
|
def save_to_minio(image, filename, bucket_name="daily-maps", content_type="image/png"):
|
|
"""
|
|
Save a PIL Image directly to MinIO
|
|
|
|
Args:
|
|
image (PIL.Image): Image to save
|
|
filename (str): Filename to use in MinIO
|
|
bucket_name (str): MinIO bucket name
|
|
content_type (str): Content type of the file
|
|
|
|
Returns:
|
|
bool: True if successful, False otherwise
|
|
"""
|
|
logger = logging.getLogger(__name__)
|
|
try:
|
|
# Convert PIL image to bytes
|
|
img_byte_arr = io.BytesIO()
|
|
image.save(img_byte_arr, format='PNG')
|
|
img_byte_arr.seek(0) # Move to start of the BytesIO buffer
|
|
|
|
# Upload to MinIO
|
|
miniIO_blob_client.put_object(
|
|
            bucket_name,  # honor the documented bucket_name parameter (defaults to "daily-maps")
|
|
filename,
|
|
img_byte_arr,
|
|
length=len(img_byte_arr.getvalue()),
|
|
content_type=content_type
|
|
)
|
|
return True
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error saving to MinIO: {traceback.format_exc()}")
|
|
return False
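
# Usage sketch for save_to_minio (the filename is illustrative; PIL's Image is already
# imported at the top of this module):
#
#   img = Image.new("RGB", (1640, 400), (255, 255, 255))
#   if not save_to_minio(img, "1/1_2024-03-01_daily_image.png"):
#       AddToLog("daily map upload failed")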
|
|
|
|
|
|
def SaveImageInBlob(file_name, arr_stretched, labels = []):
|
|
|
|
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
|
|
try:
|
|
|
|
image_with_text = arr_stretched.copy()
|
|
|
|
for label in labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
# Encode the image to a memory buffer using imencode
|
|
success, encoded_image = cv2.imencode('.png', image_with_text)
|
|
AddToLog(f"success={success}")
|
|
if not success:
|
|
raise Exception("Could not encode image!")
|
|
|
|
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
|
|
|
|
image_bytes = encoded_image.tobytes()
|
|
AddToLog(f"len(image_bytes)={len(image_bytes)}")
|
|
miniIO_blob_client.put_object(
|
|
DAILY_MAPS_BUCKET_NAME,
|
|
file_name,
|
|
io.BytesIO(image_bytes),
|
|
len(image_bytes))
|
|
return True
|
|
except Exception as e:
|
|
AddToLog(f"{traceback.format_exc()}")
|
|
logger.error(f"{traceback.format_exc()}")
|
|
return False
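
# Sketch of the labels structure expected by SaveImageInBlob / SaveImageInBlobLabelsOut:
# each tuple mirrors the positional arguments of cv2.putText (text, position, font, scale,
# color in BGR, thickness, line type). Caption and coordinates are example values only.
#
#   example_labels = [
#       ("Kitchen", (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 1, cv2.LINE_AA),
#   ]
#   SaveImageInBlob("1/1_2024-03-01_daily_image.png", arr_stretched, example_labels)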
|
|
|
|
def SaveImageInBlobLabelsOut(file_name, arr_stretched, labels, title_labels):
|
|
|
|
#labels=[(caption,(x,y),font,scale,color,thickness,line_type)]
|
|
try:
|
|
|
|
image_with_text = arr_stretched.copy()
|
|
|
|
for label in labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
for label in title_labels:
|
|
cv2.putText(
|
|
image_with_text, # Image
|
|
label[0], # Text to write
|
|
label[1], # Position (x, y)
|
|
label[2], # Font type
|
|
label[3], # Font scale
|
|
label[4], # Color (BGR)
|
|
label[5], # Thickness
|
|
label[6] # Line type
|
|
)
|
|
|
|
|
|
# Encode the image to a memory buffer using imencode
|
|
success, encoded_image = cv2.imencode('.png', image_with_text)
|
|
AddToLog(f"success={success}")
|
|
if not success:
|
|
raise Exception("Could not encode image!")
|
|
|
|
#AddToLog(f"DAILY_MAPS_BUCKET_NAME={DAILY_MAPS_BUCKET_NAME}")
|
|
|
|
image_bytes = encoded_image.tobytes()
|
|
AddToLog(f"len(image_bytes)={len(image_bytes)}")
|
|
miniIO_blob_client.put_object(
|
|
DAILY_MAPS_BUCKET_NAME,
|
|
file_name,
|
|
io.BytesIO(image_bytes),
|
|
len(image_bytes))
|
|
return True
|
|
except Exception as e:
|
|
AddToLog(f"{traceback.format_exc()}")
|
|
logger.error(f"{traceback.format_exc()}")
|
|
return False
|
|
|
|
def GetLocalTimeForDate(selected_date, time_zone_s, minutes_padding = 0):
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
if minutes_padding > 0:
|
|
local_start = local_start - timedelta(minutes=minutes_padding)
|
|
local_next = local_next + timedelta(minutes=minutes_padding)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
# Format as strings
|
|
time_from_str = utc_start.strftime("%Y-%m-%d %H:%M:%S")
|
|
time_to_str = utc_next.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
return time_from_str + "+0000", time_to_str + "+0000"
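
# Example of what GetLocalTimeForDate returns (illustrative date/timezone):
#
#   GetLocalTimeForDate("2024-03-01", "America/Los_Angeles")
#   -> ("2024-03-01 08:00:00+0000", "2024-03-02 08:00:00+0000")
#
# i.e. the UTC window covering that local calendar day, optionally widened by minutes_padding.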
|
|
|
|
def GetLocalTimeForDateSimple(selected_date, time_zone_s, minutes_padding = 0):
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
if minutes_padding > 0:
|
|
local_start = local_start - timedelta(minutes=minutes_padding)
|
|
local_next = local_next + timedelta(minutes=minutes_padding)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
return utc_start, utc_next
|
|
|
|
def GetLocalTimeEpochsForDate(selected_date, time_zone_s):
|
|
"""
|
|
Get start and end of day epochs for a given date in a specific timezone.
|
|
|
|
Args:
|
|
selected_date (str): Date in "YYYY-MM-DD" format
|
|
time_zone_s (str): Timezone string (e.g., "America/New_York")
|
|
|
|
Returns:
|
|
tuple: (start_epoch, end_epoch) - Unix timestamps for start and end of day
|
|
"""
|
|
# Parse the selected date
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
# Convert selected_date string to datetime object (start of day in local time)
|
|
local_date = datetime.datetime.strptime(selected_date, "%Y-%m-%d")
|
|
local_start = local_tz.localize(local_date)
|
|
|
|
# Get the next day
|
|
local_next = local_start + timedelta(days=1)
|
|
|
|
# Convert to UTC
|
|
utc_start = local_start.astimezone(pytz.UTC)
|
|
utc_next = local_next.astimezone(pytz.UTC)
|
|
|
|
# Convert to epochs (Unix timestamps)
|
|
start_epoch = int(utc_start.timestamp())
|
|
end_epoch = int(utc_next.timestamp())
|
|
|
|
return start_epoch, end_epoch
|
|
|
|
def UTC2Local(utc_time, time_zone_s):
    """
    Convert a UTC datetime to the equivalent local datetime in the given timezone.

    Parameters:
        utc_time: datetime object (a naive datetime is assumed to be UTC)
        time_zone_s: Timezone string (e.g. 'America/Los_Angeles')

    Returns:
        datetime: timezone-aware datetime expressed in the requested local timezone
    """
    local_tz = pytz.timezone(time_zone_s)

    # Treat naive inputs as UTC, then convert
    if utc_time.tzinfo is None:
        utc_time = pytz.UTC.localize(utc_time)

    return utc_time.astimezone(local_tz)
|
|
|
|
def get_timezone_aware_datetime(time_str, timezone_str="America/Los_Angeles"):
|
|
"""
|
|
Convert a naive datetime string to a timezone-aware datetime object.
|
|
|
|
Parameters:
|
|
time_str: String in format 'YYYY-MM-DD HH:MM:SS'
|
|
timezone_str: String representing the timezone (default: "America/Los_Angeles")
|
|
|
|
Returns:
|
|
datetime: A timezone-aware datetime object
|
|
"""
|
|
# Parse the naive datetime
|
|
naive_dt = datetime.datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
|
|
|
|
# Get the timezone
|
|
tz = pytz.timezone(timezone_str)
|
|
|
|
# Localize the datetime (make it timezone-aware)
|
|
# localize() is the correct way to do this, as it handles DST transitions properly
|
|
aware_dt = tz.localize(naive_dt)
|
|
return aware_dt
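
# Example (illustrative values): turning a naive timestamp string into an aware one.
#
#   aware = get_timezone_aware_datetime("2024-03-01 12:30:00", "America/Los_Angeles")
#   # aware.isoformat() == '2024-03-01T12:30:00-08:00'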
|
|
|
|
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6
|
|
}
|
|
# Add sensor columns dynamically
|
|
cols_len = len(columns)
|
|
for i in range(10):
|
|
columns[f'sensor_min_s{i}'] = i + cols_len #smell * 10 + 5
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_smell_array_from_timescale(day_data, time_from_str, device_to_index, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
's0': 2,
|
|
's1': 3,
|
|
's2': 4,
|
|
's3': 5,
|
|
's4': 6,
|
|
's5': 7,
|
|
's6': 8,
|
|
's7': 9,
|
|
's8': 10,
|
|
's9': 11
|
|
}
|
|
## Add sensor columns dynamically
|
|
#cols_len = len(columns)
|
|
#for i in range(10):
|
|
#columns[f'sensor_min_s{i}'] = i + cols_len #smell * 10 + 5
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_radar_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
#start_time = start_time.replace(tzinfo=timezone.utc)
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record)
|
|
columns = {
|
|
's2': 2,
|
|
's3': 3,
|
|
's4': 4,
|
|
's5': 5,
|
|
's6': 6,
|
|
's7': 7,
|
|
's8': 8
|
|
}
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
#print(times[0], start_time, (times[0] - start_time).total_seconds())
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def ScaleTemperature(temperature_in_f):
|
|
# Define our key temperature points and their corresponding color values
|
|
temp_points = [30, 50, 70, 90, 110]
|
|
color_values = [768, 640, 384, 128, 0] # Color values in the rainbow scale
|
|
|
|
# Clamp the temperature to our range
|
|
clamped_temp = max(min(temperature_in_f, 110), 30)
|
|
|
|
# Find which segment the temperature falls into
|
|
for i in range(len(temp_points) - 1):
|
|
if temp_points[i] <= clamped_temp <= temp_points[i + 1]:
|
|
# Linear interpolation between the two nearest points
|
|
t = (clamped_temp - temp_points[i]) / (temp_points[i + 1] - temp_points[i])
|
|
color_value = int(color_values[i] + t * (color_values[i + 1] - color_values[i]))
|
|
return color_value
|
|
|
|
# Fallback (should never reach here due to clamping)
|
|
return 0
|
|
|
|
def GetTemperatureColor(temperature_in_f):
|
|
color_value = ScaleTemperature(temperature_in_f)
|
|
return BestColor(color_value)
|
|
|
|
def BestColor(in_val):
|
|
    #Maps a value from 0 to 1279 onto a rainbow: Red (0) -> Yellow -> Green -> Cyan -> Blue (~1023) -> Violet (1279)
|
|
r,g,b=0,0,0
|
|
in_val = int(in_val)
|
|
if(in_val > 1279):
|
|
in_val = 1279
|
|
if (in_val < 256):
|
|
r = 255
|
|
g = in_val
|
|
elif (in_val < 512):
|
|
r = 511 - in_val
|
|
g = 255
|
|
elif (in_val < 768):
|
|
g = 255
|
|
b = in_val-512
|
|
elif (in_val < 1024):
|
|
g = 1023 - in_val
|
|
b = 255
|
|
else:
|
|
r = in_val - 1024
|
|
b = 255
|
|
|
|
#if (r > 255):
|
|
# print(in_val)
|
|
# print(int(r),int(g),int(b))
|
|
return(int(r),int(g),int(b))
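
# A few reference points of the rainbow mapping above (and of ScaleTemperature feeding it):
#
#   BestColor(0)    -> (255, 0, 0)    red
#   BestColor(255)  -> (255, 255, 0)  yellow
#   BestColor(511)  -> (0, 255, 0)    green
#   BestColor(767)  -> (0, 255, 255)  cyan
#   BestColor(1023) -> (0, 0, 255)    blue
#   BestColor(1279) -> (255, 0, 255)  violet
#   GetTemperatureColor(70)  # ScaleTemperature(70) == 384, so this yields (127, 255, 0)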
|
|
|
|
def GrayColor(in_val):
|
|
    #Maps a value from 0 to 255 onto a grayscale RGB triple (out-of-range inputs are clamped)
|
|
r,g,b=0,0,0
|
|
in_val = int(in_val)
|
|
if(in_val < 0):
|
|
in_val = 0
|
|
if(in_val > 255):
|
|
in_val = 255
|
|
|
|
r = in_val
|
|
g = r
|
|
b = r
|
|
return(int(r),int(g),int(b))
|
|
|
|
def fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str):
|
|
"""
|
|
Fill numpy array with data from TimeScaleDB query results.
|
|
|
|
Parameters:
|
|
day_data: List of tuples from database query
|
|
time_from_str: Starting datetime string in format 'YYYY-MM-DD HH:MM:SS'
|
|
devices_list: List of device IDs
|
|
arr_source: Pre-initialized numpy array to fill
|
|
|
|
Returns:
|
|
numpy.ndarray: Filled array
|
|
"""
|
|
# Parse the start time
|
|
#start_time = get_timezone_aware_datetime(time_from_str, timezone_str)
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
# Create mapping of device_ids to their index positions
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define columns and their positions in the result tuple
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Process each record
|
|
for record in day_data:
|
|
# Get minute and device_id from record
|
|
record_time = record[0] # minute column
|
|
device_id = record[1] # device_id column
|
|
|
|
if record_time and device_id:
|
|
# Calculate minute delta
|
|
minute_delta = int((record_time - start_time).total_seconds() / 60)
|
|
|
|
if 0 <= minute_delta < arr_source.shape[1]:
|
|
# Calculate base index for this device
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Fill data for each sensor/measurement type
|
|
for col_name, col_offset in columns.items():
|
|
value = record[col_offset]
|
|
if value is not None: # Skip NULL values
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
arr_source[row_idx, minute_delta] = value
|
|
|
|
return arr_source
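
# Row layout sketch for the minute arrays filled above (assuming the same 15-entry column
# mapping): each device occupies a contiguous block of len(columns) rows, so
#
#   row = device_index * 15 + column_index    # column_index follows the order of the columns dict
#   arr_source[row, minute]                   # value for that device/measurement at that minute
#
# e.g. with 4 devices, the humidity row of the third device (index 2) is 2 * 15 + 1 = 31.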
|
|
|
|
def fast_fill_array_from_timescale_bad(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record) - KEEP EXACT SAME ORDER as original
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Pre-compute column keys list for consistent indexing
|
|
column_keys = list(columns.keys())
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
# Use pre-computed column_keys list for consistent indexing
|
|
row_idx = base_idx + column_keys.index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_array_from_timescale(day_data, time_from_str, devices_list, arr_source, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Define column mappings (sensor type to position in record) - KEEP EXACT SAME ORDER as original
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
# Pre-compute column keys list for consistent indexing
|
|
column_keys = list(columns.keys())
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
# Use pre-computed column_keys list for consistent indexing
|
|
row_idx = base_idx + column_keys.index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def fast_fill_array_from_timescale_single(day_data, time_from_str, devices_list, arr_source, sensor, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized version of array filling from TimeScaleDB data.
|
|
Uses vectorized operations for significant speed improvement.
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Pre-process data into a more efficient structure
|
|
# Group by device_id to reduce lookup operations
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
if sensor != None:
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 2,
|
|
'pressure_amplitude': 2,
|
|
'max_light': 2,
|
|
'radar': 2,
|
|
'sensor_min_s0': 2,
|
|
'sensor_min_s1': 2,
|
|
'sensor_min_s2': 2,
|
|
'sensor_min_s3': 2,
|
|
'sensor_min_s4': 2,
|
|
'sensor_min_s5': 2,
|
|
'sensor_min_s6': 2,
|
|
'sensor_min_s7': 2,
|
|
'sensor_min_s8': 2,
|
|
'sensor_min_s9': 2
|
|
}
|
|
else:
|
|
|
|
columns = {
|
|
'avg_temperature': 2,
|
|
'avg_humidity': 3,
|
|
'pressure_amplitude': 4,
|
|
'max_light': 5,
|
|
'radar': 6,
|
|
'sensor_min_s0': 7,
|
|
'sensor_min_s1': 8,
|
|
'sensor_min_s2': 9,
|
|
'sensor_min_s3': 10,
|
|
'sensor_min_s4': 11,
|
|
'sensor_min_s5': 12,
|
|
'sensor_min_s6': 13,
|
|
'sensor_min_s7': 14,
|
|
'sensor_min_s8': 15,
|
|
'sensor_min_s9': 16
|
|
}
|
|
|
|
column_keys = list(columns.keys())
|
|
|
|
# Process each device's data in bulk
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] #* len(columns)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1])
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each column type in bulk
|
|
# Use pre-computed column_keys list for consistent indexing
|
|
|
|
|
|
#row_idx = base_idx + 2#column_keys.index(col_name)
|
|
#values = records_array[:, column_keys.index(col_name)]
|
|
|
|
## Filter out None values
|
|
#valid_values = ~np.equal(values, None)
|
|
#if not np.any(valid_values):
|
|
#continue
|
|
|
|
# Process each column type in bulk
|
|
for col_name, col_offset in columns.items():
|
|
row_idx = base_idx + list(columns.keys()).index(col_name)
|
|
values = records_array[:, col_offset]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
def CalcExtremes(arr_source, length, height):
|
|
"""
|
|
Calculate min and max values for each row within legal bounds.
|
|
Optimized version using numpy vectorized operations.
|
|
|
|
Parameters:
|
|
arr_source: numpy array of shape (height, length+4) containing data and bounds
|
|
length: number of data points to process (typically 1440 for minutes in a day)
|
|
height: number of rows in the array
|
|
|
|
Returns:
|
|
numpy array with min/max values stored in columns 1442 and 1443
|
|
"""
|
|
# Extract the data portion and bounds
|
|
data = arr_source[:, :length]
|
|
ignore_below = arr_source[:, 1440:1441] # Keep 2D shape for broadcasting
|
|
ignore_above = arr_source[:, 1441:1442] # Keep 2D shape for broadcasting
|
|
|
|
# Create masks for valid values
|
|
above_min_mask = data >= ignore_below
|
|
below_max_mask = data <= ignore_above
|
|
valid_mask = above_min_mask & below_max_mask
|
|
|
|
# Create a masked array to handle invalid values
|
|
masked_data = np.ma.array(data, mask=~valid_mask)
|
|
|
|
# Calculate min and max values for each row
|
|
row_mins = np.ma.min(masked_data, axis=1).filled(-0.001)
|
|
row_maxs = np.ma.max(masked_data, axis=1).filled(-0.001)
|
|
|
|
# Store results
|
|
arr_source[:, 1442] = row_mins
|
|
arr_source[:, 1443] = row_maxs
|
|
|
|
return arr_source
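
def _calc_extremes_demo():
    """Illustrative sketch only (not called anywhere): shows the 1444-wide row layout
    CalcExtremes expects, with made-up temperature-like values."""
    demo = np.full((1, 1444), -0.001)
    demo[0, :1440] = np.random.uniform(60, 80, 1440)    # fake per-minute readings
    demo[0, 1440], demo[0, 1441] = 0, 150                # legal bounds (ignore_below / ignore_above)
    demo = CalcExtremes(demo, 1440, 1)
    return demo[0, 1442], demo[0, 1443]                  # roughly 60 and 80 for this fake trace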
|
|
|
|
def plot(arr, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
|
|
color='blue', style='histogram', bins=1000):
|
|
"""
|
|
Plot a 1D numpy array as a line or scatter plot
|
|
|
|
Parameters:
|
|
arr : 1D numpy array
|
|
title : str, plot title
|
|
figsize : tuple, figure size in inches
|
|
color : str, line/point color
|
|
style : str, 'line' or 'scatter'
|
|
"""
|
|
title = filename
|
|
plt.figure(figsize=figsize)
|
|
x = np.arange(len(arr))
|
|
|
|
if style == 'line':
|
|
x = np.arange(len(arr))
|
|
plt.plot(x, arr, color=color)
|
|
elif style == 'scatter':
|
|
x = np.arange(len(arr))
|
|
plt.scatter(x, arr, color=color, alpha=0.6)
|
|
elif style == 'histogram':
|
|
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
|
|
plt.yscale('log') # Using log scale for better visualization
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Frequency')
|
|
|
|
plt.title(title)
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
plt.grid(True, alpha=0.3)
|
|
plt.tight_layout()
|
|
plt.savefig(filename)
|
|
plt.close()
|
|
print(f"Plot saved to: {filename}")
|
|
#plt.show()
|
|
|
|
def ShowArray(arr, threshold, filename="histogram.png", title="Histogram Plot", figsize=(12, 6),
|
|
color='blue', style='histogram', bins=1000):
|
|
"""
|
|
Plot a 1D numpy array as a line or scatter plot
|
|
|
|
Parameters:
|
|
arr : 1D numpy array
|
|
title : str, plot title
|
|
figsize : tuple, figure size in inches
|
|
color : str, line/point color
|
|
style : str, 'line' or 'scatter'
|
|
"""
|
|
title = filename
|
|
plt.figure(figsize=figsize)
|
|
x = np.arange(len(arr))
|
|
|
|
if style == 'line':
|
|
x = np.arange(len(arr))
|
|
plt.plot(x, arr, color=color)
|
|
plt.axhline(y=threshold, color='red', linestyle='--',
|
|
label=f'Threshold: {threshold:.3f}')
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
elif style == 'scatter':
|
|
x = np.arange(len(arr))
|
|
plt.scatter(x, arr, color=color, alpha=0.6)
|
|
elif style == 'histogram':
|
|
plt.hist(arr.ravel(), bins=bins, range=(0, 100), color=color, alpha=0.8)
|
|
plt.yscale('log') # Using log scale for better visualization
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Frequency')
|
|
|
|
plt.title(title)
|
|
plt.xlabel('Index')
|
|
plt.ylabel('Value')
|
|
plt.grid(True, alpha=0.3)
|
|
plt.tight_layout()
|
|
plt.savefig(filename)
|
|
plt.close()
|
|
print(f"Plot saved to: {filename}")
|
|
#plt.show()
|
|
|
|
def AddLimits_optimized(arr_source, devices_c, sensors_c, percentile):
|
|
"""
|
|
Vectorized version of AddLimits that processes all sensors at once.
|
|
|
|
Parameters:
|
|
arr_source: array of shape (devices_c * sensors_c, 1444)
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
percentile: parameter for clean_data_vectorized
|
|
"""
|
|
total_sensors = devices_c * sensors_c
|
|
|
|
# Create arrays of sensor indices for all rows
|
|
sensor_indices = np.arange(total_sensors) % sensors_c
|
|
|
|
# Convert sensor_legal_values into arrays for vectorized access
|
|
sensor_types = np.array([s_table[i] for i in range(sensors_c)])
|
|
min_vals = np.array([sensor_legal_values[t][0] for t in sensor_types])
|
|
max_vals = np.array([sensor_legal_values[t][1] for t in sensor_types])
|
|
windows = np.array([sensor_legal_values[t][2] for t in sensor_types])
|
|
|
|
# Get values for each row based on sensor type
|
|
row_windows = windows[sensor_indices]
|
|
row_mins = min_vals[sensor_indices]
|
|
row_maxs = max_vals[sensor_indices]
|
|
|
|
# Process rows that need cleaning (window > 2)
|
|
clean_mask = row_windows > 2
|
|
if np.any(clean_mask):
|
|
# Clean each row with its corresponding window size
|
|
for window in np.unique(row_windows[clean_mask]):
|
|
# Get indices of rows that need this window size
|
|
rows_to_clean = np.where(clean_mask & (row_windows == window))[0]
|
|
|
|
# Clean each row individually (since clean_data_vectorized expects 1D input)
|
|
for row_idx in rows_to_clean:
|
|
arr_source[row_idx, :1440] = clean_data_vectorized(
|
|
arr_source[row_idx, :1440],
|
|
window,
|
|
percentile
|
|
)
|
|
|
|
# Set min/max values for all rows at once
|
|
arr_source[:, 1440] = row_mins
|
|
arr_source[:, 1441] = row_maxs
|
|
|
|
return arr_source
|
|
|
|
def AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile):
|
|
"""
|
|
Vectorized version of AddLimits that processes all sensors at once.
|
|
Parameters:
|
|
arr_source: array of shape (devices_c * sensors_c, 1444)
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
percentile: parameter for clean_data_vectorized
|
|
"""
|
|
total_sensors = devices_c * sensors_c
|
|
# Create arrays of sensor indices for all rows
|
|
sensor_indices = np.arange(total_sensors) % sensors_c
|
|
# Convert sensor_legal_values into arrays for vectorized access
|
|
sensor_types = np.array([smells_table[i] for i in range(sensors_c)])
|
|
min_vals = np.array([smell_legal_values[t][0] for t in sensor_types])
|
|
max_vals = np.array([smell_legal_values[t][1] for t in sensor_types])
|
|
# Get values for each row based on sensor type
|
|
row_mins = min_vals[sensor_indices]
|
|
row_maxs = max_vals[sensor_indices]
|
|
|
|
# Replace values smaller than smell_min and larger than smell_max with no_smell
|
|
# Create a mask for the data points (first 1440 columns)
|
|
data_mask_below = arr_source[:, :1440] < smell_min
|
|
data_mask_above = arr_source[:, :1440] > smell_max
|
|
data_mask_invalid = data_mask_below | data_mask_above
|
|
|
|
# Replace invalid values with no_smell
|
|
arr_source[:, :1440][data_mask_invalid] = no_smell
|
|
|
|
# Set min/max values for all rows at once
|
|
arr_source[:, 1440] = row_mins
|
|
arr_source[:, 1441] = row_maxs
|
|
return arr_source
|
|
|
|
def AddLimits(arr_source, devices_c, sensors_c, percentile):
|
|
for y in range(devices_c*sensors_c):
|
|
sensor_index = y % sensors_c
|
|
min_ok, max_ok, window = sensor_legal_values[s_table[sensor_index]]
|
|
#if EnablePlot:
|
|
#if (y == 33):
|
|
#print("stop")
|
|
#plot(arr_source[y, :1440], "before_clean_sensor.png")
|
|
if window > 2:
|
|
arr_source[y, :1440] = clean_data_vectorized(arr_source[y, :1440], window, percentile)
|
|
|
|
#if EnablePlot:
|
|
#if (y == 33):
|
|
#print("stop")
|
|
#plot(arr_source[y, :1440], "after_clean_sensor.png")
|
|
|
|
arr_source[y][1440] = min_ok
|
|
arr_source[y][1441] = max_ok
|
|
return arr_source
|
|
|
|
def clean_data_vectorized(data, window, percentile):
|
|
"""
|
|
Vectorized version of clean_data function using pure numpy
|
|
First removes zeros, then cleans outliers
|
|
|
|
Parameters:
|
|
data: numpy array of sensor readings
|
|
window: int, size of rolling window
|
|
percentile: float, percentile threshold for deviation filtering
|
|
"""
|
|
# Create a copy to avoid modifying original data
|
|
working_data = data.copy()
|
|
|
|
# Replace zeros with NaN
|
|
zero_mask = working_data == 0
|
|
working_data[zero_mask] = np.nan
|
|
|
|
# Create rolling window view of the data
|
|
def rolling_window(a, window):
|
|
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
|
|
strides = a.strides + (a.strides[-1],)
|
|
return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
|
|
|
|
# Pad array for edge handling
|
|
pad_width = window // 2
|
|
padded = np.pad(working_data, pad_width, mode='edge')
|
|
|
|
# Create rolling windows
|
|
windows = rolling_window(padded, window)
|
|
|
|
# Calculate rolling median (ignoring NaN values)
|
|
medians = np.nanmedian(windows, axis=1)
|
|
|
|
# Forward/backward fill any NaN in medians
|
|
# Forward fill
|
|
mask = np.isnan(medians)
|
|
idx = np.where(~mask, np.arange(mask.shape[0]), 0)
|
|
np.maximum.accumulate(idx, out=idx)
|
|
medians[mask] = medians[idx[mask]]
|
|
|
|
# Backward fill any remaining NaNs
|
|
mask = np.isnan(medians)
|
|
idx = np.where(~mask, np.arange(mask.shape[0]), mask.shape[0] - 1)
|
|
idx = np.minimum.accumulate(idx[::-1])[::-1]
|
|
medians[mask] = medians[idx[mask]]
|
|
|
|
# Calculate deviations (ignoring NaN values)
|
|
deviations = np.abs(working_data - medians)
|
|
|
|
# Calculate threshold (ignoring NaN values)
|
|
threshold = np.nanpercentile(deviations, percentile)
|
|
|
|
# Create mask and replace outliers with median values
|
|
# Points are good if they're not NaN and deviation is within threshold
|
|
good_points = (~np.isnan(working_data)) & (deviations <= threshold)
|
|
|
|
# Replace all bad points (including zeros and outliers) with median values
|
|
result = np.where(good_points, working_data, medians)
|
|
|
|
return result
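
def _clean_data_demo():
    """Illustrative sketch only (not called anywhere): zeros are treated as missing and the
    spike is pulled back toward the rolling median; window/percentile are arbitrary values."""
    raw = np.array([70.0, 70.2, 0.0, 70.1, 250.0, 70.3, 70.2])
    return clean_data_vectorized(raw, 5, 90)   # the 0.0 and the 250.0 spike come back near 70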
|
|
|
|
|
|
def process_chunk(args):
|
|
"""
|
|
Process a chunk of rows
|
|
"""
|
|
chunk, sensors_c, sensor_legal_values, s_table, window, percentile = args
|
|
result = np.copy(chunk)
|
|
|
|
# Process all time series in the chunk at once
|
|
result[:, :1440] = np.array([
|
|
clean_data_vectorized(row[:1440], window, percentile)
|
|
for row in chunk
|
|
])
|
|
|
|
# Set limits for all rows in chunk using vectorized operations
|
|
sensor_indices = np.arange(len(chunk)) % sensors_c
|
|
min_values = np.array([sensor_legal_values[s_table[i]][0] for i in sensor_indices])
|
|
max_values = np.array([sensor_legal_values[s_table[i]][1] for i in sensor_indices])
|
|
|
|
result[:, 1440] = min_values
|
|
result[:, 1441] = max_values
|
|
|
|
return result
|
|
|
|
|
|
def FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw):
|
|
"""
|
|
Optimized version of FillImage function that fills the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
group_by: grouping strategy ("sensortype" or other)
|
|
bw: boolean flag for black and white output
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
    and vocs_scaled array with values scaled from 0 to 1280
|
|
"""
|
|
stripes = devices_c * sensors_c
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Pre-calculate VOC rows mask
|
|
if group_by != "sensortype":
|
|
voc_rows = np.arange(stripes) >= 5 * devices_c
|
|
else:
|
|
voc_rows = (np.arange(stripes) % sensors_c) >= 5
|
|
|
|
# Pre-calculate destination row mapping for sensortype grouping
|
|
if group_by == "sensortype":
|
|
row_indices = np.arange(stripes)
|
|
sensor_indices = row_indices % sensors_c
|
|
device_indices = row_indices // sensors_c
|
|
dest_rows = sensor_indices * devices_c + device_indices
|
|
dest_rows = dest_rows[:, np.newaxis] * stretch_by + np.arange(stretch_by)
|
|
else:
|
|
row_indices = np.arange(stripes)[:, np.newaxis] * stretch_by + np.arange(stretch_by)
|
|
|
|
# Optimize color calculation functions
|
|
def best_color_vectorized(vals):
|
|
"""Vectorized version of BestColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 1279).astype(np.int32)
|
|
r = np.zeros_like(vals, dtype=np.int32)
|
|
g = np.zeros_like(vals, dtype=np.int32)
|
|
b = np.zeros_like(vals, dtype=np.int32)
|
|
|
|
# Region 0-255
|
|
mask1 = vals < 256
|
|
r[mask1] = 255
|
|
g[mask1] = vals[mask1]
|
|
|
|
# Region 256-511
|
|
mask2 = (vals >= 256) & (vals < 512)
|
|
r[mask2] = 511 - vals[mask2]
|
|
g[mask2] = 255
|
|
|
|
# Region 512-767
|
|
mask3 = (vals >= 512) & (vals < 768)
|
|
g[mask3] = 255
|
|
b[mask3] = vals[mask3] - 512
|
|
|
|
# Region 768-1023
|
|
mask4 = (vals >= 768) & (vals < 1024)
|
|
g[mask4] = 1023 - vals[mask4]
|
|
b[mask4] = 255
|
|
|
|
# Region 1024-1279
|
|
mask5 = vals >= 1024
|
|
r[mask5] = vals[mask5] - 1024
|
|
b[mask5] = 255
|
|
|
|
return r, g, b
|
|
|
|
def gray_color_vectorized(vals):
|
|
"""Vectorized version of GrayColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 255).astype(np.int32)
|
|
return vals, vals, vals
|
|
|
|
color_func = gray_color_vectorized if bw else best_color_vectorized
|
|
|
|
# Process all rows at once
|
|
valid_mask = scaled_day[:, :minutes] != -0.001
|
|
big_min = scaled_day[:, 1442:1443] # Keep 2D shape for broadcasting
|
|
big_max = scaled_day[:, 1443:1444]
|
|
|
|
# Calculate k factors where max > min
|
|
valid_range_mask = big_max > big_min
|
|
k = np.zeros_like(big_min)
|
|
k[valid_range_mask] = (1280 if not bw else 255) / (big_max[valid_range_mask] - big_min[valid_range_mask])
|
|
|
|
# Calculate normalized values for all rows at once
|
|
normalized_vals = np.zeros_like(scaled_day[:, :minutes])
|
|
valid_range_indices = np.where(valid_range_mask)[0]
|
|
|
|
normalized_vals[valid_range_indices] = (
|
|
k[valid_range_indices] *
|
|
(scaled_day[valid_range_indices, :minutes] - big_min[valid_range_indices])
|
|
)
|
|
|
|
# Invert VOC rows
|
|
normalized_vals[voc_rows] = (1279 if not bw else 255) - normalized_vals[voc_rows]
|
|
|
|
# Apply valid mask
|
|
normalized_vals[~valid_mask] = 0
|
|
|
|
# Convert to RGB
|
|
r, g, b = color_func(normalized_vals)
|
|
|
|
# Create RGB array
|
|
rgb_values = np.stack([r, g, b], axis=-1)
|
|
|
|
# Handle special case where max == min
|
|
equal_range_mask = ~valid_range_mask
|
|
if np.any(equal_range_mask):
|
|
rgb_values[equal_range_mask.ravel()] = 128
|
|
|
|
# Fill the stretched array efficiently
|
|
if group_by == "sensortype":
|
|
arr_stretched[dest_rows] = rgb_values[:, None]
|
|
else:
|
|
arr_stretched[row_indices] = rgb_values[:, None]
|
|
|
|
return arr_stretched, normalized_vals[voc_rows]
|
|
|
|
|
|
def FillSmellImage_optimized(scaled_day, arr_stretched, y_offset):
|
|
"""
|
|
Fill the stretched array with colored sensor data from scaled_day.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (70, 1444) containing sensor readings
|
|
arr_stretched: 3D array of shape (2685, 1640, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stretch_by = 20
|
|
x_offset = 200
|
|
|
|
def best_color_vectorizedS(vals):
|
|
"""Vectorized version of BestColor that matches the original implementation exactly"""
|
|
vals = np.clip(vals, 0, 1279).astype(np.int32)
|
|
r = np.zeros_like(vals, dtype=np.int32)
|
|
g = np.zeros_like(vals, dtype=np.int32)
|
|
b = np.zeros_like(vals, dtype=np.int32)
|
|
|
|
# Region 0-255
|
|
mask1 = vals < 256
|
|
r[mask1] = 255
|
|
g[mask1] = vals[mask1]
|
|
|
|
# Region 256-511
|
|
mask2 = (vals >= 256) & (vals < 512)
|
|
r[mask2] = 511 - vals[mask2]
|
|
g[mask2] = 255
|
|
|
|
# Region 512-767
|
|
mask3 = (vals >= 512) & (vals < 768)
|
|
g[mask3] = 255
|
|
b[mask3] = vals[mask3] - 512
|
|
|
|
# Region 768-1023
|
|
mask4 = (vals >= 768) & (vals < 1024)
|
|
g[mask4] = 1023 - vals[mask4]
|
|
b[mask4] = 255
|
|
|
|
# Region 1024-1279
|
|
mask5 = vals >= 1024
|
|
r[mask5] = vals[mask5] - 1024
|
|
b[mask5] = 255
|
|
|
|
return r, g, b
|
|
|
|
# Process each row in scaled_day
|
|
for row_idx in range(scaled_day.shape[0]):
|
|
# Extract min and max for this row
|
|
row_min = scaled_day[row_idx, 1442]
|
|
row_max = scaled_day[row_idx, 1443]
|
|
|
|
# Get data for this row (first 1440 elements)
|
|
row_data = scaled_day[row_idx, :1440]
|
|
|
|
# Check if min and max are the same
|
|
if row_min == row_max:
|
|
# Create gray stripe
|
|
stripe = np.ones((stretch_by, 1440, 3), dtype=np.int32) * 128
|
|
else:
|
|
# Normalize the data between 0 and 1279
|
|
k = 1280 / (row_max - row_min)
|
|
normalized_vals = k * (row_data - row_min)
|
|
normalized_vals = np.clip(normalized_vals, 0, 1279)
|
|
|
|
# Convert to RGB
|
|
r, g, b = best_color_vectorizedS(normalized_vals)
|
|
|
|
# Create RGB stripe
|
|
stripe = np.zeros((stretch_by, 1440, 3), dtype=np.int32)
|
|
|
|
# Fill stripe with the same color pattern for all stretch_by rows
|
|
for i in range(stretch_by):
|
|
stripe[i, :, 0] = r
|
|
stripe[i, :, 1] = g
|
|
stripe[i, :, 2] = b
|
|
|
|
# Calculate the y position for this stripe
|
|
y_pos = y_offset + row_idx * stretch_by
|
|
|
|
# Place the stripe into arr_stretched
|
|
print(stretch_by, stripe.shape, arr_stretched.shape, y_pos)
|
|
arr_stretched[y_pos:y_pos+stretch_by, x_offset:x_offset+1440, :] = stripe
|
|
|
|
return arr_stretched
|
|
|
|
def FillImage(scaled_day, devices_c, sensors_c, arr_stretched, group_by, bw):
|
|
"""
|
|
Fill the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stripes = devices_c * sensors_c
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Create a boolean mask for VOC sensors
|
|
if group_by != "sensortype":
|
|
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
|
|
else:
|
|
voc_rows = np.array([i for i in range(stripes) if int(i % sensors_c) >= 5])
|
|
# Vectorize the BestColor function
|
|
if not bw:
|
|
vectorized_best_color = np.vectorize(BestColor)
|
|
else:
|
|
vectorized_best_color = np.vectorize(GrayColor)
|
|
|
|
# Process each row
|
|
for row in range(stripes):
|
|
|
|
row_data = scaled_day[row, :minutes] # Get minute data
|
|
|
|
#if row == 33:
|
|
# print("stop")
|
|
# plot(row_data, "row_data.png")
|
|
big_min = scaled_day[row, 1442] # min value
|
|
big_max = scaled_day[row, 1443] # max value
|
|
|
|
# Create mask for valid values
|
|
valid_mask = row_data != -0.001
|
|
|
|
# Initialize RGB row with zeros
|
|
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
|
|
|
|
if big_max > big_min:
|
|
# Scale factor
|
|
if not bw:
|
|
k = 1280/(big_max-big_min)
|
|
else:
|
|
k = 255/(big_max-big_min)
|
|
# Calculate normalized values
|
|
normalized_vals = k * (row_data - big_min)
|
|
|
|
# Invert if it's a VOC row
|
|
if row in voc_rows:
|
|
if not bw:
|
|
normalized_vals = 1279 - normalized_vals
|
|
else:
|
|
normalized_vals = 255 - normalized_vals
|
|
|
|
# Apply valid mask
|
|
normalized_vals = np.where(valid_mask, normalized_vals, 0)
|
|
#if row == 33:
|
|
# plot(normalized_vals, "normalized_vals.png")
|
|
|
|
# Convert to RGB colors (vectorized)
|
|
r, g, b = vectorized_best_color(normalized_vals)
|
|
|
|
# Combine into RGB array
|
|
rgb_row[valid_mask] = np.stack([r[valid_mask],
|
|
g[valid_mask],
|
|
b[valid_mask]], axis=1)
|
|
else:
|
|
# Set to gray where valid
|
|
rgb_row[valid_mask] = 128
|
|
|
|
|
|
if group_by == "sensortype":
|
|
|
|
# Fill the stretched rows
|
|
sensor_index = row % sensors_c
|
|
device_index = int(row/sensors_c)
|
|
dest_row = sensor_index * devices_c + device_index #0-0, 1-
|
|
start_idx = dest_row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
else:
|
|
# Fill the stretched rows
|
|
start_idx = row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
|
|
return arr_stretched
|
|
|
|
def FillRadarImage(scaled_day, devices_c, bands, arr_stretched, group_by, map_type):
|
|
"""
|
|
Fill the stretched array with colored sensor data.
|
|
|
|
Parameters:
|
|
scaled_day: 2D array of shape (stripes, minutes+4) containing sensor readings
|
|
devices_c: number of devices
|
|
bands: number of bands per device
|
|
arr_stretched: 3D array of shape (stripes*stretch_by, minutes, 3) to fill with RGB values
|
|
|
|
Returns:
|
|
arr_stretched: Filled array with RGB values
|
|
"""
|
|
stripes = devices_c * bands
|
|
stretch_by = arr_stretched.shape[0] // stripes
|
|
minutes = arr_stretched.shape[1]
|
|
|
|
# Create a boolean mask for VOC sensors
|
|
if group_by != "sensortype":
|
|
voc_rows = np.array([i for i in range(stripes) if int(i/devices_c) >= 5])
|
|
else:
|
|
voc_rows = np.array([i for i in range(stripes) if int(i % bands) >= 5])
|
|
# Vectorize the BestColor function
|
|
if map_type == 3:
|
|
vectorized_best_color = np.vectorize(BestColor)
|
|
else:
|
|
vectorized_best_color = np.vectorize(GrayColor)
|
|
|
|
# Process each row
|
|
for row in range(stripes):
|
|
|
|
row_data = scaled_day[row, :minutes] # Get minute data
|
|
|
|
#if row == 33:
|
|
# print("stop")
|
|
# plot(row_data, "row_data.png")
|
|
big_min = 0 #scaled_day[row, 1442] # min value
|
|
big_max = 255 #scaled_day[row, 1443] # max value
|
|
|
|
# Create mask for valid values
|
|
valid_mask = row_data != -0.001
|
|
|
|
# Initialize RGB row with zeros
|
|
rgb_row = np.zeros((minutes, 3), dtype=np.uint8)
|
|
|
|
if big_max > big_min:
|
|
# Scale factor
|
|
if map_type == 3:
|
|
k = 1280/(big_max-big_min)
|
|
else:
|
|
k = 255/(big_max-big_min)
|
|
# Calculate normalized values
|
|
normalized_vals = k * (row_data - big_min)
|
|
|
|
# Invert if it's a VOC row
|
|
if row in voc_rows:
|
|
if map_type == 3:
|
|
normalized_vals = 1279 - normalized_vals
|
|
else:
|
|
normalized_vals = 255 - normalized_vals
|
|
|
|
# Apply valid mask
|
|
normalized_vals = np.where(valid_mask, normalized_vals, 0)
|
|
#if row == 33:
|
|
# plot(normalized_vals, "normalized_vals.png")
|
|
|
|
# Convert to RGB colors (vectorized)
|
|
r, g, b = vectorized_best_color(normalized_vals)
|
|
|
|
# Combine into RGB array
|
|
rgb_row[valid_mask] = np.stack([r[valid_mask],
|
|
g[valid_mask],
|
|
b[valid_mask]], axis=1)
|
|
else:
|
|
# Set to gray where valid
|
|
rgb_row[valid_mask] = 128
|
|
|
|
|
|
if group_by == "sensortype":
|
|
|
|
# Fill the stretched rows
|
|
band_index = row % bands
|
|
device_index = int(row/bands)
|
|
dest_row = band_index * devices_c + device_index #0-0, 1-
|
|
start_idx = dest_row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
else:
|
|
# Fill the stretched rows
|
|
start_idx = row * stretch_by
|
|
end_idx = start_idx + stretch_by
|
|
arr_stretched[start_idx:end_idx] = rgb_row
|
|
|
|
return arr_stretched
|
|
|
|
def GetFullLocMapDetails(map_file):
|
|
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
|
|
local_timezone = pytz.timezone('America/Los_Angeles') # Replace with your local timezone
|
|
dest_path = os.path.dirname(map_file)
|
|
|
|
parts = map_file.split("/")
|
|
deployment = parts[-2]
|
|
parts1 = parts[-1].split("_")
|
|
date_string = parts1[1]
|
|
|
|
deployments = GetDeploymentB(deployment, -1) #All
|
|
last_locations_file = ""
|
|
last_per_minute_file = ""
|
|
|
|
    today = datetime.datetime.today()
|
|
deployment_details = deployments[0]
|
|
deployment_pair = deployment_details[0]
|
|
proximity_lst = deployment_details[1]
|
|
|
|
    date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_object_midnight = local_timezone.localize(date_object.replace(hour=0, minute=0, second=0, microsecond=0))
|
|
selected_epoch = int(date_object_midnight.timestamp())
|
|
|
|
    sel_date = datetime.datetime.fromtimestamp(selected_epoch)
|
|
devices_list_str = GetDevicesList(deployment_details, sel_date)#.split(',')
|
|
devices_list = ast.literal_eval(devices_list_str)
|
|
return devices_list, selected_epoch, dest_path
|
|
|
|
def median_filter(data, window_size):
|
|
filtered_data = []
|
|
print(len(data))
|
|
window = deque(maxlen=window_size)
|
|
last_value = -1
|
|
offset = 0
|
|
added_old = 0
|
|
for value in data:
|
|
if value != '':
|
|
added_old = 0
|
|
last_value = value
|
|
window.append(value)
|
|
if len(window) == window_size:
|
|
# Sort the window and get the median value
|
|
sorted_window = sorted(window)
|
|
median = sorted_window[window_size // 2]
|
|
filtered_data.append(median)
|
|
else:
|
|
if last_value != -1:
|
|
if added_old < window_size:
|
|
added_old = added_old + 1
|
|
window.append(last_value)
|
|
else:
|
|
window.append(-1)
|
|
|
|
if len(window) == window_size:
|
|
# Sort the window and get the median value
|
|
sorted_window = sorted(window)
|
|
median = sorted_window[window_size // 2]
|
|
filtered_data.append(median)
|
|
|
|
else:
|
|
offset +=1
|
|
|
|
if len(filtered_data) > 0:
|
|
offset += (window_size // 2)
|
|
        #if it starts empty, just leave it that way; do not fake values backwards from midnight
|
|
first_val = -1# filtered_data[0]
|
|
last_val = filtered_data[-1]
|
|
front_padding = [first_val] * offset
|
|
remaining = len(data) - len(filtered_data) - len(front_padding)
|
|
back_padding = [last_val] * remaining
|
|
out_data = front_padding + filtered_data + back_padding
|
|
else:
|
|
out_data = data
|
|
#add front and back padding
|
|
|
|
return out_data
|
|
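# Illustrative sketch, not used by the pipeline: the core rolling-median idea behind
# median_filter above for the simple case where the input has no '' gaps. It keeps the
# same median convention (upper middle element of the sorted window) but pads the front
# by repeating the first median instead of -1, so it is not a drop-in replacement.
# Assumes numpy >= 1.20 for sliding_window_view; uses the module-level numpy import.
def _rolling_median_example(data, window_size):
    arr = np.asarray(data, dtype=float)
    if len(arr) < window_size:
        return list(arr)
    # Build a (len-window+1, window) view of consecutive windows, sort each window,
    # and take the element at window_size // 2, matching median_filter's convention.
    windows = np.lib.stride_tricks.sliding_window_view(arr, window_size)
    medians = np.sort(windows, axis=1)[:, window_size // 2]
    pad = [float(medians[0])] * (window_size - 1)
    return pad + medians.tolist()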
|
|
def FilterGlitches(wave_in, filter_minutes):
|
|
|
|
if(filter_minutes > 0):
|
|
notfiltered_wave = [i[0] for i in wave_in]
|
|
filtered_wave = median_filter(notfiltered_wave, filter_minutes)
|
|
for i, value in enumerate(filtered_wave):
|
|
wave_in[i][0] = value
|
|
|
|
return wave_in
|
|
|
|
def setup_timezone_converter(time_zone_st):
|
|
"""
|
|
Setup timezone converter to be reused
|
|
|
|
Parameters:
|
|
time_zone_st (str): Timezone string (e.g. 'Europe/Berlin')
|
|
|
|
Returns:
|
|
pytz.timezone: Timezone object for conversion
|
|
"""
|
|
return pytz.timezone(time_zone_st)
|
|
|
|
def ReadDailyRadar(MAC, current_date):
|
|
|
|
#This will return all 1 Minute radar data for each gate in the file
|
|
#Will return list (2 items) of lists: Maxes, Mins
|
|
|
|
    #time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)  #unused here; selected_date/time_zone_s are not defined in this scope
|
|
|
|
    start_of_day = ToLocal(calendar.timegm(datetime.datetime(current_date.year, current_date.month, current_date.day, 0, 0).timetuple()))
|
|
end_of_day = start_of_day + 1440 * 60
|
|
file = os.path.join(scriptDir, "DB/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
|
|
file = file.replace("\\","/")
|
|
file1 = os.path.join(scriptDir, "DB/processed_db/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+".db")
|
|
file1 = file1.replace("\\","/")
|
|
    if (not os.path.exists(file) and not os.path.exists(file1)):
|
|
print(file + " and " + file1 + " are not found")
|
|
return []
|
|
result = []
|
|
min_OK = "0"
|
|
sqlr = "SELECT * FROM radars WHERE time >= "+str(start_of_day) +" and time < "+str(end_of_day) +" ORDER BY time ASC"
|
|
#sqlr = "SELECT Date, high, low from "+sensor.lower()+"s1Min"+" WHERE low >= "+min_OK+" and Date >= "+str(start_of_day) +" and Date < "+str(end_of_day)
|
|
print(sqlr)
|
|
if os.path.exists(file):
|
|
result = QuerrySql(file, sqlr)
|
|
elif os.path.exists(file1):
|
|
result = QuerrySql(file1, sqlr)
|
|
|
|
# M0 ............M8 S2 ........S8
|
|
#day_minutes_data = [[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]] * (24 * 60 + 2)
|
|
day_minutes_data = [[0] * 16 for _ in range(24 * 60)]
|
|
#for each gate lets find maximum value per minute
|
|
for mgate in range(9):
|
|
max_per_min = 0
|
|
for minute_data in result:
|
|
seconde = minute_data[0]
|
|
            date_time_minute = datetime.datetime.fromtimestamp(seconde)
|
|
minute_m = 60*date_time_minute.hour+date_time_minute.minute
|
|
if minute_data[mgate + 6] > day_minutes_data[minute_m][mgate]:
|
|
day_minutes_data[minute_m][mgate] = minute_data[mgate + 6]
|
|
|
|
for sgate in range(7):
|
|
for minute_data in result:
|
|
seconde = minute_data[0]
|
|
            date_time_minute = datetime.datetime.fromtimestamp(seconde)
|
|
minute_m = 60*date_time_minute.hour+date_time_minute.minute
|
|
if minute_data[sgate + 17] > day_minutes_data[minute_m][sgate+9]:
|
|
day_minutes_data[minute_m][sgate+9] = minute_data[sgate + 17]
|
|
|
|
return day_minutes_data
|
|
|
|
|
|
def FromLocalMidnight(epoch_time, local_delta):
|
|
|
|
# Convert epoch time to UTC datetime object
|
|
print(type(epoch_time))
|
|
print(epoch_time)
|
|
local_datetime = datetime.datetime.utcfromtimestamp(epoch_time+local_delta).replace(tzinfo=pytz.UTC)
|
|
|
|
# Calculate minute count from midnight
|
|
minutes_from_midnight = (local_datetime - local_datetime.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60
|
|
return minutes_from_midnight
|
|
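# Illustrative usage of FromLocalMidnight (not called elsewhere in this module):
# local_delta is the local timezone's UTC offset in seconds; the values below are
# example inputs only.
def _from_local_midnight_example():
    epoch_time = 1718900000          # some UTC epoch, in seconds
    local_delta = -7 * 3600          # assumed offset (e.g. PDT); in production it comes from the deployment timezone
    return FromLocalMidnight(epoch_time, local_delta)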
|
|
def process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest):
|
|
"""
|
|
NumPy-based version of wave processing
|
|
|
|
Parameters:
|
|
my_data: List of tuples containing (time_val, device_id, other radar_fields_of_interest)
|
|
time_zone_s: Target timezone string
|
|
device_id_2_threshold: Dictionary mapping device_ids to their thresholds
|
|
|
|
Returns:
|
|
        1D NumPy array of length 1440: for each minute, the 1-based index of the device
        with the strongest reading above its threshold, or 0 when no device qualified.
        If my_data is empty, a list of ["", -1] placeholders is returned instead.
|
|
"""
|
|
wave_m = None
|
|
tz = pytz.timezone(time_zone_s)
|
|
if not my_data:
|
|
return [["", -1] for _ in range(1440)]
|
|
|
|
vectorized_BestColor = np.vectorize(BestColor)
|
|
stripes = len(device_id_2_threshold)
|
|
stretch_by = 5
|
|
minutes = 1440
|
|
arr_source = np.zeros((int(stripes), minutes), dtype=np.float32)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
index_map = {word: idx for idx, word in enumerate(radar_fields_of_interest)}
|
|
devices_map = {word: idx for idx, word in enumerate(device_id_2_threshold)}
|
|
times = []
|
|
start_time = 0
|
|
for data_set in my_data:
|
|
time_stamp = data_set[0]
|
|
if start_time == 0:
|
|
|
|
# Convert timestamp to a datetime object in UTC
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
local_time = time_stamp.astimezone(local_tz)
|
|
# Set the time to the start of the day in the local time zone
|
|
start_of_day_local = local_time.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
# Convert the start of the day back to UTC
|
|
start_time = start_of_day_local.astimezone(pytz.utc)
|
|
|
|
diff = time_stamp - start_time
|
|
minute = int(diff.total_seconds() / 60)
|
|
device_id = data_set[1]
|
|
field_name = device_id_2_threshold[device_id][0]
|
|
field_index = index_map[field_name]
|
|
threshold = device_id_2_threshold[device_id][1]
|
|
value = data_set[2+field_index]
|
|
if value > threshold:
|
|
arr_source[devices_map[device_id]][minute] = value
|
|
#np.savetxt('output.csv', arr_source, delimiter=',')
|
|
if False:
|
|
for yy in range(stripes):
|
|
rgb_row = vectorized_BestColor(1280*arr_source[yy]/100)
|
|
rgb_reshaped = np.array(rgb_row).reshape(3, minutes).T
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_reshaped
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, [])
|
|
|
|
|
|
max_values = np.max(arr_source, axis=0)
|
|
|
|
# Get indices (0-based)
|
|
wave_m = np.argmax(arr_source, axis=0)
|
|
|
|
# Add 1 to convert to 1-based indexing
|
|
wave_m = wave_m + 1
|
|
|
|
# Set to 0 where the column was all zeros
|
|
wave_m[max_values == 0] = 0
|
|
|
|
return wave_m
|
|
|
|
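# Small self-contained illustration of the argmax step at the end of
# process_wave_data_numpy: per minute (column), pick the 1-based index of the device
# with the largest value, and 0 when every device reads zero. Synthetic data only.
def _dominant_device_per_minute_example():
    arr = np.array([[0.0, 5.0, 0.0],
                    [0.0, 2.0, 7.0]])       # 2 devices x 3 minutes
    max_values = np.max(arr, axis=0)
    wave = np.argmax(arr, axis=0) + 1        # 1-based device index
    wave[max_values == 0] = 0                # nobody present that minute
    return wave                              # -> array([0, 1, 2])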
|
|
|
|
def ReadDailyCollapsedFastRadar(MAC, time_from_str, time_to_str):

    #This will return all 1 Minute radar data for each gate in the file
    #Will return list (2 items) of lists: Maxes, Mins based on s28 (stationary[2] to [8])
    radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
    result = []
    min_OK = "0"

    #Derive the day window and the monthly DB file names from the time strings,
    #following the same naming convention as ReadDailyRadar above
    #(assumption: time_from_str / time_to_str are "%Y-%m-%d %H:%M:%S%z" strings)
    time_from = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
    time_to = datetime.datetime.strptime(time_to_str, "%Y-%m-%d %H:%M:%S%z")
    start_of_day = int(time_from.timestamp())
    end_of_day = int(time_to.timestamp())
    file = os.path.join(scriptDir, "DB/" + MAC.upper() + "_" + str(time_from.year) + "_" + str(time_from.month).rjust(2, '0') + ".db")
    file = file.replace("\\", "/")
    file1 = os.path.join(scriptDir, "DB/processed_db/" + MAC.upper() + "_" + str(time_from.year) + "_" + str(time_from.month).rjust(2, '0') + ".db")
    file1 = file1.replace("\\", "/")

    #radar_max on the devices table decides whether the collapsed value is the low or the high column
    sqlr = "SELECT radar_max FROM devices WHERE MAC = '" + MAC + "'"
    print(sqlr)
    DB_to_be_found_in_full = os.path.join(scriptDir, "main.db")
    DB_to_be_found_in_full = DB_to_be_found_in_full.replace("\\", "/")
    result = QuerrySql(DB_to_be_found_in_full, sqlr)
    sqlr = "SELECT date, low FROM radars1Min WHERE date >= " + str(start_of_day) + " and date < " + str(end_of_day) + " ORDER BY date"
    if len(result) > 0:
        if result[0][0] == 1:
            sqlr = "SELECT date, high FROM radars1Min WHERE date >= " + str(start_of_day) + " and date < " + str(end_of_day) + " ORDER BY date"

    print(sqlr)
    if os.path.exists(file):
        result = QuerrySql(file, sqlr)
    elif os.path.exists(file1):
        result = QuerrySql(file1, sqlr)

    return result
|
|
|
|
def vectorized_best_color_numpy(values):
|
|
"""Vectorized version of BestColor using pure NumPy"""
|
|
# Ensure values are within range
|
|
values = np.clip(values, 0, 1279)
|
|
|
|
# Initialize output arrays
|
|
r = np.zeros_like(values, dtype=np.uint8)
|
|
g = np.zeros_like(values, dtype=np.uint8)
|
|
b = np.zeros_like(values, dtype=np.uint8)
|
|
|
|
# Create masks for each range
|
|
mask_0_255 = values < 256
|
|
mask_256_511 = (values >= 256) & (values < 512)
|
|
mask_512_767 = (values >= 512) & (values < 768)
|
|
mask_768_1023 = (values >= 768) & (values < 1024)
|
|
mask_1024_plus = values >= 1024
|
|
|
|
# Set values for each range using masks
|
|
r[mask_0_255] = 255
|
|
g[mask_0_255] = values[mask_0_255]
|
|
|
|
r[mask_256_511] = 511 - values[mask_256_511]
|
|
g[mask_256_511] = 255
|
|
|
|
g[mask_512_767] = 255
|
|
b[mask_512_767] = values[mask_512_767] - 512
|
|
|
|
g[mask_768_1023] = 1023 - values[mask_768_1023]
|
|
b[mask_768_1023] = 255
|
|
|
|
r[mask_1024_plus] = values[mask_1024_plus] - 1024
|
|
b[mask_1024_plus] = 255
|
|
|
|
return np.stack([r, g, b], axis=-1)
|
|
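# Quick illustrative check of vectorized_best_color_numpy's 0..1279 ramp
# (red -> yellow -> green -> cyan -> blue -> magenta); not used by the pipeline.
def _best_color_endpoints_example():
    vals = np.array([0, 255, 511, 767, 1023, 1279])
    # Expected rows: red, yellow, green, cyan, blue, magenta
    return vectorized_best_color_numpy(vals)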
|
|
def create_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
            gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Calculate color values
|
|
color_values = np.zeros_like(values[batch_slice])
|
|
|
|
color_values[:, :] = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 1279.0
|
|
#color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Convert to RGB colors
|
|
for i in range(end_idx - start_idx):
|
|
rgb_values = vectorized_best_color_numpy(color_values[i])
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
|
|
|
|
return wave_m
|
|
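# Hedged usage sketch for create_optimized_heatmap (the argument values are
# illustrative, not real deployment data): rows are device*field stripes, columns are
# the 1440 minutes of the selected day, and min_val/max_val set the normalization window.
def _heatmap_call_example(my_data, fields, device_to_index, base_minute, time_zone_s):
    stripes = len(device_to_index) * len(fields)
    wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
    return create_optimized_heatmap(my_data, False, fields, wave_m, device_to_index,
                                    base_minute, time_zone_s, min_val=0, max_val=100)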
|
|
def create_light_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
|
|
"""
|
|
Create an optimized heatmap for light data (range 0-4095)
|
|
|
|
Parameters:
|
|
my_data (list): Data from the database query
|
|
bw (bool): Whether to create a black and white (True) or color (False) heatmap
|
|
fields (list): List of field names
|
|
wave_m (numpy.ndarray): The image array to fill
|
|
device_to_index (dict): Mapping from device_id to index
|
|
base_minute (datetime): The base minute for time calculations
|
|
timezone_st (str): Timezone string
|
|
min_val (float): Minimum value for normalization (default: 0)
|
|
max_val (float): Maximum value for normalization (default: 4095)
|
|
|
|
Returns:
|
|
numpy.ndarray: The filled image array
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
import numpy as np
|
|
import pytz
|
|
|
|
# Get the local timezone
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
# Number of fields (should be 1 for light data)
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float - light data is in column 2
|
|
# Reshape to match expected format (n_samples, n_fields)
|
|
values = data_array[:, 2].astype(np.float32).reshape(-1, 1)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
if bw:
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Normalize light values (0-4095) to grayscale (0-255)
|
|
gray_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 255.0)
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
# Create RGB grayscale (same value for R, G, B)
|
|
gray_rgb = np.full(3, gray_values[i, 0], dtype=np.uint8)
|
|
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = gray_rgb
|
|
else:
|
|
# Color mode
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Normalize light values (0-4095) to color range (0-1279)
|
|
color_values = ((values[batch_slice] - min_val) / (max_val - min_val) * 1279.0)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1)
|
|
|
|
# For each value, calculate its RGB color and assign to the image
|
|
for i in range(end_idx - start_idx):
|
|
# Convert normalized value to RGB using vectorized_best_color_numpy
|
|
rgb_value = vectorized_best_color_numpy(np.array([color_values[i, 0]]))[0]
|
|
wave_m[y_coords[i, 0], x_coords[batch_slice][i]] = rgb_value
|
|
|
|
return wave_m
|
|
|
|
def create_temperature_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=4095):
|
|
"""
|
|
Create an optimized heatmap for temperature data with alarm levels
|
|
|
|
Parameters:
|
|
my_data (list): Data from the database query with columns for minute, device_id, temperature_avg, alarm_level
|
|
bw (bool): Whether to create a black and white (True) or color (False) heatmap
|
|
fields (list): List of field names - should be ['temperature', 'temperature_state']
|
|
wave_m (numpy.ndarray): The image array to fill
|
|
device_to_index (dict): Mapping from device_id to index
|
|
base_minute (datetime): The base minute for time calculations
|
|
timezone_st (str): Timezone string
|
|
min_val (float): Minimum value for temperature normalization
|
|
max_val (float): Maximum value for temperature normalization
|
|
|
|
Returns:
|
|
numpy.ndarray: The filled image array
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
import numpy as np
|
|
import pytz
|
|
|
|
# Get the local timezone
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
# Number of fields (should be 2 for temperature data: temperature and alarm state)
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
for i in range(end_idx - start_idx):
|
|
# Get data for this record
|
|
temperature = data_array[batch_slice][i, 2]
|
|
alarm_level = 0
|
|
|
|
# If we have an alarm_level column (index 3), use it
|
|
if data_array.shape[1] > 3:
|
|
alarm_level = data_array[batch_slice][i, 3]
|
|
|
|
# Calculate base y-coordinate for this device
|
|
base_y = device_indices[batch_slice][i] * n_fields
|
|
|
|
# Temperature row (even row - index 0, 2, 4...)
|
|
# Normalize temperature to the color range and create color
|
|
if not bw:
|
|
# For color mode
|
|
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 1279.0, 0, 1279)
|
|
temp_rgb = vectorized_best_color_numpy(np.array([normalized_temp]))[0]
|
|
else:
|
|
# For B&W mode
|
|
normalized_temp = np.clip((temperature - min_val) / (max_val - min_val) * 255.0, 0, 255)
|
|
gray_value = int(normalized_temp)
|
|
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
|
|
|
|
# Set the temperature color in the even row
|
|
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
|
|
|
|
# Alarm level row (odd row - index 1, 3, 5...)
|
|
# Set color based on alarm level (0=green, 1=yellow, 2=red)
|
|
if alarm_level == 0:
|
|
# Green for normal
|
|
alarm_rgb = np.array([0, 255, 0], dtype=np.uint8)
|
|
elif alarm_level == 1:
|
|
# Yellow for warning
|
|
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
|
|
else: # alarm_level == 2
|
|
# Red for critical
|
|
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
|
|
|
|
# Set the alarm color in the odd row
|
|
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
|
|
|
|
return wave_m
|
|
|
|
|
|
def create_humidity_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val=0, max_val=100):
|
|
"""
|
|
Create a heatmap with the exact blue-cyan-green-yellow-red-violet spectrum
|
|
matching Image 2, with green at position 40
|
|
"""
|
|
if len(my_data) < 1:
|
|
return wave_m
|
|
|
|
# Number of fields
|
|
n_fields = len(fields)
|
|
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
x_coords = np.array([(minute - base_minute).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
|
|
# Define the color mapping function based on the exact spectrum we want
|
|
def get_color(t):
|
|
"""Get RGB color from humidity 0-100"""
|
|
# Define color stops - exact RGB values at each step
|
|
# Format: (position, (r, g, b))
|
|
#color_stops = [
|
|
#(0, (0, 0, 255)), # Blue
|
|
#(20, (0, 255, 255)), # Cyan
|
|
#(40, (0, 255, 0)), # Green (centered at 40)
|
|
#(60, (255, 255, 0)), # Yellow
|
|
#(80, (255, 0, 0)), # Red
|
|
#(100, (255, 0, 255)) # Violet
|
|
#]
|
|
|
|
color_stops = [
|
|
(0, (0, 0, 255)), # Blue
|
|
(16, (0, 255, 255)), # Cyan
|
|
(32, (0, 255, 0)), # Green (now centered at 32)
|
|
(60, (255, 255, 0)), # Yellow
|
|
(80, (255, 0, 0)), # Red
|
|
(100, (255, 0, 255)) # Violet
|
|
]
|
|
|
|
# Ensure t is within range
|
|
t = max(0, min(100, t))
|
|
|
|
# Find the two stops to interpolate between
|
|
for i in range(len(color_stops) - 1):
|
|
pos1, color1 = color_stops[i]
|
|
pos2, color2 = color_stops[i+1]
|
|
|
|
if pos1 <= t <= pos2:
|
|
# Linear interpolation between the two color stops
|
|
ratio = (t - pos1) / (pos2 - pos1)
|
|
r = int(color1[0] + ratio * (color2[0] - color1[0]))
|
|
g = int(color1[1] + ratio * (color2[1] - color1[1]))
|
|
b = int(color1[2] + ratio * (color2[2] - color1[2]))
|
|
return r, g, b
|
|
|
|
# Should never reach here
|
|
return 0, 0, 0
|
|
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
for i in range(end_idx - start_idx):
|
|
# Get data for this record
|
|
humidity = float(data_array[batch_slice][i, 2])
|
|
|
|
# Map humidity from min_val-max_val to 0-100 for our color function
|
|
normalized_temp = 100.0 * (humidity - min_val) / (max_val - min_val) if max_val > min_val else 0
|
|
normalized_temp = max(0, min(100, normalized_temp)) # Clamp to 0-100
|
|
|
|
alarm_level = 0
|
|
# If we have an alarm_level column (index 3), use it
|
|
if data_array.shape[1] > 3:
|
|
alarm_level = data_array[batch_slice][i, 3]
|
|
|
|
# Calculate base y-coordinate for this device
|
|
base_y = device_indices[batch_slice][i] * n_fields
|
|
|
|
            # Humidity row (even row)
|
|
if not bw:
|
|
# Get RGB color from our direct mapping function
|
|
r, g, b = get_color(normalized_temp)
|
|
|
|
# OpenCV uses BGR ordering, not RGB
|
|
temp_rgb = np.array([b, g, r], dtype=np.uint8)
|
|
else:
|
|
# For B&W mode
|
|
gray_value = int(normalized_temp * 2.55) # 0-100 to 0-255
|
|
gray_value = max(0, min(255, gray_value))
|
|
temp_rgb = np.array([gray_value, gray_value, gray_value], dtype=np.uint8)
|
|
|
|
# Set the humidity color in the even row
|
|
wave_m[base_y, x_coords[batch_slice][i]] = temp_rgb
|
|
|
|
# Alarm level row (odd row)
|
|
if alarm_level == 0:
|
|
# Green for normal
|
|
alarm_rgb = np.array([0, 255, 0], dtype=np.uint8) #thisis B,G,R !!!
|
|
elif alarm_level == 1:
|
|
# Yellow for warning
|
|
alarm_rgb = np.array([0, 255, 255], dtype=np.uint8)
|
|
else: # alarm_level == 2
|
|
# Red for critical
|
|
alarm_rgb = np.array([0, 0, 255], dtype=np.uint8)
|
|
|
|
# Set the alarm color in the odd row
|
|
wave_m[base_y + 1, x_coords[batch_slice][i]] = alarm_rgb
|
|
|
|
return wave_m
|
|
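# Standalone illustration of the piecewise-linear color-stop interpolation used by
# get_color() inside create_humidity_optimized_heatmap: linearly blend between the two
# stops that bracket the value. The stops here are a reduced set of the ones above
# (green near 32%); illustrative only.
def _interpolate_color_stops_example(value,
                                     stops=((0, (0, 0, 255)),      # Blue
                                            (32, (0, 255, 0)),     # Green
                                            (100, (255, 0, 255)))  # Violet
                                     ):
    value = max(stops[0][0], min(stops[-1][0], value))
    for (p1, c1), (p2, c2) in zip(stops, stops[1:]):
        if p1 <= value <= p2:
            ratio = (value - p1) / (p2 - p1)
            return tuple(int(a + ratio * (b - a)) for a, b in zip(c1, c2))
    return stops[-1][1]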
|
|
def create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields, device_to_index, base_minute, timezone_st, smell_component_stretch_by, selected_date, y_offset):
|
|
"""
|
|
    Build the daily smell (VOC) heatmap for one deployment from my_data and write it
    into arr_stretched starting at y_offset.
|
|
"""
|
|
if len(my_data) < 1:
|
|
return
|
|
|
|
minutes = 1440
|
|
devices_c = len(device_to_index)
|
|
sensors_c = len(fields)
|
|
stripes = devices_c * sensors_c #2 for upper maxes, lower mins
|
|
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
|
|
st = time.time()
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, timezone_st)
|
|
st = time.time()
|
|
arr_source = fast_fill_smell_array_from_timescale(my_data, time_from_str, device_to_index, arr_source_template, timezone_st)
|
|
arr_source = AddSmellLimits_optimized(arr_source, devices_c, sensors_c, percentile=100)
|
|
scaled_day = CalcExtremes(arr_source, minutes, stripes)
|
|
|
|
arr_stretched = FillSmellImage_optimized(scaled_day, arr_stretched, y_offset)
|
|
|
|
return
|
|
|
|
|
|
def create_optimized_heatmap_simple(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st, min_val, max_val):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
            gray_values = ((values[batch_slice, :] - min_val) / (max_val - min_val)) * 255.0
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
        for i in range(0, len(data_array)):
            # Normalize each field to the 0-1279 color range, map to RGB, and write the
            # pixel into this device's block of rows (same layout as the batched version above)
            color_vals = np.clip(((values[i] - min_val) / (max_val - min_val)) * 1279.0, 0, 1279)
            rgb_values = vectorized_best_color_numpy(color_vals)
            y_coords = device_indices[i] * n_fields + np.arange(n_fields)
            wave_m[y_coords, x_coords[i]] = rgb_values
|
|
|
|
return wave_m
|
|
|
|
|
|
def create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, timezone_st):
|
|
|
|
if len(my_data) < 1:
|
|
return []
|
|
|
|
|
|
local_tz = pytz.timezone(timezone_st)
|
|
|
|
n_fields = len(fields)
|
|
# Convert my_data to numpy array for faster processing
|
|
data_array = np.array(my_data)
|
|
|
|
# Get unique device IDs and create mapping
|
|
|
|
# Convert device IDs to indices using vectorized operation
|
|
device_indices = np.vectorize(device_to_index.get)(data_array[:, 1])
|
|
|
|
# Calculate x coordinates (minutes from base)
|
|
#minute is in local time zone, and base_minute is UTC
|
|
base_minute_local = base_minute #.astimezone(local_tz)
|
|
#x_coords = np.array([(minute.replace(tzinfo=datetime.timezone.utc) - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
x_coords = np.array([(minute - base_minute_local).total_seconds()/60 for minute in data_array[:, 0]], dtype=np.int32)
|
|
|
|
# Extract values and convert to float
|
|
values = data_array[:, 2:].astype(np.float32)
|
|
|
|
if bw:
|
|
# Process in batches to avoid memory issues
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
|
|
# Calculate gray values
|
|
gray_values = (values[batch_slice, :] / 100.0) * 255.0
|
|
|
|
# Clip values to valid range
|
|
gray_values = np.clip(gray_values, 0, 255).astype(np.uint8)
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Assign values to the image array
|
|
for i in range(end_idx - start_idx):
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = gray_values[i, :, np.newaxis]
|
|
|
|
else: # Color mode
|
|
# Process in batches
|
|
batch_size = 1000
|
|
for start_idx in range(0, len(data_array), batch_size):
|
|
end_idx = min(start_idx + batch_size, len(data_array))
|
|
batch_slice = slice(start_idx, end_idx)
|
|
|
|
# Calculate color values
|
|
color_values = np.zeros_like(values[batch_slice])
|
|
color_values[:, :] = (values[batch_slice, :] / 100.0) * 1279.0 # other fields
|
|
|
|
# Create y coordinates for each record
|
|
y_coords = (device_indices[batch_slice] * n_fields).reshape(-1, 1) + np.arange(n_fields)
|
|
|
|
# Convert to RGB colors
|
|
for i in range(end_idx - start_idx):
|
|
rgb_values = vectorized_best_color_numpy(color_values[i])
|
|
wave_m[y_coords[i], x_coords[batch_slice][i]] = rgb_values
|
|
|
|
return wave_m
|
|
|
|
def visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png'):
|
|
"""
|
|
Visualize how GMM separates the stationary signal into components
|
|
"""
|
|
# Prepare data
|
|
X = stationary_signal.reshape(-1, 1)
|
|
|
|
# Fit GMM
|
|
gmm = GaussianMixture(n_components=2, random_state=42)
|
|
gmm.fit(X)
|
|
|
|
# Get parameters
|
|
means = gmm.means_.flatten()
|
|
stds = np.sqrt(gmm.covariances_.flatten())
|
|
weights = gmm.weights_
|
|
|
|
# Create histogram of actual data
|
|
plt.figure(figsize=(12, 6))
|
|
|
|
# Plot histogram of actual data
|
|
plt.hist(X, bins=50, density=True, alpha=0.6, color='gray',
|
|
label='Actual Signal Distribution')
|
|
|
|
# Generate points for GMM curves
|
|
x = np.linspace(X.min(), X.max(), 200)
|
|
|
|
# Plot individual components
|
|
for i in range(len(means)):
|
|
plt.plot(x, weights[i] * stats.norm.pdf(x, means[i], stds[i]),
|
|
label=f'Component {i+1}: mean={means[i]:.2f}, std={stds[i]:.2f}')
|
|
|
|
# Plot combined GMM
|
|
gmm_curve = np.zeros_like(x)
|
|
for i in range(len(means)):
|
|
gmm_curve += weights[i] * stats.norm.pdf(x, means[i], stds[i])
|
|
plt.plot(x, gmm_curve, 'r--', linewidth=2, label='Combined GMM')
|
|
|
|
# Add vertical lines for threshold
|
|
baseline = min(means)
|
|
threshold = baseline + 3 * np.sqrt(gmm.covariances_.flatten()[np.argmin(means)])
|
|
plt.axvline(x=baseline, color='g', linestyle='--', label='Baseline')
|
|
plt.axvline(x=threshold, color='r', linestyle='--', label='Threshold')
|
|
|
|
plt.title('Gaussian Mixture Model Components of Stationary Signal')
|
|
plt.xlabel('Signal Value')
|
|
plt.ylabel('Density')
|
|
plt.legend()
|
|
plt.grid(True)
|
|
|
|
# Save and close
|
|
plt.savefig(output_file, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
def process_location_data(location_data):
|
|
"""
|
|
Convert raw location data into aligned time series.
|
|
"""
|
|
timestamps = np.array([t[0] for t in location_data])
|
|
stationary = np.array([t[1] for t in location_data])
|
|
motion = np.array([t[2] for t in location_data])
|
|
return timestamps, stationary, motion
|
|
|
|
def detect_presence_for_location(stationary_signal, motion_signal,
|
|
motion_threshold=5, gmm_components=2):
|
|
"""
|
|
Simplified presence detection for a single location.
|
|
Returns presence mask and parameters.
|
|
"""
|
|
# Fit GMM to stationary signal
|
|
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
|
|
X = stationary_signal.reshape(-1, 1)
|
|
gmm.fit(X)
|
|
|
|
visualize_gmm_fit(stationary_signal, output_file='gmm_explanation.png')
|
|
|
|
# Get baseline and threshold
|
|
baseline = min(gmm.means_)[0]
|
|
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
|
|
baseline_std = np.sqrt(components_sorted[0][1])
|
|
threshold = baseline + 3 * baseline_std
|
|
|
|
# Detect presence
|
|
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
|
|
|
|
    # Smooth presence detection (3-sample window = 30 seconds at 10-second sampling)
|
|
smooth_window = 3
|
|
presence_mask = np.convolve(presence_mask.astype(int),
|
|
np.ones(smooth_window)/smooth_window,
|
|
mode='same') > 0.5
|
|
|
|
return presence_mask, threshold
|
|
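# Minimal sketch of the baseline + 3*sigma thresholding used in
# detect_presence_for_location, run on synthetic data (illustrative only; the real
# function also mixes in the motion channel and smooths the resulting mask).
def _gmm_threshold_example():
    rng = np.random.default_rng(0)
    empty_room = rng.normal(5.0, 1.0, 500)       # background component
    occupied = rng.normal(20.0, 3.0, 200)        # presence component
    signal = np.concatenate([empty_room, occupied])
    gmm = GaussianMixture(n_components=2, random_state=42)
    gmm.fit(signal.reshape(-1, 1))
    means = gmm.means_.flatten()
    baseline_idx = np.argmin(means)
    baseline = means[baseline_idx]
    baseline_std = np.sqrt(gmm.covariances_.flatten()[baseline_idx])
    threshold = baseline + 3 * baseline_std
    return threshold, float((signal > threshold).mean())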
|
|
def find_current_location(data_sets, start_time, end_time, motion_threshold=10):
|
|
"""
|
|
Analyze presence across multiple locations for each minute.
|
|
|
|
Parameters:
|
|
-----------
|
|
data_sets : dict
|
|
Dictionary of location_name: data_tuples pairs
|
|
start_time : datetime
|
|
Start time for analysis
|
|
end_time : datetime
|
|
End time for analysis
|
|
motion_threshold : float
|
|
Threshold for significant motion detection
|
|
|
|
Returns:
|
|
--------
|
|
dict
|
|
Minute by minute analysis of presence and movement
|
|
"""
|
|
# Process each location's data
|
|
location_data = {}
|
|
for location, data in data_sets.items():
|
|
timestamps, stationary, motion = process_location_data(data)
|
|
presence, threshold = detect_presence_for_location(stationary, motion, motion_threshold)
|
|
location_data[location] = {
|
|
'timestamps': timestamps,
|
|
'presence': presence,
|
|
'motion': motion,
|
|
'stationary': stationary,
|
|
'threshold': threshold
|
|
}
|
|
|
|
# Create minute-by-minute analysis
|
|
current_time = start_time
|
|
results = []
|
|
|
|
while current_time < end_time:
|
|
minute_end = current_time + timedelta(minutes=1)
|
|
|
|
# Analysis for current minute
|
|
minute_status = {
|
|
'timestamp': current_time,
|
|
'locations': [],
|
|
'moving_locations': [],
|
|
'presence_values': {},
|
|
'motion_values': {},
|
|
'status': 'nobody_present'
|
|
}
|
|
|
|
# First pass: collect all presence and motion values
|
|
for location, data in location_data.items():
|
|
# Find indices for current minute
|
|
mask = (data['timestamps'] >= current_time) & (data['timestamps'] < minute_end)
|
|
if not any(mask):
|
|
continue
|
|
|
|
presence_in_minute = data['presence'][mask]
|
|
motion_in_minute = data['motion'][mask]
|
|
stationary_in_minute = data['stationary'][mask]
|
|
|
|
if any(presence_in_minute):
|
|
minute_status['presence_values'][location] = np.max(stationary_in_minute)
|
|
minute_status['motion_values'][location] = np.max(motion_in_minute)
|
|
|
|
# If no presence detected anywhere
|
|
if not minute_status['presence_values']:
|
|
minute_status['status'] = 'nobody_present'
|
|
results.append(minute_status)
|
|
current_time += timedelta(minutes=1)
|
|
continue
|
|
|
|
|
|
# Find location with strongest presence
|
|
primary_location = max(minute_status['presence_values'].items(),
|
|
key=lambda x: x[1])[0]
|
|
|
|
# Count locations with significant motion
|
|
moving_locations = [loc for loc, motion in minute_status['motion_values'].items()
|
|
if motion > motion_threshold]
|
|
|
|
        #plot(motion, filename=f"motion.png", title=f"Motion", style='line')  #debug only; 'motion' here is whatever the last location loop left behind
|
|
# Update status based on motion and presence
|
|
if len(moving_locations) > 1:
|
|
# Multiple locations with significant motion indicates multiple people
|
|
minute_status['status'] = 'multiple_people_moving'
|
|
minute_status['locations'] = moving_locations
|
|
minute_status['moving_locations'] = moving_locations
|
|
else:
|
|
# Single or no motion - assign to location with strongest presence
|
|
minute_status['locations'] = [primary_location]
|
|
if moving_locations:
|
|
minute_status['status'] = f'single_person_moving_in_{primary_location}'
|
|
minute_status['moving_locations'] = moving_locations
|
|
else:
|
|
minute_status['status'] = f'single_person_stationary_in_{primary_location}'
|
|
|
|
results.append(minute_status)
|
|
current_time += timedelta(minutes=1)
|
|
|
|
return results
|
|
|
|
def get_size(obj, seen=None):
|
|
# Recursively find size of objects and their contents
|
|
if seen is None:
|
|
seen = set()
|
|
|
|
obj_id = id(obj)
|
|
if obj_id in seen:
|
|
return 0
|
|
|
|
seen.add(obj_id)
|
|
size = sys.getsizeof(obj)
|
|
|
|
if isinstance(obj, (list, tuple, set, dict)):
|
|
if isinstance(obj, (list, tuple, set)):
|
|
size += sum(get_size(i, seen) for i in obj)
|
|
else: # dict
|
|
size += sum(get_size(k, seen) + get_size(v, seen) for k, v in obj.items())
|
|
|
|
return size
|
|
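# Illustrative use of get_size (not part of the pipeline): deep size of a small nested
# structure versus sys.getsizeof, which only counts the outer container.
def _get_size_example():
    sample = {"radar": [list(range(100)) for _ in range(10)], "label": "bedroom"}
    return get_size(sample), sys.getsizeof(sample)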
|
|
def CreatePresenceMap(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global,
|
|
fast, filter_minutes, time_zone_s):
|
|
|
|
#global Id2MACDict
|
|
data_sets = {}
|
|
ids_list = []
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
time_from, time_to = GetLocalTimeForDateSimple(selected_date, time_zone_s)
|
|
|
|
for details in devices_list:
|
|
|
|
sql = get_device_radar_only_query(str(details[1]), time_from_str, time_to_str, [details[1]])
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
data_sets[details[2]] = cur.fetchall()#cur.fetchone()
|
|
|
|
|
|
|
|
|
|
# Get minute-by-minute analysis
|
|
location_analysis = find_current_location(data_sets, time_from, time_to)
|
|
|
|
# Example of printing results
|
|
for minute in location_analysis:
|
|
print(f"Time: {minute['timestamp']}")
|
|
print(f"Status: {minute['status']}")
|
|
print(f"Present in: {', '.join(minute['locations'])}")
|
|
if minute['moving_locations']:
|
|
print(f"Movement in: {', '.join(minute['moving_locations'])}")
|
|
print("---")
|
|
|
|
print(f"Dictionary size: {get_size(data_sets)} bytes")
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
sql = get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
#thresholds_dict = {}
|
|
|
|
#stretch_to_min_max = True
|
|
#devices_c = len(devices_list)
|
|
|
|
#data_sets = {
|
|
#'living_room': my_data1,
|
|
#'kitchen': my_data2,
|
|
#'bedroom1': my_data3,
|
|
#'bedroom2': my_data4,
|
|
#'hallway': my_data5,
|
|
#'bathroom': my_data6,
|
|
#'office': my_data7
|
|
#}
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#rekreate .pckl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min'] #Why 'm8_max' and 'm08_max' ?because m08 is m0 + m1 .. to 8!
|
|
|
|
fields_n = len(fields)
|
|
|
|
stripes = len(devices_list) * fields_n
|
|
|
|
#device_counter = 0
|
|
stretch_by = 5
|
|
|
|
#arr_source = np.zeros((stripes, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
ids_list = []
|
|
|
|
labels = []
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_scale = 1
|
|
label_font_color = (255, 255, 255)
|
|
label_font_thickness = 2
|
|
label_font_line = cv2.LINE_AA
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
|
|
|
|
# Get start and end times from your data
|
|
start_time = min(data_sets['living_room'][0][0],
|
|
data_sets['kitchen'][0][0],
|
|
# ... add other locations
|
|
)
|
|
end_time = max(data_sets['living_room'][-1][0],
|
|
data_sets['kitchen'][-1][0],
|
|
# ... add other locations
|
|
)
|
|
|
|
# Get minute-by-minute analysis
|
|
location_analysis = find_current_location(data_sets, start_time, end_time)
|
|
|
|
# Example of printing results
|
|
for minute in location_analysis:
|
|
print(f"Time: {minute['timestamp']}")
|
|
print(f"Status: {minute['status']}")
|
|
print(f"Present in: {', '.join(minute['locations'])}")
|
|
if minute['moving_locations']:
|
|
print(f"Movement in: {', '.join(minute['moving_locations'])}")
|
|
print("---")
|
|
#----------------------------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
#device_ids = sorted(set(record[1] for record in my_data))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
if False:
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
for record in my_data:
|
|
|
|
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
|
|
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the max/min values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
device_idx = device_to_index[device_id]
|
|
|
|
if bw:
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 255.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = [gray_value, gray_value, gray_value]
|
|
else: #color
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * 22 + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 1279.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = BestColor(gray_value)
|
|
|
|
print(time.time()-st)
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, labels)
|
|
#arr_source[2*gate, :] = wave_m
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_m[col]
|
|
#if sens_val != 0:
|
|
#r,g,b=BestColor(km*(sens_val-m_min))
|
|
#if r > 255 or g > 255 or b > 255:
|
|
#print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
##arr_stretched[y, :] = rgb_row
|
|
|
|
#if gate > 1:
|
|
#ks = 0
|
|
#if(s_max > s_min):
|
|
#if bw:
|
|
#ks = 255/(s_max - s_min)
|
|
#else:
|
|
#ks = 1280/(s_max - s_min)
|
|
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
#wave_s = np.array([0.0] * 1440)
|
|
|
|
#for minute_m in range(1440):
|
|
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
#arr_source[2*gate + 1, :] = wave_s
|
|
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_s[col]
|
|
#if sens_val != 0:
|
|
#if bw:
|
|
#r = ks*(sens_val-s_min)
|
|
#g = r
|
|
#b = r
|
|
#else:
|
|
#r,g,b=BestColor(ks*(sens_val-s_min))
|
|
##print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
print("stop")
|
|
|
|
def ConvertToBase(time_from_str, time_zone_s):
|
|
print(time_from_str)
|
|
dt = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
return dt
|
|
|
|
def GetTimeAndEvents(data):
|
|
"""
|
|
Calculates non-zero elements and consecutive non-zero groups using itertools.
|
|
This is often the most readable and efficient pure Python approach.
|
|
"""
|
|
# Fast way to count non-zeros since they are all 1.0
|
|
#non_zeros = int(sum(data))
|
|
non_zeros = sum(1 for x in data if x != 0)
|
|
# Count groups of non-zero elements
|
|
events = sum(1 for key, group in itertools.groupby(data) if key != 0.0)
|
|
return non_zeros, events
|
|
|
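# Tiny worked example for GetTimeAndEvents: 5 non-zero samples grouped into 2
# consecutive runs ("events"), matching the itertools.groupby logic above.
def _time_and_events_example():
    data = [0, 1.0, 1.0, 0, 0, 1.0, 1.0, 1.0, 0]
    return GetTimeAndEvents(data)   # -> (5, 2)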
|
def current_date_at_tz(timezone_str):
|
|
"""
|
|
Returns the current date in the specified timezone in yyyy-mm-dd format.
|
|
|
|
Args:
|
|
timezone_str (str): Timezone string like "America/Los_Angeles"
|
|
|
|
Returns:
|
|
str: Current date in yyyy-mm-dd format
|
|
"""
|
|
# Get the timezone object
|
|
tz = pytz.timezone(timezone_str)
|
|
|
|
# Get current datetime in the specified timezone
|
|
current_dt = datetime.datetime.now(tz)
|
|
|
|
# Format as yyyy-mm-dd
|
|
return current_dt.strftime('%Y-%m-%d')
|
|
|
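# Illustrative call of current_date_at_tz (the timezone string is an example value):
def _current_date_example():
    return current_date_at_tz("America/Los_Angeles")   # e.g. "2024-06-20"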
|
|
|
def GetActivities(device_id, well_id, date_str, filter_size, refresh, timezone_str, radar_threshold_group_st):
|
|
#filtered_day has non 0 points that exceeded threshold of radar reads
|
|
device_id_str = str(device_id)
|
|
|
|
try:
|
|
|
|
time_from_str, time_to_str = GetLocalTimeForDate(date_str, timezone_str)
|
|
filename_day_presence = f"/{device_id_str}/{device_id_str}_{date_str}_{filter_size}_presence.bin"
|
|
filtered_day_str = None
|
|
if refresh == False and date_str != current_date_at_tz(timezone_str):
|
|
has_larger = False
|
|
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, date_str)
|
|
if filtered_day_str != None and filtered_day_str != "":
|
|
has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
|
|
if has_larger:
|
|
filtered_day_str = None
|
|
if filtered_day_str == None:
|
|
|
|
radar_fields_of_interest = []
|
|
|
|
try:
|
|
threshold_lst = json.loads(radar_threshold_group_st)
|
|
except:
|
|
threshold_lst = ["s3_max",12]
|
|
radar_fields_of_interest = [threshold_lst[0]]
|
|
ids_list = [int(device_id)]
|
|
devices_list_str = device_id_str
|
|
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
sql = get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = None
|
|
my_data = cur.fetchall()
|
|
|
|
days_difference = 1
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map = {'presence': {}}
|
|
presence_map['presence'][well_id] = zeros_list
|
|
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3",12]
|
|
|
|
device_id_2_location = {well_id: ""}
|
|
device_id_2_threshold = {well_id: radar_threshold_group}
|
|
device_field_indexes = {radar_threshold_group[0].split("_")[0]: 1} #len(radar_fields_of_interest)
|
|
id2well_id = {device_id: well_id}
|
|
|
|
if len(my_data) > 1:
|
|
|
|
start_time_ = my_data[0][0]
|
|
parsed_time_ = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
#start_time = datetime.datetime(
|
|
#parsed_time.year,
|
|
#parsed_time.month,
|
|
#parsed_time.day,
|
|
#parsed_time.hour, # Adjust for UTC-7
|
|
#parsed_time.minute,
|
|
#parsed_time.second,
|
|
#tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
|
|
#)
|
|
|
|
presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, "presence")
|
|
|
|
presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter_size, device_id_str, date_str, date_str, timezone_str)
|
|
filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
|
|
filtered_day = json.loads(filtered_day_str)
|
|
else:
|
|
filtered_day = json.loads(filtered_day_str)
|
|
|
|
non_zeros, events = GetTimeAndEvents(filtered_day)
|
|
|
|
        return (non_zeros / 360, events)  #10-second bins: 360 per hour, so this converts active time to hours
|
|
except Exception as e:
|
|
print(filename_day_presence)
|
|
print(filtered_day_str)
|
|
print(traceback.format_exc())
|
|
return(0, 0)
|
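# Worked example of the unit conversion in GetActivities' return value: the filtered
# presence list has one entry per 10-second bin (6 per minute, 360 per hour), so
# non_zeros / 360 is active time in hours. Synthetic data: 720 active bins in 3 runs.
def _activities_units_example():
    filtered_day = [1.0] * 360 + [0.0] * 360 + [1.0] * 180 + [0.0] * 60 + [1.0] * 180
    non_zeros, events = GetTimeAndEvents(filtered_day)
    return non_zeros / 360, events   # -> (2.0, 3)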
|
def CreateFullLocationMap(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
|
|
#global Id2MACDict
|
|
|
|
thresholds_dict = {}
|
|
|
|
stretch_to_min_max = True
|
|
devices_c = len(devices_list)
|
|
|
|
if devices_c == 0:
|
|
return
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#rekreate .pckl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
if scale_global and chart_type != 3 and chart_type != 4: #"digital" and chart_type != "collapsed"
|
|
|
|
max_gate={}
|
|
for gate in range(9):
|
|
max_gate[str(gate)+"_m"] = 0
|
|
max_gate[str(gate)+"_s"] = 0
|
|
|
|
device_counter = 0
|
|
for details in devices_list:
|
|
MAC, threshold, location_name, description = details
|
|
if threshold == None:
|
|
threshold = '["s3_max",12]'
|
|
|
|
|
|
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
|
|
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
|
|
|
|
minute_radar_lists = ReadDailyRadar(MAC, current_date)
|
|
|
|
for gate in range(9):
|
|
for minute_m in range(1440):
|
|
if (minute_radar_lists[minute_m][gate] > max_gate[str(gate)+"_m"]):
|
|
max_gate[str(gate)+"_m"] = minute_radar_lists[minute_m][gate]
|
|
|
|
if gate > 1:
|
|
if (minute_radar_lists[minute_m][gate + 7] > max_gate[str(gate)+"_s"]):
|
|
max_gate[str(gate)+"_s"] = minute_radar_lists[minute_m][gate + 7]
|
|
|
|
|
|
if (chart_type == 2): #"analog"
|
|
|
|
|
|
#fields = ['absent_min', 'stationary_max', 'moving_max', 'both_max',
|
|
#'m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
#'m6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
#'s4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max']
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
|
|
|
|
|
|
fields_n = len(fields)
|
|
|
|
stripes = len(devices_list) * fields_n
|
|
|
|
device_counter = 0
|
|
stretch_by = 5
|
|
|
|
arr_source = np.zeros((stripes, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
|
|
labels = []
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_scale = 1
|
|
label_font_color = (255, 255, 255)
|
|
label_font_thickness = 2
|
|
label_font_line = cv2.LINE_AA
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
descriptor = descriptor + " " + details[3]
|
|
if details[6] != None and details[6] != "":
|
|
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, 10 + text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
|
|
#device_ids = sorted(set(record[1] for record in my_data))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
if False:
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
for record in my_data:
|
|
|
|
#(minute,device_id,absent_min,stationary_max,moving_max,both_max,m0_max,m1_max,m2_max,m3_max,m4_max,
|
|
# m5_max,m6_max,m7_max,m8_max,m08_max,s2_max,s3_max,s4_max,s5_max,s6_max,s7_max,s8_max,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the max/min values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
device_idx = device_to_index[device_id]
|
|
|
|
if bw:
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 255.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = [gray_value, gray_value, gray_value]
|
|
else: #color
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * 22 + field_idx
|
|
|
|
# Convert value to grayscale (0-100 to 0-255)
|
|
gray_value = int((value / 100.0) * 1279.0)
|
|
|
|
# Set RGB values (all same for grayscale)
|
|
wave_m[y, x] = BestColor(gray_value)
|
|
|
|
print(time.time()-st)
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
|
|
SaveImageInBlob(image_file, arr_stretched, labels)
|
|
#arr_source[2*gate, :] = wave_m
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_m[col]
|
|
#if sens_val != 0:
|
|
#r,g,b=BestColor(km*(sens_val-m_min))
|
|
#if r > 255 or g > 255 or b > 255:
|
|
#print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
##print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
##arr_stretched[y, :] = rgb_row
|
|
|
|
#if gate > 1:
|
|
#ks = 0
|
|
#if(s_max > s_min):
|
|
#if bw:
|
|
#ks = 255/(s_max - s_min)
|
|
#else:
|
|
#ks = 1280/(s_max - s_min)
|
|
##wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
#wave_s = np.array([0.0] * 1440)
|
|
|
|
#for minute_m in range(1440):
|
|
#wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
##wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
##DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
#arr_source[2*gate + 1, :] = wave_s
|
|
|
|
#rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
#for col in range(1440):
|
|
#sens_val = wave_s[col]
|
|
#if sens_val != 0:
|
|
#if bw:
|
|
#r = ks*(sens_val-s_min)
|
|
#g = r
|
|
#b = r
|
|
#else:
|
|
#r,g,b=BestColor(ks*(sens_val-s_min))
|
|
##print(r,g,b)
|
|
#rgb_row[col] = r,g,b
|
|
|
|
#for stretch_index in range(stretch_by):
|
|
#y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
#y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
print("stop")
|
|
elif (chart_type == 3): #"digital"
|
|
device_counter = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
MAC, threshold, location_id, description = GetMacThrFromId(dev_id)
|
|
if threshold is None:
threshold = 30
|
|
|
|
sensor = "Radar"
|
|
location_name = location_names[location_id]
|
|
pickle_file = os.path.join(scriptDir, "scratch/"+MAC.upper() +"_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_radarM.pkl")
|
|
pickle_file = pickle_file.replace("\\","/")
|
|
#day_minutes_data = [(0,0)] * (24 * 60 + 2)
|
|
#day_minutes_data_l = [[0] * 10 for _ in range(24 * 60 + 2)]
|
|
|
|
minute_radar_lists = ReadDailyRadar(MAC, current_date)
|
|
|
|
y = 0
|
|
sensor_index = 0
|
|
|
|
#location_index = 0
|
|
|
|
for gate in range(9):
|
|
threshold = 15
|
|
if (gate > 1):
|
|
threshold = thresholds_dict[dev_id][gate-2]
|
|
|
|
for minute_m in range(1440):
|
|
if (minute_radar_lists[minute_m][gate] > threshold):
|
|
minute_radar_lists[minute_m][gate] = 100
|
|
else:
|
|
minute_radar_lists[minute_m][gate] = 0
|
|
|
|
if gate > 1:
|
|
if (minute_radar_lists[minute_m][gate + 7] > threshold):
|
|
minute_radar_lists[minute_m][gate + 7] = 100
|
|
else:
|
|
minute_radar_lists[minute_m][gate + 7] = 0
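# The loop above binarizes the "digital" chart: gate values above the per-gate
# threshold become 100 (present) and everything else 0, so each stripe renders
# as an on/off trace rather than a graded heatmap.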
|
|
|
|
m_max = 100
|
|
m_min = 0
|
|
s_max = 100
|
|
s_min = 0
|
|
|
|
km = 0
|
|
if(m_max > m_min):
|
|
km = 1280/(m_max - m_min)
|
|
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
wave_m = np.array([0.0] * 1440)
|
|
|
|
for minute_m in range(1440):
|
|
wave_m[minute_m] = minute_radar_lists[minute_m][gate]
|
|
|
|
if gate < 2:
|
|
DoDisplay(wave_m, location_name+" "+ description+" " + str(gate))
|
|
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
arr_source[2*gate, :] = wave_m
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
for col in range(1440):
|
|
sens_val = wave_m[col]
|
|
if sens_val != 0:
|
|
r,g,b=BestColor(km*(sens_val-m_min))
|
|
#print(r,g,b)
|
|
rgb_row[col] = r,g,b
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = device_counter * (18*stretch_by) + 2*gate * stretch_by + stretch_index
|
|
#print(y, row, devices_c, sensor_index, location_index, stretch_index)
|
|
|
|
#arr_stretched[y, :] = rgb_row
|
|
|
|
if gate > 1:
|
|
ks = 0
|
|
if(s_max > s_min):
|
|
if bw:
|
|
ks = 255/(s_max - s_min)
|
|
else:
|
|
ks = 1280/(s_max - s_min)
|
|
#wave_m = np.array([km*(item[0]-m_min) for item in minute_radar_lists[:1440][gate]])
|
|
wave_s = np.array([0.0] * 1440)
|
|
|
|
for minute_m in range(1440):
|
|
wave_s[minute_m] = minute_radar_lists[minute_m][gate+7]
|
|
#wave_m = np.array([item[0] for item in minute_radar_lists[:1440][gate]])
|
|
DoDisplay2(wave_m, wave_s, location_name+" "+str(dev_id)+" "+ description+" "+ str(gate))
|
|
arr_source[2*gate + 1, :] = wave_s
|
|
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
|
|
for col in range(1440):
|
|
sens_val = wave_s[col]
|
|
if sens_val != 0:
|
|
if bw:
|
|
r = ks*(sens_val-s_min)
|
|
g = r
|
|
b = r
|
|
else:
|
|
r,g,b=BestColor(ks*(sens_val-s_min))
|
|
#print(r,g,b)
|
|
rgb_row[col] = r,g,b
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = device_counter * (18*stretch_by) + (2*(gate) + 1) * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
y = device_counter * (18*stretch_by) + (2*(gate)) * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
device_counter += 1
|
|
print("stop")
|
|
|
|
elif (chart_type == 4): #"collapsed"
|
|
|
|
stretch_by = 50
|
|
arr_source = np.zeros((1, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
|
|
device_counter = 0
|
|
wave_m = [["", -1] for _ in range(1440)]
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
radar_fields_of_interest = []
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except Exception:
threshold_lst = ["s3_max",12]
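# radar_threshold_group format (as used throughout this block): a two-element
# JSON list [radar_field, threshold], e.g. ["s3_max", 12] means the device counts
# as occupied for a minute when its s3_max value exceeds 12.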
|
|
|
|
radar_field = threshold_lst[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
threshold = threshold_lst[1]
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is None:
return False
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
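# Row 0 of the collapsed map is reserved for "nobody detected": it maps to the
# pseudo device_id 0, which device_id_2_location renders as "Outside".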
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = device_id
|
|
|
|
if radar_threshold_group_st is None:
radar_threshold_group_st = '["s3_max",12]' #the second value is the threshold applied to the s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",12]
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
st = time.time()
|
|
|
|
#each record in my_data has time, device_id and radar_fields_of_interest in it
|
|
result_np = None
|
|
try:
|
|
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
|
|
print(time.time() - st)
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
if False:
|
|
for record in my_data:
|
|
time_val, device_id, min_val, max_val = record
|
|
radar_threshold = device_id_2_threshold[device_id]
|
|
local_time = time_val.astimezone(target_tz)
|
|
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
|
|
|
|
if (wave_m[minute_m][0] == ""):
|
|
if max_val > radar_threshold:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
else:
|
|
if max_val > radar_threshold:
|
|
if max_val > wave_m[minute_m][1]:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
|
|
|
|
|
|
print(time.time()-st)
|
|
if result_np is not None:
|
|
wave_m = result_np
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
#wave_m = FilterGlitches(wave_m, filter_minutes)
|
|
r = 0
|
|
g = 0
|
|
b = 0
|
|
|
|
if isinstance(wave_m[0], np.int64):
initial_device_id = row_nr_2_device_id[wave_m[0]]
else:
initial_device_id = 0
present_at = [[initial_device_id, 0, 1]] #device_id, minute, duration
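# present_at is a run-length encoding of the day: [device_id, start_minute, duration].
# For example, [[0, 0, 420], [559, 420, 35], [0, 455, 985]] would mean "Outside"
# until 07:00, 35 minutes at device 559, then "Outside" for the rest of the day
# (the device ids here are illustrative only).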
|
|
|
|
for minute_m in range(1440):
|
|
try:
|
|
|
|
if isinstance(wave_m[minute_m], np.int64):
|
|
device_id = row_nr_2_device_id[wave_m[minute_m]]
|
|
else:
|
|
device_id = 0
|
|
|
|
if device_id != "" and device_id != -1:
|
|
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
|
|
rgb_row[minute_m] = b,g,r
|
|
|
|
if minute_m > 0:
|
|
if present_at[-1][0] != device_id:
|
|
present_at.append([device_id, minute_m, 1])
|
|
else:
|
|
present_at[-1][2] += 1
|
|
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
for stretch_index in range(stretch_by):
|
|
y = stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
#print("stop")
|
|
#print(r,g,b)
|
|
SaveObjectInBlob(image_file+".bin", present_at)
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
|
|
def CreateFullLocationMapLabelsOut(location_image_file, devices_list, selected_date,
|
|
map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
|
|
#global Id2MACDict
|
|
|
|
thresholds_dict = {}
|
|
|
|
stretch_to_min_max = True
|
|
devices_c = len(devices_list)
|
|
|
|
if devices_c == 0:
|
|
return
|
|
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#recreate the .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
|
|
if (chart_type == 8): #"all graphs"
|
|
|
|
fields = ['m0_max', 'm1_max', 'm2_max', 'm3_max', 'm4_max', 'm5_max',
|
|
'm6_max', 'm7_max', 'm8_max', 'm08_max', 's2_max', 's3_max',
|
|
's4_max', 's5_max', 's6_max', 's7_max', 's8_max', 's28_max', 's28_min']
|
|
|
|
|
|
show_radar = True
|
|
show_light = True
|
|
show_temperature = True
|
|
show_humidity = True
|
|
show_smell = True
|
|
|
|
labels_width = 200
|
|
title_labels_height = 40
|
|
title_label_width = 100
|
|
|
|
#common
|
|
label_font = cv2.FONT_HERSHEY_SIMPLEX
|
|
label_font_line = cv2.LINE_AA
|
|
|
|
#different
|
|
title_label_font_scale = 1
|
|
title_label_font_color = (0, 0, 0)#(128, 255, 255)
|
|
title_label_font_thickness = 2
|
|
|
|
label_font_scale = 0.5
|
|
label_font_color = (0, 0, 0)#(0, 255, 255)
|
|
label_font_thickness = 1
|
|
|
|
fields_n = len(fields)
|
|
|
|
radar_stripes = len(devices_list) * fields_n
|
|
radar_stretch_by = 5
|
|
|
|
light_stripes = len(devices_list)
|
|
light_stretch_by = 20
|
|
|
|
smell_sensors_stripes = 10 * len(devices_list)
|
|
other_sensors_stripes = len(devices_list)
|
|
|
|
temp_stripe_width = 15
|
|
alarm_stripe_width = 5
|
|
temperature_stretch_by = temp_stripe_width + alarm_stripe_width # Total height per device
|
|
|
|
humidity_stripe_width = 15
|
|
humidity_stretch_by = humidity_stripe_width + alarm_stripe_width
|
|
smell_component_stretch_by = 20
|
|
|
|
text_dimensions = get_text_dimensions("TEST", label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
|
|
all_maps_height = 0
|
|
|
|
# radar, light, temperature, humidity, smell*10
|
|
|
|
if show_radar:
|
|
all_maps_height = title_labels_height + radar_stripes*radar_stretch_by
|
|
|
|
if show_light:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*light_stretch_by
|
|
|
|
if show_temperature:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*temperature_stretch_by
|
|
|
|
if show_humidity:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*humidity_stretch_by
|
|
|
|
if show_smell:
|
|
all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*smell_component_stretch_by * 10
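# Rough height budget per device with the constants above (a worked example,
# assuming the 19 radar fields listed earlier): radar 19*5=95 px, light 20 px,
# temperature 15+5=20 px, humidity 15+5=20 px, smell 10*20=200 px, plus a
# 40 px title band for each enabled section.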
|
|
|
|
|
|
if all_maps_height == 0:
|
|
return
|
|
|
|
vertical_offset = 0
|
|
arr_stretched = np.full((all_maps_height, minutes+labels_width, 3), [255, 174, 70], dtype=np.uint8)
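# Canvas layout: all_maps_height rows, with a labels_width-pixel label gutter on
# the left followed by the 1440 minute columns. The fill value [255, 174, 70] is
# the background colour (presumably BGR order, given the cv2 drawing used for labels).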
|
|
|
|
#Let's add divider lines
|
|
x = 190
|
|
if show_radar:
|
|
stretch_by = radar_stretch_by
|
|
cnt = 0
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height + (cnt)*fields_n*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
|
|
section_height = title_labels_height + radar_stripes*radar_stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_light:
|
|
stretch_by = light_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
|
|
section_height = title_labels_height + other_sensors_stripes*stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
|
|
if show_temperature:
|
|
stretch_by = temperature_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
section_height = title_labels_height + other_sensors_stripes*stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_humidity:
|
|
stretch_by = humidity_stretch_by
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*1*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
section_height = title_labels_height + other_sensors_stripes*humidity_stretch_by
|
|
vertical_offset = vertical_offset + section_height
|
|
if show_smell:
|
|
stretch_by = smell_component_stretch_by
|
|
|
|
cnt = 0
|
|
|
|
for details in devices_list:
|
|
y = vertical_offset + title_labels_height+ (cnt)*10*stretch_by
|
|
arr_stretched[y, 190:201, :] = 0
|
|
cnt += 1
|
|
#section_height = title_labels_height + other_sensors_stripes**stretch_by * 10
|
|
#vertical_offset = vertical_offset + section_height
|
|
|
|
#all_maps_height = all_maps_height + title_labels_height + other_sensors_stripes*stretch_by * 10
|
|
|
|
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
labels = []
|
|
title_labels = []
|
|
vertical_offset = 0
|
|
######################################## RADAR ##################################################################
|
|
if show_radar:
|
|
title_label_text = "RADAR"
|
|
fields_s = fields
|
|
stripes = radar_stripes
|
|
stretch_by = radar_stretch_by
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2), vertical_offset + 10 + title_text_height), label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
cnt = 0
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] is not None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] is not None and details[6] != "":
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
labels.append((descriptor, (10, vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by), label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
sql = get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is not None:
|
|
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
st = time.time()
|
|
if True:
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
wave_m = create_radar_optimized_heatmap(my_data, bw, fields_s, wave_m, device_to_index, base_minute, time_zone_s)
|
|
print(time.time()-st)
|
|
|
|
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(radar_stretch_by):
|
|
y = yy * radar_stretch_by + stretch_index
|
|
arr_stretched[title_labels_height+y, 200:] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
vertical_offset = vertical_offset + title_labels_height + stripes*radar_stretch_by
|
|
######################################## LIGHT ##################################################################
|
|
if show_light:
|
|
title_label_text = "LIGHT"
|
|
fields_s = ['light']
|
|
min_val = 0
|
|
max_val = 4095
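# 0..4095 spans the full light reading range (presumably the sensor's 12-bit ADC
# scale); values are normalised against this range when colouring the light stripes.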
|
|
stretch_by = light_stretch_by
|
|
stripes = len(devices_list) * len(fields_s) # Calculate number of rows needed
|
|
# Calculate the correct vertical offset for light section
|
|
|
|
# Draw the light section title at the correct position
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for light section
|
|
cnt = 0
|
|
light_ids_list = [] # Create a separate list for light section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
light_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] is not None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] is not None and details[6] != "":
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the light section
|
|
labels.append((descriptor, (10, vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by),
|
|
label_font, label_font_scale, label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get light data using the existing query function
|
|
sql = get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, light_ids_list)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(light_ids_list)}
|
|
|
|
# Calculate base minute
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process light data
|
|
st = time.time()
|
|
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
|
|
# Use the light-specific function
|
|
wave_m = create_light_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Light heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap vertically
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
for yy in range(stripes):
|
|
rgb_row = wave_m[yy]
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
target_y = section_start + y
|
|
|
|
# Make sure we're within bounds of the array
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = rgb_row
|
|
else:
|
|
print(f"Warning: Row {target_y} is out of bounds (max: {arr_stretched.shape[0]-1})")
|
|
|
|
vertical_offset = vertical_offset + title_labels_height + stripes*stretch_by
|
|
print(f"Light stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## TEMPERATURE ##################################################################
|
|
if show_temperature:
|
|
title_label_text = "TEMPERATURE"
|
|
fields_s = ['temperature', 'temperature_state']
|
|
|
|
# Define different stripe widths for temperature and alarm
|
|
|
|
temp_offset = -16.0
|
|
min_val = 20
|
|
max_val = 30
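# Display range for the temperature stripe: 20-30 degC after applying temp_offset.
# The companion alarm stripe (see the simulated-data block below) uses Fahrenheit
# bands: <=50 F or >=90 F critical, <=60 F or >=80 F warning, otherwise normal.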
|
|
|
|
# Calculate the correct vertical offset for temperature section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the temperature section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for temperature section
|
|
cnt = 0
|
|
temp_ids_list = [] # Create a separate list for temperature section
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] is not None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] is not None and details[6] != "":
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the temperature section
|
|
y_pos = vertical_offset + title_labels_height + text_height + cnt * temperature_stretch_by
|
|
#y_pos = vertical_offset + title_labels_height + text_height + (cnt)*len(fields_s)*stretch_by)
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get temperature data
|
|
sql = get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, temp_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process temperature data
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
if False:
|
|
# Simulate data for testing
|
|
for i in range(min(len(my_data), 500)):
|
|
if i >= 100: # Only modify indices 100-500
|
|
t = (i - 100) / 4.0 # Temperature value
|
|
|
|
# Set correct alarm levels based on temperature
|
|
if CelsiusToFahrenheit(t) <= 50 or CelsiusToFahrenheit(t) >= 90:
|
|
alarm_level = 2 # Critical - should be red
|
|
elif CelsiusToFahrenheit(t) <= 60 or CelsiusToFahrenheit(t) >= 80:
|
|
alarm_level = 1 # Warning - should be yellow
|
|
else:
|
|
alarm_level = 0 # Normal - should be green
|
|
|
|
# Replace the tuple with new values
|
|
my_data[i] = (my_data[i][0], my_data[i][1], t, alarm_level)
|
|
|
|
# Create the heatmap data
|
|
wave_m = create_temperature_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Temperature heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap with different heights for temperature and alarm
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
# Loop through each device
|
|
for device_idx in range(len(temp_ids_list)):
|
|
# Get the data rows for this device
|
|
temp_row = wave_m[device_idx * 2] # Temperature row (even index)
|
|
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
|
|
|
|
# Calculate the starting y-position for this device
|
|
device_y_start = section_start + device_idx * temperature_stretch_by
|
|
|
|
# Draw the temperature stripe (15 pixels)
|
|
for stretch_index in range(temp_stripe_width):
|
|
target_y = device_y_start + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = temp_row
|
|
|
|
# Draw the alarm stripe (5 pixels)
|
|
for stretch_index in range(alarm_stripe_width):
|
|
target_y = device_y_start + temp_stripe_width + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = alarm_row
|
|
|
|
print(f"Temperature stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## HUMIDITY ##################################################################
|
|
'''
|
|
Ideal indoor humidity: 30-50%
|
|
Too dry: Below 30% - Can cause dry skin, irritated eyes, and respiratory issues
|
|
Too humid: Above 60% - Feels warmer than actual temperature, promotes mold growth
|
|
'''
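# A minimal sketch of the humidity alarm levels implied by the simulation block
# further below (assumed to mirror what create_humidity_optimized_heatmap encodes):
#   def humidity_alarm_level(h):
#       if h <= 20 or h >= 60:
#           return 2  # critical
#       if h <= 30 or h >= 50:
#           return 1  # warning
#       return 0      # normal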
|
|
if show_humidity:
|
|
title_label_text = "HUMIDITY"
|
|
fields_s = ['humidity', 'humidity_state']
|
|
|
|
# Define different stripe widths for humidity and alarm
|
|
|
|
humidity_offset = 0
|
|
min_val = 30#40
|
|
max_val = 60#60
|
|
|
|
# Calculate the correct vertical offset for the humidity section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
if show_temperature:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the humidity section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for the humidity section
cnt = 0
temp_ids_list = [] # device id list for the humidity section (variable name reused from the temperature block)
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] is not None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] is not None and details[6] != "":
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the humidity section
|
|
y_pos = vertical_offset + title_labels_height + text_height + cnt * humidity_stretch_by
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get humidity data
|
|
sql = get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, humidity_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Process humidity data
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440, 3), dtype=np.uint8)
|
|
if False:
|
|
# Simulate data for testing
|
|
for i in range(min(len(my_data), 500)):
|
|
if i >= 100: # Only modify indices 100-500
|
|
h = (i - 100) / 4.0 # Temperature value
|
|
|
|
# Set correct alarm levels based on temperature
|
|
if h <= 20 or h >= 60:
|
|
alarm_level = 2 # Critical - should be red
|
|
elif h <= 30 or h >= 50:
|
|
alarm_level = 1 # Warning - should be yellow
|
|
else:
|
|
alarm_level = 0 # Normal - should be green
|
|
|
|
# Replace the tuple with new values
|
|
my_data[i] = (my_data[i][0], my_data[i][1], h, alarm_level)
|
|
|
|
# Create the heatmap data
|
|
wave_m = create_humidity_optimized_heatmap(my_data, bw, fields_s, wave_m,
|
|
device_to_index, base_minute, time_zone_s,
|
|
min_val, max_val)
|
|
print(f"Humidity heatmap creation time: {time.time()-st:.4f} seconds")
|
|
|
|
# Stretch the heatmap with different heights for humidity and alarm
|
|
st = time.time()
|
|
section_start = vertical_offset + title_labels_height
|
|
|
|
# Loop through each device
|
|
for device_idx in range(len(temp_ids_list)):
|
|
# Get the data rows for this device
|
|
humidity_row = wave_m[device_idx * 2] # Humidity row (even index)
|
|
alarm_row = wave_m[device_idx * 2 + 1] # Alarm row (odd index)
|
|
|
|
# Calculate the starting y-position for this device
|
|
device_y_start = section_start + device_idx * humidity_stretch_by
|
|
|
|
# Draw the humidity stripe (15 pixels)
|
|
for stretch_index in range(humidity_stripe_width):
|
|
target_y = device_y_start + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = humidity_row
|
|
|
|
# Draw the alarm stripe (5 pixels)
|
|
for stretch_index in range(alarm_stripe_width):
target_y = device_y_start + humidity_stripe_width + stretch_index
|
|
if target_y < arr_stretched.shape[0]:
|
|
arr_stretched[target_y, labels_width:] = alarm_row
|
|
|
|
print(f"Humidity stretching time: {time.time()-st:.4f} seconds")
|
|
|
|
######################################## SMELL ##################################################################
|
|
if show_smell:
|
|
title_label_text = "SMELL"
|
|
fields_s = ['S0', 'S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'S7', 'S8', 'S9']
|
|
|
|
# Define different stripe widths for humidity and alarm
|
|
|
|
smell_offset = 0
|
|
|
|
# Calculate the correct vertical offset for the smell section
|
|
vertical_offset = 0
|
|
if show_radar:
|
|
vertical_offset += title_labels_height + radar_stripes * radar_stretch_by
|
|
if show_light:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * light_stretch_by
|
|
if show_temperature:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * temperature_stretch_by
|
|
if show_humidity:
|
|
vertical_offset += title_labels_height + other_sensors_stripes * humidity_stretch_by
|
|
|
|
stripes = len(devices_list) * len(fields_s) # Number of rows needed in data array
|
|
|
|
# Draw the smell section title
|
|
title_text_dimensions = get_text_dimensions(title_label_text, label_font, title_label_font_scale, label_font_thickness)
|
|
title_text_height = title_text_dimensions["height"]
|
|
title_label_width = title_text_dimensions["width"]
|
|
title_label = (title_label_text, (int(labels_width + minutes * 0.5 - title_label_width / 2),
|
|
vertical_offset + 10 + title_text_height),
|
|
label_font, title_label_font_scale, title_label_font_color, title_label_font_thickness, label_font_line)
|
|
|
|
title_labels.append(title_label)
|
|
|
|
# Draw device labels for the smell section
cnt = 0
temp_ids_list = [] # device id list for the smell section (variable name reused from the temperature block)
|
|
for details in devices_list:
|
|
dev_id = details[0]
|
|
temp_ids_list.append(details[1])
|
|
descriptor = details[2]
|
|
if details[3] is not None and details[3] != "":
descriptor = descriptor + " " + details[3]
if details[6] is not None and details[6] != "":
descriptor = descriptor + " " + details[6]
|
|
|
|
text_dimensions = get_text_dimensions(descriptor, label_font, label_font_scale, label_font_thickness)
|
|
text_height = text_dimensions["height"]
|
|
# Position labels in the smell section
|
|
y_pos = vertical_offset + title_labels_height +80+ text_height + cnt * smell_component_stretch_by * 10
|
|
#y_pos = vertical_offset + title_labels_height+40+text_height + (cnt)*fields_n*stretch_by)
|
|
labels.append((descriptor, (10, y_pos), label_font, label_font_scale,
|
|
label_font_color, label_font_thickness, label_font_line))
|
|
cnt += 1
|
|
|
|
# Get smell data
|
|
sql = get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, temp_ids_list, smell_offset)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
if my_data is not None and len(my_data) > 0:
|
|
device_to_index = {device: idx for idx, device in enumerate(temp_ids_list)}
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
|
|
# Create the heatmap data
|
|
create_smell_optimized_heatmap(arr_stretched, my_data, bw, fields_s, device_to_index, base_minute, time_zone_s, smell_component_stretch_by, selected_date, vertical_offset + 18 + title_text_height)
|
|
|
|
|
|
|
|
SaveImageInBlobLabelsOut(image_file, arr_stretched, labels, title_labels)
|
|
|
|
print("stop")
|
|
|
|
|
|
|
|
|
|
|
|
def CreateDailyLocationMap(location_image_file, devices_list, selected_date, filter_minutes, time_zone_s, stretch_by):
|
|
|
|
devices_c = len(devices_list)
|
|
sensors_c = 1#len(sensors_table)
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
|
|
image_file = location_image_file
|
|
|
|
minutes = 1440
|
|
|
|
|
|
#search_pattern = os.path.join(scriptDir, "scratch/*_"+str(current_date.year)+"_"+str(current_date.month).rjust(2, '0')+"_"+str(current_date.day)+"_*.pkl")
|
|
|
|
#allFiles = [os.path.join(dest_path, f) for f in glob.glob(search_pattern)]
|
|
#recreate the .pkl files if missing
|
|
today_date = datetime.datetime.fromtimestamp(time.time())
|
|
|
|
arr_source = np.zeros((1, minutes), dtype=float)
|
|
arr_stretched = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
arr_stretched_sorted = np.zeros((int(stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
|
|
device_counter = 0
|
|
wave_m = [["", -1] for _ in range(1440)]
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
ids_list = []
|
|
radar_fields_of_interest = []
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except Exception:
threshold_lst = ["s3_max",12]
|
|
|
|
radar_field = threshold_lst[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
threshold = threshold_lst[1]
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data is None:
return False
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = device_id
|
|
|
|
if radar_threshold_group_st is None:
radar_threshold_group_st = '["s3_max",12]' #the second value is the threshold applied to the s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",12]
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
st = time.time()
|
|
|
|
#each record in my_data has time, device_id and radar_fields_of_interest in it
|
|
|
|
try:
|
|
result_np = process_wave_data_numpy(image_file, my_data, time_zone_s, device_id_2_threshold, radar_fields_of_interest)
|
|
print(time.time() - st)
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
if False:
|
|
for record in my_data:
|
|
time_val, device_id, min_val, max_val = record
|
|
radar_threshold = device_id_2_threshold[device_id]
|
|
local_time = time_val.astimezone(target_tz)
|
|
minute_m = int((local_time - local_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() / 60)
|
|
|
|
if (wave_m[minute_m][0] == ""):
|
|
if max_val > radar_threshold:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
else:
|
|
if max_val > radar_threshold:
|
|
if max_val > wave_m[minute_m][1]:
|
|
wave_m[minute_m][0] = device_id
|
|
wave_m[minute_m][1] = max_val
|
|
|
|
|
|
|
|
print(time.time()-st)
|
|
wave_m = result_np
|
|
rgb_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
rgbsorted_row = np.zeros(( 1440, 3), dtype=np.uint8)
|
|
#wave_m = FilterGlitches(wave_m, filter_minutes)
|
|
r = 0
|
|
g = 0
|
|
b = 0
|
|
|
|
presence_minutes = {}
|
|
|
|
#we want to generate present_at array
|
|
if isinstance(wave_m[0], np.int64):
initial_device_id = row_nr_2_device_id[wave_m[0]]
else:
initial_device_id = 0
present_at = [[initial_device_id, 0, 1]] #device_id, minute, duration
|
|
for minute_m in range(1440):
|
|
try:
|
|
if isinstance(wave_m[minute_m], np.int64):
|
|
device_id = row_nr_2_device_id[wave_m[minute_m]]
|
|
else:
|
|
device_id = 0
|
|
|
|
if device_id != "" and device_id != -1:
|
|
r,g,b = Loc2Color[device_id_2_location[device_id]][0]
|
|
rgb_row[minute_m] = b,g,r
|
|
|
|
if Loc2Color[device_id_2_location[device_id]][1] in presence_minutes:
|
|
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [presence_minutes[Loc2Color[device_id_2_location[device_id]][1]][0] + 1, Loc2Color[device_id_2_location[device_id]][0]]
|
|
else:
|
|
presence_minutes[Loc2Color[device_id_2_location[device_id]][1]] = [1, Loc2Color[device_id_2_location[device_id]][0]]
|
|
|
|
if minute_m > 0:
|
|
if present_at[-1][0] != device_id:
|
|
present_at.append([device_id, minute_m, 1])
|
|
else:
|
|
present_at[-1][2] += 1
|
|
|
|
except Exception as err:
|
|
print(str(err))
|
|
|
|
start_minute = 0
|
|
for color_key in sorted(presence_minutes):
|
|
print(color_key, presence_minutes[color_key])
|
|
rgbsorted_row[start_minute:start_minute+presence_minutes[color_key][0]] = presence_minutes[color_key][1][::-1]
|
|
start_minute += presence_minutes[color_key][0]
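# The loop above builds the "sorted" companion strip: instead of minute order, the
# day's minutes are grouped by location (using the Loc2Color sort key) so the strip
# reads as a stacked bar of time spent per room. The colour is reversed ([::-1]) to
# convert the stored RGB tuple to the BGR order used by the image buffer.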
|
|
|
|
#we need to save present_at list to blob
|
|
SaveObjectInBlob(image_file+".bin", present_at)
|
|
#present_at_back_s = ReadObjectMinIO("daily-maps", image_file+".bin")
|
|
#present_at_back = json.loads(present_at_back_s)
|
|
#print(present_at_back)
|
|
for stretch_index in range(stretch_by):
|
|
y = stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
arr_stretched_sorted[y, :] = rgbsorted_row
|
|
#print("stop")
|
|
#print(r,g,b)
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
SaveImageInBlob(image_file[:-4]+"S.png", arr_stretched_sorted)
|
|
|
|
|
|
def GenerateFullLocationMap(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add 5 seconds so the date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreateFullLocationMap(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
|
|
|
|
def GenerateFullLocationMapLabelsOut(map_file, deployment_id, ddate, recreate_or_not, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes = 5):
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add 5 seconds so the date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreateFullLocationMapLabelsOut(map_file, devices_list, ddate, 1, recreate_or_not, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
|
|
|
|
def CreateMapFast(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by):
|
|
global Id2MACDict, s_table_temp
|
|
|
|
st = time.time()
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
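# "s28" is a virtual column: it expands to the mean of the stationary radar gates
# s2..s8 so the query can aggregate it like any physical radar_readings column.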
|
|
try:
|
|
|
|
#stretch_to_min_max = True
|
|
|
|
#current_date_p = selected_date.replace("-", "_")
|
|
#current_date_s = selected_date
|
|
|
|
lower_than200 = 0
|
|
larger_than200 = 0
|
|
ids_list = []
|
|
for details in devices_list[0]:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
if dev_id < 200:
|
|
lower_than200 += 1
|
|
else:
|
|
larger_than200 += 1
|
|
|
|
if lower_than200 > 0 and larger_than200 > 0:
|
|
return False, []
|
|
|
|
if larger_than200 > 0:
|
|
sensors_c = len(s_table_temp)
|
|
else: #old sensors not supported
|
|
return False, []
|
|
|
|
|
|
devices_c = len(devices_list[0])
|
|
devices_list_str = ",".join(map(str, devices_list[1]))
|
|
image_file = map_file
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
temp_offset = -16
|
|
if sensors_c > 1:
|
|
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
|
|
else:
|
|
sql = get_deployment_single_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, s_table_temp[0])
|
|
print(sql)
|
|
#print(sql)
|
|
#st = time.time()
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
day_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if day_data is None:
return False, []
|
|
|
|
stretch_by = 10
|
|
minutes = 1440
|
|
stripes = devices_c * sensors_c #2 for upper maxes, lower mins
|
|
arr_source_template = np.full((stripes, minutes+4), -0.001, dtype=float)
|
|
arr_stretched_template = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) # 3 for RGB channels
|
|
arr_source = fast_fill_array_from_timescale_single(day_data, time_from_str, devices_list[1], arr_source_template, s_table_temp[0], time_zone_s)
|
|
arr_source = AddLimits_optimized(arr_source, devices_c, sensors_c, percentile=100)
|
|
scaled_day = CalcExtremes(arr_source, minutes, stripes)
|
|
arr_stretched, vocs_scaled = FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched_template, group_by, bw)
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
return True, vocs_scaled
|
|
|
|
except Exception as e:
|
|
AddToLog(traceback.format_exc())
|
|
return False, []
|
|
|
|
def get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_part (str): Radar column name, defaults to 'radar'
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
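# Example of the ordering fragment this produces for ids_list = [559, 266]
# (illustrative ids): "WHEN 559 THEN 1" / "WHEN 266 THEN 2", which makes the final
# ORDER BY follow the caller-supplied device order instead of raw device_id order.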
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+ {temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.pressure_amplitude,
|
|
sr.max_light,
|
|
rr.radar,
|
|
sr.min_s0 as sensor_min_s0,
|
|
sr.min_s1 as sensor_min_s1,
|
|
sr.min_s2 as sensor_min_s2,
|
|
sr.min_s3 as sensor_min_s3,
|
|
sr.min_s4 as sensor_min_s4,
|
|
sr.min_s5 as sensor_min_s5,
|
|
sr.min_s6 as sensor_min_s6,
|
|
sr.min_s7 as sensor_min_s7,
|
|
sr.min_s8 as sensor_min_s8,
|
|
sr.min_s9 as sensor_min_s9
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS pressure_amplitude,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS radar
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_single_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, sensor_in):
|
|
"""
|
|
Generate a TimeScaleDB query for a single sensor reading based on device IDs.
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_part (str): Radar column name, defaults to 'radar'
|
|
temp_offset (float): Temperature offset to apply
|
|
sensor_in (str): Single sensor to query from s_table
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
if "_" in sensor_in:
|
|
sensor = sensor_in.split("_")[1]
|
|
else:
|
|
sensor = sensor_in
|
|
|
|
# Handle different sensor types
|
|
if sensor == "radar":
|
|
# Query only radar readings
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS {sensor_in}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
elif sensor == "temperature":
|
|
# Query temperature with offset
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) + {temp_offset} AS {sensor_in}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
elif sensor == "humidity":
|
|
# Query humidity
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(humidity) AS {sensor_in}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
elif sensor == "pressure":
|
|
# Query pressure
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(pressure) AS {sensor_in}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
elif sensor == "light":
|
|
# Query light
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(light) AS {sensor_in}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
elif sensor.startswith("voc"):
|
|
# Query VOC sensors (voc0-voc9) - these correspond to s0-s9 in the original query
|
|
voc_num = sensor[3:] # Extract number from "voc0", "voc1", etc.
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MIN(CASE WHEN s{voc_num} > 0 THEN s{voc_num} END) AS {sensor}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
else:
|
|
raise ValueError(f"Unknown sensor type: {sensor}. Must be one of: temperature, humidity, pressure, light, radar, voc0-voc9")
|
|
|
|
return sql
|
|
|
|
def get_deployment_single_query_rz(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, sensor):
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_part (str): Radar column name, defaults to 'radar'
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
#table_sens = {"temperature": (f"sr.avg_temperature+ {temp_offset} as avg_temperature", "avg_temperature"),
|
|
#"humidity": ("sr.avg_humidity", "avg_humidity"),
|
|
#"pressure": ("sr.pressure_amplitude", "pressure_amplitude"),
|
|
#"light":("sr.max_light", "max_light"),
|
|
#"radar":("rr.radar")
|
|
#"voc0":
|
|
#"voc1":
|
|
#"voc2":
|
|
#"voc3":
|
|
#"voc4":
|
|
#"voc5":
|
|
#"voc6":
|
|
#"voc7":
|
|
#"voc8":
|
|
#"voc9": ,
|
|
#}
|
|
# derived
|
|
|
|
#if sensor ==
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+ {temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.pressure_amplitude,
|
|
sr.max_light,
|
|
rr.radar,
|
|
sr.min_s0 as sensor_min_s0,
|
|
sr.min_s1 as sensor_min_s1,
|
|
sr.min_s2 as sensor_min_s2,
|
|
sr.min_s3 as sensor_min_s3,
|
|
sr.min_s4 as sensor_min_s4,
|
|
sr.min_s5 as sensor_min_s5,
|
|
sr.min_s6 as sensor_min_s6,
|
|
sr.min_s7 as sensor_min_s7,
|
|
sr.min_s8 as sensor_min_s8,
|
|
sr.min_s9 as sensor_min_s9
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS pressure_amplitude,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS radar
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
|
|
def get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+{temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.avg_pressure,
|
|
sr.max_light,
|
|
sr.min_s0 as smell_s0,
|
|
sr.min_s1 as smell_s1,
|
|
sr.min_s2 as smell_s2,
|
|
sr.min_s3 as smell_s3,
|
|
sr.min_s4 as smell_s4,
|
|
sr.min_s5 as smell_s5,
|
|
sr.min_s6 as smell_s6,
|
|
sr.min_s7 as smell_s7,
|
|
sr.min_s8 as smell_s8,
|
|
sr.min_s9 as smell_s9,
|
|
rr.absent as radar_absent,
|
|
rr.moving as radar_moving,
|
|
rr.stationary as radar_stationary,
|
|
rr.both as radar_both,
|
|
rr.m0 as radar_m0,
|
|
rr.m1 as radar_m1,
|
|
rr.m2 as radar_m2,
|
|
rr.m3 as radar_m3,
|
|
rr.m4 as radar_m4,
|
|
rr.m5 as radar_m5,
|
|
rr.m6 as radar_m6,
|
|
rr.m7 as radar_m7,
|
|
rr.m8 as radar_m8,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS avg_pressure,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(absent) AS absent,
|
|
MAX(moving) AS moving,
|
|
MAX(stationary) AS stationary,
|
|
MAX(\"both\") AS both,
|
|
MAX(m0) AS m0,
|
|
MAX(m1) AS m1,
|
|
MAX(m2) AS m2,
|
|
MAX(m3) AS m3,
|
|
MAX(m4) AS m4,
|
|
MAX(m5) AS m5,
|
|
MAX(m6) AS m6,
|
|
MAX(m7) AS m7,
|
|
MAX(m8) AS m8,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
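

# --- Illustrative usage sketch for get_deployment_rd_query ---------------------
# Shows how the "radar detailed" deployment query is assembled: ids_list drives
# the ORDER BY CASE so rows come back grouped per device in the caller's preferred
# order. The device IDs, time strings and offset below are hypothetical
# placeholders, not values taken from this codebase.
def _example_rd_query_usage():
    devices_list_str = "201,202,203"                 # hypothetical device IDs
    ids_list = [203, 201, 202]                       # desired output order
    time_from_str = "2024-01-01 08:00:00+0000"       # placeholder time window
    time_to_str = "2024-01-02 08:00:00+0000"
    sql = get_deployment_rd_query(devices_list_str, time_from_str, time_to_str,
                                  ids_list, temp_offset=-16)
    # The generated ORDER BY contains "WHEN 203 THEN 1", "WHEN 201 THEN 2", ...
    print(sql)
    return sql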
|
|
|
|
def get_deployment_radar_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def get_device_radar_s28_only_query(time_from_str, time_to_str, device_id):
|
|
sql = f"""
|
|
SELECT
|
|
time,
|
|
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
|
|
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
ORDER BY
|
|
time ASC
|
|
"""
|
|
return sql
|
|
|
|
def get_device_radar_only_query(devices_list_str, time_from_str, time_to_str, device_id):
|
|
sql = f"""
|
|
SELECT
|
|
time,
|
|
(s2+s3+s4+s5+s6+s7+s8)/7 AS s28,
|
|
(m2+m3+m4+m5+m6+m7+m8)/7 AS m28
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
ORDER BY
|
|
time ASC
|
|
"""
|
|
return sql
|
|
|
|
|
|
def get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
|
|
#radar detailed
|
|
"""
|
|
    Generate a TimeScaleDB query for per-minute collapsed radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
        radar_fields_of_interest (list): List of the unique radar fields required across all devices
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
radar_fields_to_get = ""
|
|
q_parts = ""
|
|
for field in radar_fields_of_interest:
|
|
if field == "s28_min":
|
|
q_part = "MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min"
|
|
elif field == "s28_max":
|
|
q_part = "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max"
|
|
elif field == "m08_max":
|
|
q_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max"
|
|
elif field == "s2_max":
|
|
q_part = "MAX(s2) AS s2_max"
|
|
elif field == "s3_max":
|
|
q_part = "MAX(s3) AS s3_max"
|
|
elif field == "s4_max":
|
|
q_part = "MAX(s4) AS s4_max"
|
|
elif field == "s5_max":
|
|
q_part = "MAX(s5) AS s5_max"
|
|
elif field == "s6_max":
|
|
q_part = "MAX(s6) AS s6_max"
|
|
elif field == "s7_max":
|
|
q_part = "MAX(s7) AS s7_max"
|
|
elif field == "s8_max":
|
|
q_part = "MAX(s8) AS s8_max"
|
|
elif field == "m0_max":
|
|
q_part = "MAX(m0) AS m0_max"
|
|
elif field == "m1_max":
|
|
q_part = "MAX(m1) AS m1_max"
|
|
elif field == "m2_max":
|
|
q_part = "MAX(m2) AS m2_max"
|
|
elif field == "m3_max":
|
|
q_part = "MAX(m3) AS m3_max"
|
|
elif field == "m4_max":
|
|
q_part = "MAX(m4) AS m4_max"
|
|
elif field == "m5_max":
|
|
q_part = "MAX(m5) AS m5_max"
|
|
elif field == "m6_max":
|
|
q_part = "MAX(m6) AS m6_max"
|
|
elif field == "m7_max":
|
|
q_part = "MAX(m7) AS m7_max"
|
|
elif field == "m8_max":
|
|
q_part = "MAX(m8) AS m8_max"
|
|
else:
|
|
q_part = field
|
|
|
|
if q_parts == "":
|
|
q_parts = q_part
|
|
else:
|
|
q_parts = q_parts + ", " + q_part
|
|
if radar_fields_to_get == "":
|
|
radar_fields_to_get = field
|
|
else:
|
|
radar_fields_to_get = radar_fields_to_get + ", " + field
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
{radar_fields_to_get}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{q_parts}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
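

# --- Illustrative check of the field-to-aggregate mapping ----------------------
# A tiny, self-contained sanity check for get_deployment_radar_only_colapsed_query:
# the composite and simple "max" fields should end up as the expected SQL
# aggregates. The device ID, dates and field list are arbitrary placeholders.
def _example_colapsed_field_mapping():
    fields = ["s28_max", "m08_max", "s2_max"]
    sql = get_deployment_radar_only_colapsed_query("201", "2024-01-01", "2024-01-02",
                                                   [201], fields)
    assert "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max" in sql
    assert "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max" in sql
    assert "MAX(s2) AS s2_max" in sql
    return sql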
|
|
|
|
def get_deployment_radar_only_colapsed_query_wid(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
radar_fields_to_get = ""
|
|
q_parts = ""
|
|
for field in radar_fields_of_interest:
|
|
if field == "s28_min":
|
|
q_part = "MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min"
|
|
elif field == "s28_max":
|
|
q_part = "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max"
|
|
elif field == "m08_max":
|
|
q_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max"
|
|
elif field == "s2_max":
|
|
q_part = "MAX(s2) AS s2_max"
|
|
elif field == "s3_max":
|
|
q_part = "MAX(s3) AS s3_max"
|
|
elif field == "s4_max":
|
|
q_part = "MAX(s4) AS s4_max"
|
|
elif field == "s5_max":
|
|
q_part = "MAX(s5) AS s5_max"
|
|
elif field == "s6_max":
|
|
q_part = "MAX(s6) AS s6_max"
|
|
elif field == "s7_max":
|
|
q_part = "MAX(s7) AS s7_max"
|
|
elif field == "s8_max":
|
|
q_part = "MAX(s8) AS s8_max"
|
|
elif field == "m0_max":
|
|
q_part = "MAX(m0) AS m0_max"
|
|
elif field == "m1_max":
|
|
q_part = "MAX(m1) AS m1_max"
|
|
elif field == "m2_max":
|
|
q_part = "MAX(m2) AS m2_max"
|
|
elif field == "m3_max":
|
|
q_part = "MAX(m3) AS m3_max"
|
|
elif field == "m4_max":
|
|
q_part = "MAX(m4) AS m4_max"
|
|
elif field == "m5_max":
|
|
q_part = "MAX(m5) AS m5_max"
|
|
elif field == "m6_max":
|
|
q_part = "MAX(m6) AS m6_max"
|
|
elif field == "m7_max":
|
|
q_part = "MAX(m7) AS m7_max"
|
|
elif field == "m8_max":
|
|
q_part = "MAX(m8) AS m8_max"
|
|
else:
|
|
q_part = field
|
|
|
|
if q_parts == "":
|
|
q_parts = q_part
|
|
else:
|
|
q_parts = q_parts + ", " + q_part
|
|
if radar_fields_to_get == "":
|
|
radar_fields_to_get = field
|
|
else:
|
|
radar_fields_to_get = radar_fields_to_get + ", " + field
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
{radar_fields_to_get}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{q_parts}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_radar_only_detailed_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
    This is looking for presence, NOT absence; otherwise all MAXes would need to be converted to MINs.
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
m0_max,
|
|
m1_max,
|
|
m2_max,
|
|
m3_max,
|
|
m4_max,
|
|
m5_max,
|
|
m6_max,
|
|
m7_max,
|
|
m8_max,
|
|
m08_max,
|
|
s2_max,
|
|
s3_max,
|
|
s4_max,
|
|
s5_max,
|
|
s6_max,
|
|
s7_max,
|
|
s8_max,
|
|
s28_max,
|
|
s28_min
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(m0) AS m0_max,
|
|
MAX(m1) AS m1_max,
|
|
MAX(m2) AS m2_max,
|
|
MAX(m3) AS m3_max,
|
|
MAX(m4) AS m4_max,
|
|
MAX(m5) AS m5_max,
|
|
MAX(m6) AS m6_max,
|
|
MAX(m7) AS m7_max,
|
|
MAX(m8) AS m8_max,
|
|
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
|
|
MAX(s2) AS s2_max,
|
|
MAX(s3) AS s3_max,
|
|
MAX(s4) AS s4_max,
|
|
MAX(s5) AS s5_max,
|
|
MAX(s6) AS s6_max,
|
|
MAX(s7) AS s7_max,
|
|
MAX(s8) AS s8_max,
|
|
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max,
|
|
MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_min
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_light_only_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#light detailed
|
|
"""
|
|
Generate a TimeScaleDB query for light readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
light_max
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX(light) AS light_max
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_temperature_only_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for temperature readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
temp_offset (float): Temperature offset to apply
|
|
|
|
Returns:
|
|
str: Generated SQL query with temperature and alarm level
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
temperature_avg,
|
|
CASE
|
|
WHEN (temperature_avg * 9/5 + 32) <= 50 OR (temperature_avg * 9/5 + 32) >= 90 THEN 2
|
|
WHEN (temperature_avg * 9/5 + 32) <= 60 OR (temperature_avg * 9/5 + 32) >= 80 THEN 1
|
|
ELSE 0
|
|
END AS alarm_level
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
AVG(temperature)+{temp_offset} AS temperature_avg
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
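

# --- Client-side mirror of the temperature alarm thresholds --------------------
# The SQL above classifies the per-minute average temperature (degrees C, after
# the offset) by converting it to Fahrenheit: <=50F or >=90F gives level 2,
# <=60F or >=80F gives level 1, otherwise 0. This small helper is an illustrative
# addition that reproduces the same logic in Python for callers that already have
# the averaged value.
def _temperature_alarm_level(temperature_avg_c):
    fahrenheit = temperature_avg_c * 9 / 5 + 32
    if fahrenheit <= 50 or fahrenheit >= 90:
        return 2
    if fahrenheit <= 60 or fahrenheit >= 80:
        return 1
    return 0


# Example: 15 C is 59 F, which falls in the warning band, so the level is 1.
# _temperature_alarm_level(15) == 1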
|
|
|
|
def get_deployment_humidity_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for humidity readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
        humidity_offset (float): Humidity offset (currently not used; the calibration is applied directly in the SQL expression)
|
|
|
|
Returns:
|
|
str: Generated SQL query with humidity and alarm level
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
col_expr = f"2.3592 * AVG(humidity) + 23.5546" #= 2.3592 * J2 + 33.5546
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
humidity_avg,
|
|
CASE
|
|
WHEN humidity_avg <= 20 OR humidity_avg >= 60 THEN 2
|
|
WHEN humidity_avg <= 30 OR humidity_avg >= 50 THEN 1
|
|
ELSE 0
|
|
END AS alarm_level
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{col_expr} AS humidity_avg
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_smell_only_query(devices_list_str, time_from_str, time_to_str, ids_list, humidity_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for smell readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
        humidity_offset (float): Humidity offset (currently not used by this query)
|
|
|
|
Returns:
|
|
str: Generated SQL query with smell components
|
|
"""
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# SQL query with added alarm_level calculation
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
min_s0,
|
|
min_s1,
|
|
min_s2,
|
|
min_s3,
|
|
min_s4,
|
|
min_s5,
|
|
min_s6,
|
|
min_s7,
|
|
min_s8,
|
|
min_s9
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
|
|
|
|
def get_deployment_radar_only_detailed_all_query(devices_list_str, time_from_str, time_to_str, ids_list):
|
|
#radar detailed
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
    This is looking for presence, NOT absence; otherwise all MAXes would need to be converted to MINs.
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
minute,
|
|
device_id,
|
|
absent_min,
|
|
stationary_max,
|
|
moving_max,
|
|
both_max,
|
|
m0_max,
|
|
m1_max,
|
|
m2_max,
|
|
m3_max,
|
|
m4_max,
|
|
m5_max,
|
|
m6_max,
|
|
m7_max,
|
|
m8_max,
|
|
m08_max,
|
|
s2_max,
|
|
s3_max,
|
|
s4_max,
|
|
s5_max,
|
|
s6_max,
|
|
s7_max,
|
|
s8_max,
|
|
s28_max
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MIN(absent) AS absent_min,
|
|
MAX(stationary) AS stationary_max,
|
|
MAX(moving) AS moving_max,
|
|
MAX("both") AS both_max,
|
|
MAX(m0) AS m0_max,
|
|
MAX(m1) AS m1_max,
|
|
MAX(m2) AS m2_max,
|
|
MAX(m3) AS m3_max,
|
|
MAX(m4) AS m4_max,
|
|
MAX(m5) AS m5_max,
|
|
MAX(m6) AS m6_max,
|
|
MAX(m7) AS m7_max,
|
|
MAX(m8) AS m8_max,
|
|
MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/9) AS m08_max,
|
|
MAX(s2) AS s2_max,
|
|
MAX(s3) AS s3_max,
|
|
MAX(s4) AS s4_max,
|
|
MAX(s5) AS s5_max,
|
|
MAX(s6) AS s6_max,
|
|
MAX(s7) AS s7_max,
|
|
MAX(s8) AS s8_max,
|
|
MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28_max
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute
|
|
"""
|
|
return sql
|
|
def get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset):
|
|
"""
|
|
Generate a TimeScaleDB query for sensor and radar readings based on device IDs.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
sr.avg_temperature+{temp_offset} as avg_temperature,
|
|
sr.avg_humidity,
|
|
sr.avg_pressure,
|
|
sr.max_light,
|
|
sr.min_s0 as smell_s0,
|
|
sr.min_s1 as smell_s1,
|
|
sr.min_s2 as smell_s2,
|
|
sr.min_s3 as smell_s3,
|
|
sr.min_s4 as smell_s4,
|
|
sr.min_s5 as smell_s5,
|
|
sr.min_s6 as smell_s6,
|
|
sr.min_s7 as smell_s7,
|
|
sr.min_s8 as smell_s8,
|
|
sr.min_s9 as smell_s9,
|
|
rr.absent as radar_absent,
|
|
rr.moving as radar_moving,
|
|
rr.stationary as radar_stationary,
|
|
rr.both as radar_both,
|
|
rr.m0 as radar_m0,
|
|
rr.m1 as radar_m1,
|
|
rr.m2 as radar_m2,
|
|
rr.m3 as radar_m3,
|
|
rr.m4 as radar_m4,
|
|
rr.m5 as radar_m5,
|
|
rr.m6 as radar_m6,
|
|
rr.m7 as radar_m7,
|
|
rr.m8 as radar_m8,
|
|
rr.s2 as radar_s2,
|
|
rr.s3 as radar_s3,
|
|
rr.s4 as radar_s4,
|
|
rr.s5 as radar_s5,
|
|
rr.s6 as radar_s6,
|
|
rr.s7 as radar_s7,
|
|
rr.s8 as radar_s8
|
|
FROM (
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS minute,
|
|
device_id,
|
|
AVG(temperature) AS avg_temperature,
|
|
AVG(humidity) AS avg_humidity,
|
|
AVG(pressure) AS avg_pressure,
|
|
MAX(light) AS max_light,
|
|
MIN(CASE WHEN s0 > 0 THEN s0 END) AS min_s0,
|
|
MIN(CASE WHEN s1 > 0 THEN s1 END) AS min_s1,
|
|
MIN(CASE WHEN s2 > 0 THEN s2 END) AS min_s2,
|
|
MIN(CASE WHEN s3 > 0 THEN s3 END) AS min_s3,
|
|
MIN(CASE WHEN s4 > 0 THEN s4 END) AS min_s4,
|
|
MIN(CASE WHEN s5 > 0 THEN s5 END) AS min_s5,
|
|
MIN(CASE WHEN s6 > 0 THEN s6 END) AS min_s6,
|
|
MIN(CASE WHEN s7 > 0 THEN s7 END) AS min_s7,
|
|
MIN(CASE WHEN s8 > 0 THEN s8 END) AS min_s8,
|
|
MIN(CASE WHEN s9 > 0 THEN s9 END) AS min_s9
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS minute,
|
|
device_id,
|
|
MAX(absent) AS absent,
|
|
MAX(moving) AS moving,
|
|
MAX(stationary) AS stationary,
|
|
MAX(\"both\") AS both,
|
|
MAX(m0) AS m0,
|
|
MAX(m1) AS m1,
|
|
MAX(m2) AS m2,
|
|
MAX(m3) AS m3,
|
|
MAX(m4) AS m4,
|
|
MAX(m5) AS m5,
|
|
MAX(m6) AS m6,
|
|
MAX(m7) AS m7,
|
|
MAX(m8) AS m8,
|
|
MAX(s2) AS s2,
|
|
MAX(s3) AS s3,
|
|
MAX(s4) AS s4,
|
|
MAX(s5) AS s5,
|
|
MAX(s6) AS s6,
|
|
MAX(s7) AS s7,
|
|
MAX(s8) AS s8
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
|
|
"""
|
|
Generate a TimeScaleDB query for radar readings based on device IDs with time snapped to 10-second intervals.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_fields_of_interest (list): List of field names required across all devices
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# Handle fields processing
|
|
select_fields = []
|
|
for field in radar_fields_of_interest:
|
|
if field == "s28":
|
|
select_fields.append("(s2+s3+s4+s5+s6+s7+s8)/7 AS s28")
|
|
else:
|
|
select_fields.append(field)
|
|
|
|
fields_str = ", ".join(select_fields)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS ten_seconds,
|
|
device_id,
|
|
{fields_str}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
ten_seconds
|
|
"""
|
|
return sql
|
|
|
|
def get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
|
|
"""
|
|
Generate a TimeScaleDB query for radar readings based on device IDs with time snapped to 10-second intervals.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_fields_of_interest (list): List of field names required across all devices
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
|
|
# Generate the CASE statement for ordering based on the provided ids_list
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# Handle fields processing
|
|
select_fields = []
|
|
for field in radar_fields_of_interest:
|
|
|
|
radar_fields = field.split("_")
|
|
field_t = radar_fields[0]
|
|
if field_t == "s28":
|
|
if radar_fields[1] == "max":
|
|
select_fields.append("MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
|
|
else:
|
|
select_fields.append("MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
|
|
else:
|
|
if radar_fields[1] == "max":
|
|
select_fields.append(f"MAX({field_t}) as {field}")
|
|
else:
|
|
select_fields.append(f"MIN({field_t}) as {field}")
|
|
|
|
fields_str = ", ".join(select_fields)
|
|
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('10 seconds', time) AS ten_seconds,
|
|
device_id,
|
|
{fields_str}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
ten_seconds,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
ten_seconds
|
|
"""
|
|
return sql
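

# --- Illustrative note on the min/max field-name convention --------------------
# get_deployment_radar_10sec_snapped_query_min_max expects field names of the form
# "<column>_max" or "<column>_min" (e.g. "m2_max", "s5_min"); the composite
# "s28_max"/"s28_min" is aliased simply as "s28" in the generated SQL. The device
# ID and time window below are arbitrary placeholders.
def _example_10sec_min_max_fields():
    sql = get_deployment_radar_10sec_snapped_query_min_max(
        "201", "2024-01-01 00:00:00+0000", "2024-01-02 00:00:00+0000",
        [201], ["m2_max", "s5_min", "s28_max"])
    assert "MAX(m2) as m2_max" in sql
    assert "MIN(s5) as s5_min" in sql
    assert "MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28" in sql
    return sql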
|
|
|
|
def export_query_to_minio_chunked(connection_params, query, minio_client, bucket_name, blob_name=None, chunksize=10000):
|
|
"""
|
|
Export query results to MinIO as CSV in chunks to handle large datasets
|
|
|
|
Parameters:
|
|
connection_params (dict): Database connection parameters
|
|
query (str): SQL query to execute
|
|
minio_client: Initialized MinIO client
|
|
bucket_name (str): Name of the MinIO bucket
|
|
blob_name (str): Name for the blob in MinIO. If None, generates timestamped name
|
|
chunksize (int): Number of rows to process at a time
|
|
|
|
Returns:
|
|
str: Name of the created blob
|
|
"""
|
|
try:
|
|
# Create direct connection using psycopg2
|
|
conn = psycopg2.connect(**connection_params)
|
|
|
|
# Generate blob name if not provided
|
|
if blob_name is None:
|
|
            timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')  # the module imports "datetime" (the module), so datetime.datetime is needed here
|
|
blob_name = f'query_results_{timestamp}.csv'
|
|
|
|
# Create a buffer to store CSV data
|
|
csv_buffer = io.StringIO()
|
|
|
|
# Stream the query results in chunks
|
|
first_chunk = True
|
|
for chunk_df in pd.read_sql_query(query, conn, chunksize=chunksize):
|
|
# Write header only for the first chunk
|
|
chunk_df.to_csv(
|
|
csv_buffer,
|
|
index=False,
|
|
header=first_chunk,
|
|
mode='a'
|
|
)
|
|
first_chunk = False
|
|
|
|
# Get the CSV data as bytes
|
|
csv_buffer.seek(0)
|
|
csv_bytes = csv_buffer.getvalue().encode('utf-8')
|
|
|
|
# Upload to MinIO
|
|
minio_client.put_object(
|
|
bucket_name,
|
|
blob_name,
|
|
io.BytesIO(csv_bytes),
|
|
len(csv_bytes)
|
|
)
|
|
|
|
print(f"Data exported successfully to MinIO: {bucket_name}/{blob_name}")
|
|
return blob_name
|
|
|
|
except Exception as e:
|
|
print(f"Error exporting data: {str(e)}")
|
|
print(f"Traceback: {traceback.format_exc()}")
|
|
raise
|
|
|
|
finally:
|
|
if 'conn' in locals():
|
|
conn.close()
|
|
if 'csv_buffer' in locals():
|
|
csv_buffer.close()
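

# --- Usage sketch for export_query_to_minio_chunked ----------------------------
# A minimal sketch of wiring up the chunked CSV export. The host names, bucket
# credentials and query below are hypothetical placeholders; in this service the
# database settings normally come from the DB_HOST/DB_NAME/DB_USER/DB_PASSWORD/
# DB_PORT globals and the MinIO client from miniIO_blob_client.
def _example_export_to_minio():
    connection_params = {
        "host": "db.example.internal",      # placeholder connection settings
        "database": "sensors",
        "user": "reader",
        "password": "secret",
        "port": 5432,
    }
    client = Minio("minio.example.internal:9000",
                   access_key="minio-access", secret_key="minio-secret",
                   secure=False)
    query = "SELECT * FROM sensor_readings LIMIT 100"   # placeholder query
    return export_query_to_minio_chunked(connection_params, query, client,
                                          "data-downloads", "example.csv",
                                          chunksize=10000)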
|
|
|
|
def export_query_to_csv_pandas(connection_params, query, output_path=None):
|
|
"""
|
|
Export query results to CSV using pandas with psycopg2 connection
|
|
|
|
Parameters:
|
|
connection_params (dict): Database connection parameters
|
|
query (str): SQL query to execute
|
|
output_path (str): Path for output CSV file. If None, generates timestamped filename
|
|
|
|
Returns:
|
|
str: Path to the created CSV file
|
|
"""
|
|
try:
|
|
# Create direct connection using psycopg2
|
|
conn = psycopg2.connect(**connection_params)
|
|
|
|
# Generate output path if not provided
|
|
if output_path is None:
|
|
            timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')  # the module imports "datetime" (the module), so datetime.datetime is needed here
|
|
output_path = f'query_results_{timestamp}.csv'
|
|
|
|
# Read query directly into DataFrame using the psycopg2 connection
|
|
df = pd.read_sql_query(query, conn)
|
|
|
|
# Export to CSV with all headers
|
|
df.to_csv(output_path, index=False)
|
|
print(f"Data exported successfully to {output_path}")
|
|
return output_path
|
|
|
|
except Exception as e:
|
|
print(f"Error exporting data: {str(e)}")
|
|
raise
|
|
|
|
finally:
|
|
if 'conn' in locals():
|
|
conn.close()
|
|
|
|
|
|
def CreateDailyCSV(csv_file, devices_list, selected_date, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset):
|
|
global Id2MACDict
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
try:
|
|
|
|
#stretch_to_min_max = True
|
|
|
|
#current_date_p = selected_date.replace("-", "_")
|
|
#current_date_s = selected_date
|
|
|
|
lower_than200 = 0
|
|
larger_than200 = 0
|
|
ids_list = []
|
|
for details in devices_list[0]:
|
|
dev_id = details[0]
|
|
ids_list.append(details[1])
|
|
if dev_id < 200:
|
|
lower_than200 += 1
|
|
else:
|
|
larger_than200 += 1
|
|
|
|
if lower_than200 > 0 and larger_than200 > 0:
|
|
return ""
|
|
|
|
if larger_than200 > 0:
|
|
sensors_c = len(s_table)
|
|
else: #old sensors not supported
|
|
return ""
|
|
|
|
devices_c = len(devices_list[0])
|
|
devices_list_str = ",".join(map(str, devices_list[1]))
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
if consolidated_by == "by_minute_rc":
|
|
sql = get_deployment_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset)
|
|
elif consolidated_by == "by_deca_rd":
|
|
sql = get_deployment_deca_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
|
|
elif consolidated_by == "by_minute_rd":
|
|
sql = get_deployment_rd_query(devices_list_str, time_from_str, time_to_str, ids_list, temp_offset)
|
|
|
|
print(sql)
|
|
|
|
connection_params = {
|
|
'host': DB_HOST,
|
|
'database': DB_NAME,
|
|
'user': DB_USER,
|
|
'password': DB_PASSWORD,
|
|
'port': DB_PORT
|
|
}
|
|
|
|
# Using pandas approach (recommended)
|
|
output_file = export_query_to_minio_chunked(
|
|
connection_params,
|
|
sql,
|
|
miniIO_blob_client,
|
|
"data-downloads",
|
|
csv_file,
|
|
chunksize=10000
|
|
)
|
|
return output_file
|
|
except Exception as e:
|
|
logging.error(str(traceback.format_exc()))
|
|
return ""
|
|
|
|
def GetBlob(file_name, bucket_name="daily-maps"):
|
|
"""
|
|
Retrieve image from blob storage
|
|
|
|
Args:
|
|
file_name (str): Name of the file to retrieve from blob storage
|
|
|
|
Returns:
|
|
tuple: (image_bytes, content_type)
|
|
Returns None, None if image not found or error occurs
|
|
"""
|
|
logger.debug(f"GetBlob({file_name})")
|
|
try:
|
|
# Get the object from blob storage
|
|
data = miniIO_blob_client.get_object(
|
|
bucket_name,
|
|
file_name
|
|
)
|
|
|
|
# Read the data into bytes
|
|
data_bytes = data.read()
|
|
#logger.debug(f"len(data_bytes)={len(data_bytes)}")
|
|
|
|
if bucket_name == "daily-maps":
|
|
return data_bytes, 'image/png'
|
|
else:
|
|
return data_bytes, 'application/zip'
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error: {traceback.format_exc()}")
|
|
return None, None
|
|
|
|
|
|
def MapFileToDate(map_file):
|
|
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
|
|
parts = map_file.split("/")
|
|
parts = parts[-1].split("_")
|
|
|
|
if "-" in parts[0]:
|
|
date_string = parts[0]
|
|
elif "-" in parts[1]:
|
|
date_string = parts[1]
|
|
|
|
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_only = date_object.date()
|
|
return date_only
|
|
|
|
def CSVFileToDate(csv_file):
|
|
    # Same idea as MapFileToDate: pull the 'YYYY-MM-DD' portion out of the file name.
|
|
parts = csv_file.split("/")
|
|
parts = parts[-1].split("_")
|
|
|
|
if "-" in parts[0]:
|
|
date_string = parts[0]
|
|
elif "-" in parts[1]:
|
|
date_string = parts[1]
|
|
|
|
date_object = datetime.datetime.strptime(date_string, "%Y-%m-%d")
|
|
date_only = date_object.date()
|
|
return date_only
|
|
|
|
def GetMACsListSimple(list_of_lists):
|
|
|
|
result = []
|
|
if len(list_of_lists) > 0:
|
|
result = [sublist[3] for sublist in list_of_lists]
|
|
|
|
return(result)
|
|
|
|
def datetime_handler(obj):
|
|
"""Handle datetime serialization for JSON"""
|
|
if isinstance(obj, datetime.datetime):
|
|
if obj.tzinfo:
|
|
return obj.isoformat()
|
|
return obj.strftime('%Y-%m-%d %H:%M:%S.%f')
|
|
raise TypeError(f"Object of type {type(obj)} is not JSON serializable")
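

# --- Example: using datetime_handler with json.dumps ---------------------------
# datetime_handler is meant to be passed as the "default" hook of json.dumps so
# that datetime values coming back from the database serialize cleanly. The
# payload below is an arbitrary illustration.
def _example_json_with_datetimes():
    payload = {"device_id": 201,                                 # hypothetical values
               "time": datetime.datetime(2024, 1, 1, 12, 30, 0)}
    return json.dumps(payload, default=datetime_handler)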
|
|
|
|
def ReadCandles(file, sensor, period, time_from, time_to):
    # vocN readings live in their own tables (vocs_0<period> .. vocs_9<period>);
    # every other sensor follows the "<sensor>s<period>" table-naming convention.
    if sensor in {f"voc{i}" for i in range(10)}:
        table = "vocs_" + sensor[3:] + period
    else:
        table = sensor + "s" + period
    sqlr = "SELECT * from " + table + " WHERE Date >= " + str(time_from) + " AND Date <= " + str(time_to)
    logger.debug(f"sqlr = {sqlr}")

    with get_db_connection() as conn:
        with conn.cursor() as cur:
            devices_string = ReadCleanStringDB(cur, sqlr)
            result = QuerrySql(file, sqlr)
    return result
|
|
|
|
def ReadSensor(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
legal_min, legal_max, window = sensor_legal_values[sensor]
|
|
|
|
result = []
|
|
if sensor == "radar":
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
elif sensor[0] == "s":
|
|
sqlr = f"SELECT time, {sensor} AS smell FROM sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
if sensor == "temperature":
|
|
sqlr = f"SELECT time, {sensor} - 16 from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
sqlr = f"SELECT time, {sensor} from sensor_readings WHERE device_id = {device_id} AND {sensor} >= '{legal_min}' AND {sensor} <= '{legal_max}' AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
|
|
|
|
def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
|
|
import datetime
|
|
from datetime import timezone
|
|
|
|
# Convert epoch to datetime and format as ISO 8601 strings with timezone
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
legal_min, legal_max, window = sensor_legal_values[sensor]
|
|
|
|
# If bucket_size is provided (i.e. not "no"), then use time bucketing.
|
|
use_bucket = bucket_size != "no"
|
|
if use_bucket:
|
|
# Map the shorthand bucket sizes to PostgreSQL interval strings.
|
|
mapping = {
|
|
"10s": "10 seconds",
|
|
"1m": "1 minute",
|
|
"5m": "5 minutes",
|
|
"10m": "10 minutes",
|
|
"15m": "15 minutes",
|
|
"30m": "30 minutes",
|
|
"1h": "1 hour"
|
|
}
|
|
bucket_interval = mapping.get(bucket_size, bucket_size)
|
|
|
|
avgmax = "AVG"
|
|
# Build the SQL query based on sensor type.
|
|
if sensor == "radar":
|
|
# For radar sensors, a special aggregation is needed.
|
|
avgmax = "MAX"
|
|
if radar_part == "s28":
|
|
radar_expr = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
else:
|
|
radar_expr = radar_part
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({radar_expr}) AS radar
|
|
FROM radar_readings
|
|
WHERE device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {radar_expr} AS radar
|
|
FROM radar_readings
|
|
WHERE device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor[0] == "s":
|
|
# For sensors whose name starts with "s" (for example, smell sensors)
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({sensor}) AS smell
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {sensor} AS smell
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "co2":
|
|
alias = sensor
|
|
sensor = "s4"
|
|
sqlr = f"""
|
|
WITH s4_values AS (
|
|
SELECT s4
|
|
FROM public.sensor_readings
|
|
WHERE device_id = 559
|
|
AND s4 IS NOT NULL
|
|
),
|
|
s4_percentile AS (
|
|
SELECT percentile_cont(0.25) WITHIN GROUP (ORDER BY s4 DESC) AS s4_25_percentile
|
|
FROM s4_values
|
|
)
|
|
SELECT s4_25_percentile
|
|
FROM s4_percentile;
|
|
"""
|
|
co2_max = 22536000#102400000
|
|
co2_min = 2400000#16825674 #387585
|
|
|
|
real_co2_max = 2000
|
|
real_co2_min = 430
|
|
|
|
|
|
#logger.debug(f"sqlr = {sqlr}")
|
|
#with get_db_connection() as conn:
|
|
#with conn.cursor() as cur:
|
|
#cur.execute(sqlr)
|
|
#result = cur.fetchall()
|
|
#co2_max = result[0][0]
|
|
#co2_min = result[0][1]
|
|
#=E17+E20*(102400000-A24)/B18
|
|
#col_expr = f"{real_co2_min}+({real_co2_max}-{real_co2_min})*(102400000-{sensor})/({co2_min}-{co2_max})"
|
|
col_expr = f"GREATEST({real_co2_min},{real_co2_min}+({real_co2_max}-{real_co2_min})*({co2_max}-percentile_cont(0.5) WITHIN GROUP (ORDER BY {sensor}))/({co2_max}-{co2_min}))"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "voc":
|
|
sensor = "s9"
|
|
alias = sensor
|
|
col_expr = f"{sensor} - 0"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
elif sensor == "humidity":
|
|
alias = sensor
|
|
col_expr = f"2.3592 * {sensor} + 32.5546" #= 2.3592 * J2 + 33.5546
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
# For other sensors (including temperature, which requires a subtraction)
|
|
alias = sensor
|
|
col_expr = sensor
|
|
if sensor == "temperature":
|
|
col_expr = f"{sensor} - 16"
|
|
alias = "temperature"
|
|
elif sensor == "light":
|
|
avgmax = "MAX"
|
|
|
|
if use_bucket:
|
|
sqlr = f"""
|
|
SELECT time_bucket('{bucket_interval}', time) AS time,
|
|
{avgmax}({col_expr}) AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
GROUP BY time_bucket('{bucket_interval}', time)
|
|
ORDER BY time ASC;
|
|
"""
|
|
else:
|
|
sqlr = f"""
|
|
SELECT time, {col_expr} AS {alias}
|
|
FROM sensor_readings
|
|
WHERE device_id = {device_id}
|
|
AND {sensor} >= '{legal_min}'
|
|
AND {sensor} <= '{legal_max}'
|
|
AND time >= '{time_from_str}'
|
|
AND time <= '{time_to_str}'
|
|
ORDER BY time ASC;
|
|
"""
|
|
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
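

# --- Usage sketch for ReadSensor3 bucket sizes ----------------------------------
# ReadSensor3 accepts the shorthand bucket sizes "10s", "1m", "5m", "10m", "15m",
# "30m" and "1h" (anything else is passed to time_bucket() as-is), or "no" to
# return raw rows. The device id and epoch range below are placeholders, and the
# call needs a live database connection.
def _example_read_sensor3_bucketed():
    time_from_epoch = 1704067200          # 2024-01-01 00:00:00 UTC (placeholder)
    time_to_epoch = 1704153600            # 2024-01-02 00:00:00 UTC (placeholder)
    return ReadSensor3(device_id=201, sensor="temperature",
                       time_from_epoch=time_from_epoch, time_to_epoch=time_to_epoch,
                       data_type="line", radar_part="s28", bucket_size="5m")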
|
|
|
|
def ReadRadarDetail(device_id, sensor, time_from_epoch, time_to_epoch, alt_key_state):
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
|
|
# Format in ISO 8601 format with timezone
|
|
time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
#sensor_index = int(sensor_index)
|
|
|
|
|
|
result = []
|
|
|
|
#time_period_sec can be "10" (RAW) or "60"
|
|
if alt_key_state == "1": #"RAW = 10 sec"
|
|
radar_part = sensor
|
|
if sensor == "m08_max":
|
|
radar_part = "(m0+m1+m2+m3+m4+m5+m6+m7+m8)/9"
|
|
elif sensor == "s28_max" or sensor == "s28_min":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
sqlr = f"SELECT time, {radar_part} AS radar FROM radar_readings WHERE device_id = {device_id} AND time >= '{time_from_str}' AND time <= '{time_to_str}' ORDER BY time ASC"
|
|
else:
|
|
if sensor == "m08_max":
|
|
radar_part = "MAX((m0+m1+m2+m3+m4+m5+m6+m7+m8)/7) AS m08_max"
|
|
elif sensor == "s28_max":
|
|
radar_part = f"MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
|
|
elif sensor == "s28_min":
|
|
radar_part = f"MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS {sensor}"
|
|
else:
|
|
radar_part = f"MAX({sensor}) AS {sensor}"
|
|
|
|
sqlr = f"""
|
|
SELECT
|
|
minute,
|
|
{sensor} as {sensor}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
{radar_part}
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id = {device_id}
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute
|
|
) rr
|
|
|
|
ORDER BY
|
|
minute
|
|
"""
|
|
logger.debug(f"sqlr = {sqlr}")
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sqlr)
|
|
result = cur.fetchall()
|
|
|
|
return result
|
|
|
|
def check_and_parse(data_str):
|
|
# Remove whitespace to handle cases with spaces
|
|
cleaned = data_str.strip()
|
|
# Check if second character is '['
|
|
is_list_of_lists = cleaned[1] == '['
|
|
if cleaned[0] == '[':
|
|
# Parse the string regardless of type
|
|
parsed = json.loads(cleaned)
|
|
else:
|
|
#parsed = cleaned.split(",")
|
|
parsed = SmartSplit(cleaned)
|
|
return is_list_of_lists, parsed
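

# --- Example inputs for check_and_parse ------------------------------------------
# check_and_parse distinguishes JSON-style list payloads from plain comma-separated
# strings: a string starting with "[[" parses as a list of lists, "[...]" as a flat
# list, and anything else falls back to SmartSplit. The literals here are arbitrary
# illustrations.
def _example_check_and_parse():
    assert check_and_parse("[[1, 2], [3, 4]]") == (True, [[1, 2], [3, 4]])
    assert check_and_parse("[1, 2, 3]") == (False, [1, 2, 3])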
|
|
|
|
def clean_data_with_rolling_spline(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Filter outliers using rolling median and replace with spline interpolation
|
|
Returns data in the same format as input: [(timestamp, value), ...]
|
|
"""
|
|
# Unzip the input tuples
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float) # explicitly convert to float
|
|
|
|
# Calculate rolling median and MAD using a safer approach
|
|
rolling_median = []
|
|
rolling_mad = []
|
|
|
|
for i in range(len(y)):
|
|
start_idx = max(0, i - window//2)
|
|
end_idx = min(len(y), i + window//2 + 1)
|
|
window_values = y[start_idx:end_idx]
|
|
|
|
# Skip if window is empty or contains invalid values
|
|
if len(window_values) == 0 or np.any(np.isnan(window_values)):
|
|
rolling_median.append(y[i])
|
|
rolling_mad.append(0)
|
|
continue
|
|
|
|
med = np.median(window_values)
|
|
mad = np.median(np.abs(window_values - med))
|
|
|
|
rolling_median.append(med)
|
|
rolling_mad.append(mad)
|
|
|
|
rolling_median = np.array(rolling_median)
|
|
rolling_mad = np.array(rolling_mad)
|
|
|
|
# Identify outliers (protect against division by zero)
|
|
outlier_mask = np.abs(y - rolling_median) > threshold * (rolling_mad + 1e-10)
|
|
good_data_mask = ~outlier_mask
|
|
|
|
if np.sum(good_data_mask) < 4:
|
|
return line_part_t # return original data if we can't interpolate
|
|
|
|
try:
|
|
        # Create and apply the spline. scipy's interpolate is imported locally here
        # because the module-level "from scipy import interpolate" is commented out.
        from scipy import interpolate
        spline = interpolate.InterpolatedUnivariateSpline(
|
|
x[good_data_mask],
|
|
y[good_data_mask],
|
|
k=3
|
|
)
|
|
|
|
y_cleaned = y.copy()
|
|
y_cleaned[outlier_mask] = spline(x[outlier_mask])
|
|
except Exception as e:
|
|
print(f"Spline interpolation failed: {e}")
|
|
return line_part_t
|
|
|
|
# Return in the same format as input
|
|
return list(zip(x, y_cleaned))
|
|
|
|
def DatesSpan(date_from: str, date_to: str) -> list:
|
|
"""
|
|
Generate a list of dates between date_from and date_to (inclusive).
|
|
Handles cases where date_from is later than date_to.
|
|
|
|
Args:
|
|
date_from (str): Start date in 'YYYY-MM-DD' format
|
|
date_to (str): End date in 'YYYY-MM-DD' format
|
|
|
|
Returns:
|
|
list: List of dates in 'YYYY-MM-DD' format
|
|
"""
|
|
# Convert string dates to datetime objects
|
|
start_date = datetime.datetime.strptime(date_from, '%Y-%m-%d')
|
|
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
|
|
|
|
# Determine direction and swap dates if necessary
|
|
if start_date > end_date:
|
|
start_date, end_date = end_date, start_date
|
|
|
|
# Generate list of dates
|
|
dates_list = []
|
|
current_date = start_date
|
|
|
|
while current_date <= end_date:
|
|
dates_list.append(current_date.strftime('%Y-%m-%d'))
|
|
current_date += timedelta(days=1)
|
|
|
|
# Reverse the list if original date_from was later than date_to
|
|
#if datetime.datetime.strptime(date_from, '%Y-%m-%d') > datetime.datetime.strptime(date_to, '%Y-%m-%d'):
|
|
# dates_list.reverse()
|
|
|
|
return dates_list
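

# --- Worked example for DatesSpan -------------------------------------------------
# The span is inclusive on both ends and tolerates reversed arguments; either way
# the dates come back in ascending order.
def _example_dates_span():
    assert DatesSpan("2023-11-07", "2023-11-09") == ["2023-11-07", "2023-11-08", "2023-11-09"]
    assert DatesSpan("2023-11-09", "2023-11-07") == ["2023-11-07", "2023-11-08", "2023-11-09"]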
|
|
|
|
def zip_blobs(blob_paths, zip_blob_name, bucket_name, minio_client=None):
|
|
"""
|
|
Zip multiple blobs from MinIO storage into a single zip file without saving locally.
|
|
|
|
Args:
|
|
blob_paths (list): List of blob paths to zip
|
|
zip_blob_name (str): Name/path for the output zip file in MinIO
|
|
bucket_name (str): MinIO bucket name
|
|
minio_client (Minio, optional): Existing MinIO client instance
|
|
|
|
Returns:
|
|
bool: True if successful, False otherwise
|
|
"""
|
|
try:
|
|
# Create zip file in memory
|
|
zip_buffer = BytesIO()
|
|
|
|
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
|
# Process each blob
|
|
for blob_path in blob_paths:
|
|
# Get file name from path for zip entry
|
|
file_name = blob_path.split('/')[-1]
|
|
|
|
# Get blob data into memory
|
|
data = minio_client.get_object(bucket_name, blob_path.lstrip('/'))
|
|
|
|
# Add file to zip
|
|
zip_file.writestr(file_name, data.read())
|
|
|
|
# Close the object to free memory
|
|
data.close()
|
|
|
|
# Seek to start of zip file
|
|
zip_buffer.seek(0)
|
|
|
|
# Upload zip file to MinIO
|
|
minio_client.put_object(
|
|
bucket_name,
|
|
zip_blob_name.lstrip('/'),
|
|
zip_buffer,
|
|
length=zip_buffer.getbuffer().nbytes
|
|
)
|
|
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f"Error creating zip file: {str(e)}")
|
|
return False
|
|
    finally:
        # Clean up (guarded: the exception may have occurred before the buffer existed)
        if 'zip_buffer' in locals():
            zip_buffer.close()
|
|
|
|
def clean_data_with_spline(x, y, threshold=2.0):
|
|
"""
|
|
Filter outliers and replace with spline interpolation
|
|
|
|
Parameters:
|
|
x : array-like, timestamps or x-coordinates
|
|
y : array-like, values to be filtered
|
|
threshold : float, number of median absolute deviations for outlier detection
|
|
|
|
Returns:
|
|
array-like : cleaned data with outliers replaced by spline interpolation
|
|
"""
|
|
# Convert inputs to numpy arrays
|
|
x = np.array(x)
|
|
y = np.array(y)
|
|
|
|
# Calculate median and median absolute deviation
|
|
median = np.median(y)
|
|
mad = stats.median_abs_deviation(y)
|
|
|
|
# Identify outliers
|
|
outlier_mask = np.abs(y - median) > threshold * mad
|
|
good_data_mask = ~outlier_mask
|
|
|
|
# If we have too few good points for interpolation, adjust threshold
|
|
min_points_needed = 4 # minimum points needed for cubic spline
|
|
if np.sum(good_data_mask) < min_points_needed:
|
|
return y # return original data if we can't interpolate
|
|
|
|
    # Create spline with non-outlier data. scipy's interpolate is imported locally
    # here because the module-level "from scipy import interpolate" is commented out.
    from scipy import interpolate
    spline = interpolate.InterpolatedUnivariateSpline(
|
|
x[good_data_mask],
|
|
y[good_data_mask],
|
|
k=3 # cubic spline
|
|
)
|
|
|
|
# Replace outliers with interpolated values
|
|
y_cleaned = y.copy()
|
|
y_cleaned[outlier_mask] = spline(x[outlier_mask])
|
|
|
|
return y_cleaned
|
|
|
|
def clean_data(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Remove obvious outliers based on window comparison
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float)
|
|
|
|
cleaned_data = []
|
|
|
|
for i in range(len(y)):
|
|
# Get window around current point
|
|
start_idx = max(0, i - window//2)
|
|
end_idx = min(len(y), i + window//2 + 1)
|
|
window_values = y[start_idx:end_idx]
|
|
|
|
# Calculate median and MAD for the window
|
|
window_median = np.median(window_values)
|
|
deviation = abs(y[i] - window_median)
|
|
|
|
# Keep point if it's not too far from window median
|
|
if deviation <= threshold * window_median:
|
|
cleaned_data.append((x[i], y[i]))
|
|
#else:
|
|
#print(window_values)
|
|
return cleaned_data
|
|
|
|
def clean_data_fast(line_part_t, window=5, threshold=2.0):
|
|
"""
|
|
Remove obvious outliers based on window comparison - vectorized version
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
x = np.array(x)
|
|
y = np.array(y, dtype=float)
|
|
|
|
# Calculate rolling median using numpy
|
|
half_window = window // 2
|
|
medians = np.array([
|
|
np.median(y[max(0, i-half_window):min(len(y), i+half_window+1)])
|
|
for i in range(len(y))
|
|
])
|
|
|
|
# Calculate deviations for all points at once
|
|
deviations = np.abs(y - medians)
|
|
|
|
# Create mask for good points
|
|
good_points = deviations <= threshold * medians
|
|
|
|
# Return filtered data using boolean indexing
|
|
return list(zip(x[good_points], y[good_points]))
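

# --- Synthetic demo of the outlier filters ----------------------------------------
# Builds a small (timestamp, value) series with one injected spike and shows that
# clean_data / clean_data_fast drop it while keeping the well-behaved points.
# Purely synthetic data; the window and threshold are the functions' defaults.
def _example_outlier_filtering():
    series = [(t, 10.0) for t in range(20)]
    series[10] = (10, 100.0)                      # injected outlier
    cleaned = clean_data_fast(series, window=5, threshold=2.0)
    assert (10, 100.0) not in cleaned
    assert len(cleaned) == len(series) - 1
    return cleaned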
|
|
|
|
def clean_data_pd(line_part_t, window=5, percentile=99):
|
|
"""
|
|
Remove obvious outliers based on window comparison - pandas version
|
|
Returns cleaned data in the same format: [(timestamp, value), ...]
|
|
"""
|
|
#line_part_t = line_part_t[2000:2100]
|
|
if len(line_part_t) < window:
|
|
return line_part_t
|
|
|
|
x, y = zip(*line_part_t)
|
|
|
|
# Create pandas Series and calculate rolling median
|
|
series = pd.Series(y)
|
|
medians = series.rolling(window=window, center=True, min_periods=1).median()
|
|
|
|
# Calculate deviations
|
|
deviations = np.abs(series - medians)
|
|
|
|
largest_deviations = deviations.nlargest(10)
|
|
#print(largest_deviations)
|
|
|
|
# Create mask for good points
|
|
deviation_threshold = np.percentile(deviations, percentile)
|
|
good_points = deviations <= deviation_threshold
|
|
|
|
# Convert back to numpy arrays for filtering
|
|
x = np.array(x)
|
|
y = np.array(y)
|
|
|
|
# Return filtered data
|
|
return list(zip(x[good_points], y[good_points]))
|
|
|
|
def CombineStripes(result_filename, stripes_files):
|
|
try:
|
|
# Open the first image to get the width and initialize the height
|
|
first_image = Image.open(stripes_files[0])
|
|
width, height = first_image.size
|
|
|
|
# Calculate the total height of the combined image
|
|
total_height = height * len(stripes_files)
|
|
|
|
# Create a new blank image with the same width and the calculated height
|
|
result_image = Image.new('RGB', (width, total_height))
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
for file_name in stripes_files:
|
|
image = Image.open(file_name)
|
|
result_image.paste(image, (0, y_offset))
|
|
y_offset += height
|
|
|
|
# Save the result image
|
|
result_image.save(result_filename)
|
|
|
|
# Return success flag
|
|
return True
|
|
|
|
except Exception as e:
|
|
print("Error:", e)
|
|
return False
|
|
|
|
def FindFirstLocalMinimum(counts, bins):
|
|
"""
|
|
Find the first local minimum in a histogram after the main peak and calculate its offset.
|
|
|
|
Parameters:
|
|
counts: array-like, the histogram counts from np.histogram()
bins: array-like, the histogram bin edges from np.histogram()
|
|
|
|
Returns:
|
|
tuple: (peak_position, TR, THR_OFFSET)
peak_position: float, bin-center value of the main (global maximum) peak
TR: float, bin-center value of the first local minimum after the main peak
THR_OFFSET: float, distance in bin values between the main peak and the local minimum
|
|
"""
|
|
|
|
# Find the main peak (global maximum)
|
|
main_peak_idx = np.argmax(counts)
|
|
|
|
# Look for the first local minimum after the main peak
|
|
for i in range(main_peak_idx + 1, len(counts) - 1):
|
|
# Check if current point is less than or equal to both neighbors
|
|
if counts[i] <= counts[i-1] and counts[i] <= counts[i+1]:
|
|
# Calculate the bin center value for TR
|
|
TR = (bins[i] + bins[i+1]) / 2
|
|
# Calculate offset from main peak in number of bins
|
|
THR_OFFSET = (bins[i] + bins[i+1]) / 2 - (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
|
|
return (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2, TR, THR_OFFSET
|
|
|
|
# If no local minimum is found, return None for all three values
return None, None, None
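
# Illustrative sketch: feeding np.histogram() output into FindFirstLocalMinimum.
# The bimodal sample below is synthetic; in production the counts come from sensor data.
def _example_find_first_local_minimum():
    rng = np.random.default_rng(0)
    sample = np.concatenate([rng.normal(10, 1, 5000), rng.normal(25, 2, 500)])
    counts, bins = np.histogram(sample, bins=60)
    peak, TR, THR_OFFSET = FindFirstLocalMinimum(counts, bins)
    # peak is the bin-center of the main mode; TR is the first dip after it
    return peak, TR, THR_OFFSET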
|
|
|
|
def process_raw_data(data_tuples):
|
|
"""
|
|
Convert list of (timestamp, stationary, motion) tuples to separate arrays
|
|
|
|
Parameters:
|
|
-----------
|
|
data_tuples : list of tuples
|
|
Each tuple contains (datetime, stationary_value, motion_value)
|
|
|
|
Returns:
|
|
--------
|
|
timestamps : array of datetime
|
|
stationary : array of float
|
|
motion : array of float
|
|
"""
|
|
timestamps = np.array([t[0] for t in data_tuples])
|
|
stationary = np.array([t[1] for t in data_tuples])
|
|
motion = np.array([t[2] for t in data_tuples])
|
|
|
|
return timestamps, stationary, motion
|
|
|
|
def rolling_std_fast(arr, window_size):
|
|
"""
|
|
Fast calculation of rolling standard deviation using NumPy's stride tricks.
|
|
|
|
Parameters:
|
|
-----------
|
|
arr : numpy array
|
|
Input array
|
|
window_size : int
|
|
Size of rolling window
|
|
|
|
Returns:
|
|
--------
|
|
numpy array
|
|
Rolling standard deviation
|
|
"""
|
|
# Compute rolling sum of squares
|
|
r = np.array(arr, dtype=float)
|
|
r2 = np.array(arr, dtype=float) ** 2
|
|
|
|
# Calculate cumulative sums
|
|
cum = np.cumsum(np.insert(r, 0, 0))
|
|
cum2 = np.cumsum(np.insert(r2, 0, 0))
|
|
|
|
# Get rolling windows
|
|
x = (cum[window_size:] - cum[:-window_size])
|
|
x2 = (cum2[window_size:] - cum2[:-window_size])
|
|
|
|
# Calculate standard deviation
|
|
n = window_size
|
|
return np.sqrt((x2/n) - (x/n) ** 2)
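
# Illustrative sketch: rolling_std_fast returns len(arr) - window_size + 1 values
# (population standard deviation of each full window), computed via cumulative sums.
def _example_rolling_std_fast():
    arr = np.sin(np.linspace(0, 10, 200)) + np.random.normal(0, 0.1, 200)
    stds = rolling_std_fast(arr, window_size=20)
    assert len(stds) == len(arr) - 20 + 1
    return stds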
|
|
|
|
def detect_presence(timestamps, stationary_signal, motion_signal, window_size=100,
|
|
motion_threshold=5, gmm_components=2):
|
|
"""
|
|
Detect presence using both stationary and motion signals with adaptive thresholding.
|
|
|
|
Parameters:
|
|
-----------
|
|
timestamps : array-like
|
|
Array of datetime objects
|
|
stationary_signal : array-like
|
|
Time series of stationary signal (0-100)
|
|
motion_signal : array-like
|
|
Time series of motion signal (0-100)
|
|
window_size : int
|
|
Size of rolling window for statistics (used only for temporal smoothing)
|
|
motion_threshold : float
|
|
Threshold for significant motion
|
|
gmm_components : int
|
|
Number of components for Gaussian Mixture Model
|
|
|
|
Returns:
|
|
--------
|
|
presence_mask : numpy array
|
|
Boolean array indicating presence
|
|
baseline : float
|
|
Computed baseline for stationary signal
|
|
threshold : float
|
|
Computed threshold for stationary signal
|
|
"""
|
|
|
|
# Convert inputs to numpy arrays
|
|
stationary_signal = np.array(stationary_signal)
|
|
motion_signal = np.array(motion_signal)
|
|
|
|
# 1. Fit Gaussian Mixture Model to stationary signal
|
|
gmm = GaussianMixture(n_components=gmm_components, random_state=42)
|
|
X = stationary_signal.reshape(-1, 1)
|
|
gmm.fit(X)
|
|
|
|
# Get the component with lowest mean as baseline
|
|
baseline = min(gmm.means_)[0]
|
|
|
|
# 2. Calculate adaptive threshold using GMM components
|
|
components_sorted = sorted(zip(gmm.means_.flatten(), gmm.covariances_.flatten()))
|
|
baseline_std = np.sqrt(components_sorted[0][1])
|
|
threshold = baseline + 3 * baseline_std # 3 sigma rule
|
|
|
|
# 3. Combine motion and stationary detection
|
|
presence_mask = np.zeros(len(stationary_signal), dtype=bool)
|
|
|
|
# Vectorized operations instead of loop
|
|
presence_mask = (motion_signal > motion_threshold) | (stationary_signal > threshold)
|
|
|
|
# 4. Apply temporal smoothing to reduce false transitions
|
|
smooth_window = min(window_size // 4, 10) # Smaller window for smoothing
|
|
presence_mask = np.convolve(presence_mask.astype(int),
|
|
np.ones(smooth_window)/smooth_window,
|
|
mode='same') > 0.5
|
|
|
|
return presence_mask, baseline, threshold
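
# Illustrative sketch of detect_presence on synthetic signals: a quiet baseline
# around 5 and an "occupied" stretch around 60, plus a few motion spikes.
# Timestamps are only passed through, so plain integers are fine here.
def _example_detect_presence():
    rng = np.random.default_rng(1)
    stationary = np.concatenate([rng.normal(5, 1, 300), rng.normal(60, 5, 300)])
    motion = np.zeros(600)
    motion[450:470] = 80
    timestamps = np.arange(600)
    mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
    # mask is True roughly over the second half, where the stationary level
    # exceeds baseline + 3 sigma or motion rises above motion_threshold
    return mask, baseline, threshold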
|
|
|
|
def visualize_detection(timestamps, stationary_signal, motion_signal, presence_mask,
|
|
baseline, threshold, output_file='presence_detection.png'):
|
|
"""
|
|
Visualize the detection results and save to file.
|
|
|
|
Parameters:
|
|
-----------
|
|
timestamps : array-like
|
|
Array of datetime objects
|
|
stationary_signal : array-like
|
|
Time series of stationary signal
|
|
motion_signal : array-like
|
|
Time series of motion signal
|
|
presence_mask : array-like
|
|
Boolean array indicating presence
|
|
baseline : float
|
|
Computed baseline for stationary signal
|
|
threshold : float
|
|
Computed threshold for stationary signal
|
|
output_file : str
|
|
Path to save the output PNG file
|
|
"""
|
|
plt.figure(figsize=(15, 10))
|
|
|
|
# Configure time formatting
|
|
date_formatter = mdates.DateFormatter('%H:%M:%S')
|
|
|
|
# Plot signals
|
|
plt.subplot(3, 1, 1)
|
|
plt.plot(timestamps, stationary_signal, label='Stationary Signal')
|
|
plt.axhline(y=baseline, color='g', linestyle='--', label='Baseline')
|
|
plt.axhline(y=threshold, color='r', linestyle='--', label='Threshold')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.legend()
|
|
plt.title('Stationary Signal with Baseline and Threshold')
|
|
plt.grid(True)
|
|
|
|
plt.subplot(3, 1, 2)
|
|
plt.plot(timestamps, motion_signal, label='Motion Signal')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.legend()
|
|
plt.title('Motion Signal')
|
|
plt.grid(True)
|
|
|
|
plt.subplot(3, 1, 3)
|
|
plt.plot(timestamps, presence_mask, label='Presence Detection')
|
|
plt.gca().xaxis.set_major_formatter(date_formatter)
|
|
plt.ylim(-0.1, 1.1)
|
|
plt.legend()
|
|
plt.title('Presence Detection Result')
|
|
plt.grid(True)
|
|
|
|
plt.tight_layout()
|
|
|
|
# Save to file and close figure to free memory
|
|
plt.savefig(output_file, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
def FindZeroIntersection(counts, bins, save_plot, device_id):
|
|
"""
|
|
Find the zero intersection point by fitting a parabola to the descending slope
|
|
between 50% and 10% of the maximum peak height. Also returns the peak position.
|
|
|
|
Parameters:
|
|
counts: array-like
|
|
The histogram counts
|
|
bins: array-like
|
|
The histogram bin edges
|
|
save_plot: str or None, optional
|
|
If provided, saves the visualization to the specified file path
|
|
|
|
Returns:
|
|
tuple: (zero_intersections, peak_position)
|
|
zero_intersections: list of floats, x-coordinates where parabola intersects y=0
|
|
peak_position: float, x-coordinate of the histogram maximum peak
|
|
"""
|
|
# Find the main peak
|
|
main_peak_idx = np.argmax(counts)
|
|
peak_height = counts[main_peak_idx]
|
|
peak_position = (bins[main_peak_idx] + bins[main_peak_idx+1]) / 2
|
|
|
|
# Calculate 50% and 10% of peak height
|
|
height_50 = 0.50 * peak_height
|
|
height_10 = 0.10 * peak_height
|
|
|
|
# Find indices where counts cross these thresholds after the peak
|
|
idx_50 = main_peak_idx
|
|
idx_10 = main_peak_idx
|
|
|
|
for i in range(main_peak_idx, len(counts)):
|
|
if counts[i] <= height_50 and idx_50 == main_peak_idx:
|
|
idx_50 = i
|
|
if counts[i] <= height_10:
|
|
idx_10 = i
|
|
break
|
|
|
|
# If we couldn't find valid points, return None
|
|
if idx_50 == main_peak_idx or idx_10 == main_peak_idx:
|
|
return None, peak_position
|
|
|
|
# Get x and y coordinates for fitting
|
|
# Use bin centers for x coordinates
|
|
x_points = np.array([(bins[i] + bins[i+1])/2 for i in range(idx_50, idx_10+1)])
|
|
y_points = counts[idx_50:idx_10+1]
|
|
|
|
# Define quadratic function for fitting
|
|
def quadratic(x, a, b, c):
|
|
return a*x**2 + b*x + c
|
|
|
|
try:
|
|
popt, pcov = curve_fit(quadratic, x_points, y_points)
|
|
a, b, c = popt
|
|
|
|
# Find zeros using quadratic formula
|
|
if a != 0:
|
|
discriminant = b**2 - 4*a*c
|
|
if discriminant >= 0:
|
|
x1 = (-b + np.sqrt(discriminant)) / (2*a)
|
|
x2 = (-b - np.sqrt(discriminant)) / (2*a)
|
|
zero_intersections = sorted([x1, x2])
|
|
# Filter zeros to only include those after the peak
|
|
zero_intersections = [x for x in zero_intersections if x > peak_position]
|
|
else:
|
|
zero_intersections = []
|
|
else:
|
|
# If a ≈ 0, fallback to linear solution
|
|
if b != 0:
|
|
zero_intersections = [-c/b]
|
|
else:
|
|
zero_intersections = []
|
|
|
|
if save_plot:
|
|
plt.figure(figsize=(10, 6))
|
|
|
|
# Plot histogram
|
|
bin_centers = [(bins[i] + bins[i+1])/2 for i in range(len(counts))]
|
|
plt.bar(bin_centers, counts, width=bins[1]-bins[0], alpha=0.6,
|
|
color='skyblue', label='Histogram')
|
|
|
|
# Plot peak height lines
|
|
plt.axhline(y=height_50, color='g', linestyle='--', alpha=0.5,
|
|
label='50% Peak Height')
|
|
plt.axhline(y=height_10, color='r', linestyle='--', alpha=0.5,
|
|
label='10% Peak Height')
|
|
|
|
# Plot fitted parabola
|
|
x_fit = np.linspace(min(x_points), max(x_points), 100)
|
|
y_fit = quadratic(x_fit, a, b, c)
|
|
plt.plot(x_fit, y_fit, 'r-', label='Fitted Parabola')
|
|
|
|
# Plot points used for fitting
|
|
plt.plot(x_points, y_points, 'ro', alpha=0.5, label='Fitting Points')
|
|
|
|
# Plot zero intersections
|
|
for x_zero in zero_intersections:
|
|
plt.plot(x_zero, 0, 'ko', label='Zero Intersection')
|
|
|
|
# Plot peak position
|
|
plt.axvline(x=peak_position, color='purple', linestyle='--', alpha=0.5,
|
|
label='Peak Position')
|
|
|
|
# Add labels and legend
|
|
plt.xlabel('Bin Values')
|
|
plt.ylabel('Counts')
|
|
plt.title(f'Histogram Analysis with Parabolic Fit {device_id}')
|
|
plt.legend()
|
|
|
|
# Show zero line
|
|
plt.axhline(y=0, color='k', linestyle='-', alpha=0.2)
|
|
|
|
# Add text with intersection and peak values
|
|
text = f'Peak Position: {peak_position:.2f}\n'
|
|
if zero_intersections:
|
|
text += f'Zero Intersection(s): {", ".join([f"{x:.2f}" for x in zero_intersections])}'
|
|
plt.text(0.02, 0.98, text, transform=plt.gca().transAxes,
|
|
verticalalignment='top',
|
|
bbox=dict(boxstyle='round', facecolor='white', alpha=0.8))
|
|
|
|
# Save plot to file
|
|
plt.savefig(save_plot, dpi=300, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
return zero_intersections, peak_position
|
|
except RuntimeError:
|
|
print("Warning: Failed to fit parabola")
|
|
return None, peak_position
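
# Illustrative sketch: FindZeroIntersection fits a parabola to the falling edge of
# the main histogram peak and reports where it would cross zero. save_plot is left
# as None so no matplotlib figure is produced; "demo" is a placeholder device id.
def _example_find_zero_intersection():
    rng = np.random.default_rng(2)
    counts, bins = np.histogram(rng.normal(50, 5, 10000), bins=80)
    zeros, peak_position = FindZeroIntersection(counts, bins, None, "demo")
    return zeros, peak_position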
|
|
|
|
|
|
def GeneratePresenceHistory(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
|
|
minutes = 1440
|
|
stripes_files = []
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
maps_dates.reverse()
|
|
days = len(maps_dates)
|
|
stretch_by = int(1000 / days)
|
|
if stretch_by > 50:
|
|
stretch_by = 50
|
|
|
|
#lets use 1000 pixels
|
|
#arr_stretched = np.zeros((int(days*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
|
|
result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
|
|
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
|
|
file_exists, time_modified_utc = check_file_exists(filename_day)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
file_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if file_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
|
|
|
|
image_bytes, content_type = GetBlob(filename_day)
|
|
if image_bytes is not None:
|
|
image_stream = io.BytesIO(image_bytes)
|
|
image = Image.open(image_stream)
|
|
|
|
#image = Image.open(file_name)
|
|
result_image.paste(image, (0, y_offset))
|
|
image.close()
|
|
image_stream.close()
|
|
|
|
y_offset += stretch_by
|
|
|
|
# Save directly to MinIO instead of local file
|
|
success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
|
|
# Clean up
|
|
result_image.close()
|
|
return success
|
|
|
|
def AddText(room_image_cv2, x, y, room_name, font_size):
|
|
pil_im = Image.fromarray(room_image_cv2)
|
|
draw = ImageDraw.Draw(pil_im)
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
#print(f"Attempting to load font from: {font_path}")
|
|
try:
|
|
font = ImageFont.truetype(font_path, font_size)  # load Poppins at the requested pixel size
except Exception:
|
|
logger.error(f"Poppins font not found in {font_path}. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # light gray text (RGB)
|
|
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
return room_image_cv2
|
|
|
|
def AddTextList(room_image_cv2, strings_list, font_size):
|
|
pil_im = Image.fromarray(room_image_cv2)
|
|
draw = ImageDraw.Draw(pil_im)
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
try:
|
|
font = ImageFont.truetype(font_path, font_size)  # load Poppins at the requested pixel size
except Exception:
|
|
logger.error("Poppins font not found. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
|
|
for x, y, room_name in strings_list:
|
|
draw.text((x, y), room_name, font=font, fill=(150, 150, 150))  # light gray text (RGB)
|
|
|
|
room_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
return room_image_cv2
|
|
|
|
|
|
def AddRoomData(room_image, room_name, data):
|
|
|
|
# Example usage:
|
|
radius = 10
|
|
color_t = data["color"]  # incoming color is RGB (from Loc2Color)
color = (color_t[2], color_t[1], color_t[0])  # reorder to BGR for OpenCV drawing
|
|
x_offset = 12
|
|
|
|
room_image = AddText(room_image, 13, 20, room_name, 50)
|
|
logger.debug(data)
|
|
for present in data["presence"]:
|
|
device_id, minute, duration = present
|
|
#duration = 10
|
|
top_left = (x_offset + minute, 140) #bottom_right = (300, 200)
|
|
bottom_right = (x_offset + minute + duration, 260)
|
|
draw_rounded_rectangle(room_image, top_left, bottom_right, radius, color)
|
|
return room_image
|
|
|
|
def AddFooterData(image):
|
|
# label x positions for reference: 12, 370, 736, 1092, 1452
|
|
step_size = 1440 / 4
|
|
string_width = 60
|
|
offset = 12
|
|
yoffset = 30
|
|
step = 0
|
|
font_size = 40
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "3 AM", font_size)
|
|
step = 1
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "9 AM", font_size)
|
|
step = 2
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "3 PM", font_size)
|
|
step = 3
|
|
image = AddText(image, offset+step * step_size + step_size/2 - string_width/2, yoffset, "9 PM", font_size)
|
|
return image
|
|
|
|
def draw_rounded_rectangle(image, top_left, bottom_right, radius, color):
|
|
"""
|
|
Draw a filled rectangle with rounded corners, using simple rectangle for small dimensions
|
|
:param image: Image to draw on
|
|
:param top_left: Top-left corner coordinates (x, y)
|
|
:param bottom_right: Bottom-right corner coordinates (x, y)
|
|
:param radius: Desired corner radius (will be adjusted if needed)
|
|
:param color: Rectangle color in BGR format
|
|
"""
|
|
x1, y1 = top_left
|
|
x2, y2 = bottom_right
|
|
|
|
# Calculate width
|
|
width = x2 - x1
|
|
|
|
|
|
# Adjust radius if width or height is too small
|
|
# Maximum radius should be half of the smaller dimension
|
|
max_radius = abs(width) // 2
|
|
radius = min(radius, max_radius)
|
|
|
|
# If width is too small, fallback to regular rectangle
|
|
if width <= 4 or radius <= 1:
|
|
cv2.rectangle(image, top_left, bottom_right, color, -1)
|
|
return
|
|
|
|
# Adjust radius if needed
|
|
radius = min(radius, width // 2)
|
|
|
|
# Create points for the main rectangle
|
|
pts = np.array([
|
|
[x1 + radius, y1],
|
|
[x2 - radius, y1],
|
|
[x2, y1 + radius],
|
|
[x2, y2 - radius],
|
|
[x2 - radius, y2],
|
|
[x1 + radius, y2],
|
|
[x1, y2 - radius],
|
|
[x1, y1 + radius]
|
|
], np.int32)
|
|
|
|
# Fill the main shape
|
|
cv2.fillPoly(image, [pts], color)
|
|
|
|
# Fill the corners
|
|
cv2.ellipse(image, (x1 + radius, y1 + radius), (radius, radius), 180, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x2 - radius, y1 + radius), (radius, radius), 270, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x1 + radius, y2 - radius), (radius, radius), 90, 0, 90, color, -1)
|
|
cv2.ellipse(image, (x2 - radius, y2 - radius), (radius, radius), 0, 0, 90, color, -1)
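
# Illustrative sketch: drawing a filled rounded rectangle onto a blank OpenCV image.
# Very narrow rectangles fall back to a plain cv2.rectangle, as handled above.
def _example_draw_rounded_rectangle():
    canvas = np.zeros((300, 400, 3), dtype=np.uint8)
    draw_rounded_rectangle(canvas, (50, 100), (350, 200), radius=15, color=(0, 128, 255))
    return canvas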
|
|
|
|
def filter_device(locations_list, device_id):
|
|
result = []
|
|
for entry in locations_list:
|
|
if entry[0] == device_id:
|
|
result.append(entry)
|
|
|
|
return result
|
|
|
|
def GenerateLocationsMap(date_st, devices_list, devices_map, locations_list, time_zone_s):
|
|
|
|
devices_list_t = [("date",date_st)]
|
|
|
|
|
|
for mac in devices_list:
|
|
well_id, device_id, room = devices_map[mac]
|
|
#room = devices[well_id][0]
|
|
if room in Loc2Color:
|
|
color = Loc2Color[room][0]
|
|
else:
|
|
color = Loc2Color[room.split()[0]][0]
|
|
presence_data = filter_device(locations_list, device_id)
|
|
room_details = (room, {"color": color, "presence": presence_data})
|
|
devices_list_t.append(room_details)
|
|
|
|
well_id = 0
|
|
device_id = 0
|
|
room = "Outside/?"
|
|
color = (0, 0, 0)
|
|
|
|
#lets's not draw future unknown!
|
|
presence_data = filter_device(locations_list, device_id)
|
|
current_utc = datetime.datetime.now(pytz.UTC)
|
|
current_date_local = current_utc.astimezone(pytz.timezone(time_zone_s))
|
|
current_minute_of_day = current_date_local.hour * 60 + current_date_local.minute
|
|
|
|
if date_st == current_date_local.strftime('%Y-%m-%d'):
|
|
filtered_presence_data = []
|
|
for entry in presence_data:
|
|
if entry[1] < current_minute_of_day :
|
|
if entry[1] + entry[2] < current_minute_of_day:
|
|
filtered_presence_data.append(entry)
|
|
else:
|
|
entry[2] = current_minute_of_day - entry[1]  # truncate the interval at the current minute
|
|
if entry[2] > 0:
|
|
filtered_presence_data.append(entry)
|
|
#print(presence_data)
|
|
else:
|
|
filtered_presence_data = presence_data
|
|
|
|
room_details = (room, {"color": color, "presence": filtered_presence_data})
|
|
devices_list_t.append(room_details)
|
|
|
|
return devices_list_t
|
|
|
|
def CreateDailyLocationChart(filename_chart_image_day, locations):
|
|
result = False
|
|
header_image_file = "header.png"
|
|
room_image_file = "room.png"
|
|
footer_image_file = "footer.png"
|
|
|
|
#ToDo: change it so it reads files from MinIo
|
|
header_image_file = os.path.join(filesDir, header_image_file)
|
|
header_image_file = header_image_file.replace("\\","/")
|
|
header_image = cv2.imread(header_image_file)
|
|
#header_height, header_width = header_image.shape[:2]
|
|
|
|
room_image_file = os.path.join(filesDir, room_image_file)
|
|
room_image_file = room_image_file.replace("\\","/")
|
|
room_image = cv2.imread(room_image_file)
|
|
#room_height, room_width = room_image.shape[:2]
|
|
|
|
footer_image_file = os.path.join(filesDir, footer_image_file)
|
|
footer_image_file = footer_image_file.replace("\\","/")
|
|
footer_image = cv2.imread(footer_image_file)
|
|
|
|
all_images = [header_image]
|
|
for item_c in locations:
|
|
item = item_c[0]
|
|
if item == "date":
|
|
date = item_c[1]
|
|
else:
|
|
room_image = cv2.imread(room_image_file)
|
|
data = item_c[1]
|
|
room_image = AddRoomData(room_image, item, data)
|
|
all_images.append(room_image)
|
|
|
|
footer_image = AddFooterData(footer_image)
|
|
all_images.append(footer_image)
|
|
final_image = np.vstack(all_images)
|
|
#this needs to write straight to MinIo !
|
|
SaveImageInBlob(filename_chart_image_day, final_image)
|
|
result = True
|
|
#cv2.imwrite(filename_chart_image_day, final_image)
|
|
#print(rooms_count)
|
|
return result
|
|
|
|
|
|
def GetOptimumFontSize(target_width, text="00", min_size=1, max_size=100, tolerance=1):
|
|
"""
|
|
Find optimal font size to fit text within target width using binary search.
|
|
|
|
Args:
|
|
target_width (int): Desired width in pixels
|
|
text (str): Text to measure (default "00")
|
|
min_size (int): Minimum font size to try
|
|
max_size (int): Maximum font size to try
|
|
tolerance (int): Acceptable difference from target width
|
|
|
|
Returns:
|
|
int: Optimal font size
|
|
"""
|
|
while min_size <= max_size:
|
|
current_size = (min_size + max_size) // 2
|
|
width, _ = GetStringSize(text, current_size)
|
|
|
|
if abs(width - target_width) <= tolerance:
|
|
return current_size
|
|
elif width > target_width:
|
|
max_size = current_size - 1
|
|
else:
|
|
min_size = current_size + 1
|
|
|
|
# Return the largest size that fits within target width
|
|
width, _ = GetStringSize(text, min_size)
|
|
return min_size if width <= target_width else min_size - 1
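
# Illustrative sketch: binary-searching for the largest font size whose rendered
# "00" stays within a 120 px wide day column. If Poppins is missing, GetStringSize
# falls back to the default PIL font and the result is only approximate.
def _example_get_optimum_font_size():
    size = GetOptimumFontSize(target_width=120, text="00")
    width, height = GetStringSize("00", size)
    return size, width, height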
|
|
|
|
def GetStringSize(some_string, font_size):
|
|
font_path = os.path.join(os.path.dirname(__file__), "fonts", "Poppins-Regular.ttf")
|
|
try:
|
|
font = ImageFont.truetype(font_path, font_size)  # load Poppins at the requested pixel size
except Exception:
|
|
logger.error("Poppins font not found. Please ensure the font file is in your working directory")
|
|
# Fallback to default font if Poppins is not available
|
|
font = ImageFont.load_default()
|
|
|
|
bbox = font.getbbox(some_string)
|
|
return bbox[2] - bbox[0], bbox[3] - bbox[1]
|
|
|
|
def GeneratePresenceHistoryChart(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
#maps_dates, proximity = GetDeploymentDatesBoth(deployment_id)
|
|
minutes = 1440
|
|
stripes_files = []
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
#maps_dates.reverse()
|
|
days = len(maps_dates)
|
|
#stretch_by = int(1000 / days)
|
|
#if stretch_by > 50:
|
|
#stretch_by = 50
|
|
stretch_by = 30
|
|
|
|
#background_image_file = os.path.join(filesDir, "multi_day_template.png")
|
|
background_image_file = os.path.join(filesDir, "multi_day_template2.png")
|
|
background_image_file = background_image_file.replace("\\","/")
|
|
background_image = cv2.imread(background_image_file)
|
|
|
|
rgb_image = background_image #cv2.cvtColor(background_image, cv2.COLOR_BGR2RGB)
|
|
result_image = Image.fromarray(rgb_image) # Convert to PIL Image
|
|
#result_image = Image.new('RGB', (minutes, int(days*stretch_by)))
|
|
|
|
# Paste each image onto the result image vertically
|
|
y_offset = 0
|
|
locations_list = []
|
|
font_size = 50
|
|
string_width, string_height = GetStringSize("00", font_size)
|
|
|
|
|
|
success = False
|
|
if len(maps_dates) == 1:
|
|
|
|
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
|
|
force_recreate = recreate_in
|
|
#it is faster to resize existing daily location chart (length is always 1440), than having to re-create it each time...
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_daily_locations.png"
|
|
filename_chart_data_day = filename_day+".bin"
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_chart_image_day)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
if not force_recreate:
|
|
file_exists1, time_modified_utc1 = check_file_exists(filename_chart_data_day)
|
|
if file_exists1:
|
|
time_modified_local = time_modified_utc1.astimezone(pytz.timezone(time_zone_s))
time_modified_date = time_modified_local.date()
file_date = datetime.datetime.strptime(ddate, '%Y-%m-%d').date()
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list_a, device_ids = GetProximityList(deployment_id, timee)
|
|
CreateDailyLocationMap(filename_day, devices_list_a, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
locations_list_s = ReadObjectMinIO("daily-maps", filename_chart_data_day)
|
|
locations_list = json.loads(locations_list_s)
|
|
|
|
devices_map = {}
|
|
devices_list = []
|
|
for device_entry in devices_list_a:
|
|
#if T:
|
|
if device_entry[3] == None or device_entry[3].strip() == "":
|
|
devices_map[device_entry[4]] = [device_entry[0], device_entry[1], device_entry[2]]
|
|
else:
|
|
devices_map[device_entry[4]] = [device_entry[0], device_entry[1], device_entry[2] + " " + device_entry[3]]
|
|
devices_list.append(device_entry[4])
|
|
|
|
locations = GenerateLocationsMap(ddate, devices_list, devices_map, locations_list, time_zone_s)
|
|
success = CreateDailyLocationChart(filename, locations)
|
|
else:
|
|
|
|
day_counter = 0
|
|
day_step_width = int(1780 / days)
|
|
x_offset = 563
|
|
y_offset = 1615
|
|
h_labels_bottom = 1720
|
|
day_width = int(0.9 * day_step_width)
|
|
day_height = 1440
|
|
font_size = GetOptimumFontSize(day_width, "00", 10, 50, 0)
|
|
string_width, string_height = GetStringSize("00", font_size)
|
|
#logger.debug(f"font_size={font_size} string_width={string_width}")
|
|
|
|
y_offset = y_offset - day_height
|
|
filename_chart_image_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations_chart.png"
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_day)
|
|
file_existsS, time_modifiedS_utc = check_file_exists(filename_day[:-4]+"S.png")
|
|
if file_exists and file_existsS:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, stretch_by)
|
|
|
|
#here we need to rotate and resize to:
|
|
|
|
image_bytes, content_type = GetBlob(filename_day)
|
|
image_bytes_s, content_type_s = GetBlob(filename_day[:-4]+"S.png")
|
|
if image_bytes is not None:
|
|
image_stream = io.BytesIO(image_bytes)
|
|
image = Image.open(image_stream)
|
|
numpy_image = np.array(image)
|
|
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
|
|
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
|
|
|
|
# Convert from BGR to RGB
|
|
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
|
|
# Convert to PIL Image
|
|
pil_image = Image.fromarray(rgb_image)
|
|
|
|
#image = Image.open(file_name)
|
|
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
|
|
result_image.paste(pil_image, (x_origin, y_offset))
|
|
|
|
image_stream = io.BytesIO(image_bytes_s)
|
|
image = Image.open(image_stream)
|
|
numpy_image = np.array(image)
|
|
rotated_image = cv2.rotate(numpy_image, cv2.ROTATE_90_COUNTERCLOCKWISE)
|
|
scaled_image = cv2.resize(rotated_image, (day_width, day_height), interpolation=cv2.INTER_AREA)
|
|
|
|
# Convert from BGR to RGB
|
|
rgb_image = cv2.cvtColor(scaled_image, cv2.COLOR_BGR2RGB)
|
|
# Convert to PIL Image
|
|
pil_image = Image.fromarray(rgb_image)
|
|
|
|
#image = Image.open(file_name)
|
|
x_origin = x_offset + day_step_width * day_counter + int(0.05 * day_step_width)
|
|
result_image.paste(pil_image, (x_origin, 1807+y_offset))
|
|
|
|
|
|
image.close()
|
|
image_stream.close()
|
|
|
|
day_counter += 1
|
|
|
|
pil_im = result_image
|
|
#result_image_cv2 = cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
result_image_cv2 = np.array(pil_im)#cv2.cvtColor(np.array(pil_im), cv2.COLOR_RGB2BGR)
|
|
strings_list = []
|
|
day_counter = 0
|
|
for ddate in maps_dates:
|
|
if string_width <= day_width:
|
|
date_str = ddate[8:10]
|
|
x_origin = x_offset + int(day_step_width * (day_counter + 0.5)) - int(string_width / 2)
|
|
strings_list.append((x_origin, h_labels_bottom, date_str))
|
|
day_counter += 1
|
|
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
|
|
|
|
|
|
#Y 124 to 1636
|
|
labels_bottom = 1636 - 1.5 * string_height
|
|
x_offset = 340
|
|
step = -4 * 60 #4 hours
|
|
font_size = 50
|
|
strings_list = []
|
|
count = 0
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 AM"))
|
|
count = 1
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "4 AM"))
|
|
count = 2
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "8 AM"))
|
|
count = 3
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 PM"))
|
|
count = 4
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "4 PM"))
|
|
count = 5
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "8 PM"))
|
|
count = 6
|
|
y_offset = labels_bottom + count * step
|
|
strings_list.append((x_offset, y_offset, "12 AM"))
|
|
|
|
result_image_cv2 = AddTextList(result_image_cv2, strings_list, font_size)
|
|
|
|
numpy_image = np.array(result_image_cv2)
|
|
|
|
success = SaveImageInBlob(filename, numpy_image)
|
|
#SaveImageInBlob(filename, result_image)
|
|
# Save directly to MinIO instead of local file
|
|
#if success:
|
|
# success = save_to_minio(result_image, filename, DAILY_MAPS_BUCKET_NAME)
|
|
# Clean up
|
|
if success:
|
|
return filename
|
|
else:
|
|
return ""
|
|
|
|
def GeneratePresenceHistoryFiles(filename, recreate_in, deployment_id, filter_minutes, ddate, to_date, now_date, time_zone_s):
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
stretch_by = 30
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
|
|
day_counter = 0
|
|
|
|
for ddate in maps_dates:
|
|
|
|
force_recreate = recreate_in
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename_day+".bin")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename_day)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
CreateDailyLocationMap(filename_day, devices_list, ddate, filter_minutes, time_zone_s, 10)
|
|
|
|
day_counter += 1
|
|
|
|
return filename
|
|
|
|
def CalcStdevs(row, stdev_range, stdevs):
|
|
half_range = stdev_range // 2
|
|
data_len = len(row)
|
|
|
|
# Calculate standard deviations with proper window alignment
|
|
for i in range(data_len):
|
|
# Calculate window boundaries
|
|
start = max(0, i - half_range)
|
|
end = min(data_len, i + half_range + 1)
|
|
|
|
# Get data within window
|
|
window_data = row[start:end]
|
|
|
|
# Calculate standard deviation if we have data
|
|
if len(window_data) > 0:
|
|
stdevs[i] = np.std(window_data)
|
|
|
|
# Find amplitude (max - min of standard deviations)
|
|
amplitude = np.max(stdevs) - np.min(stdevs)
|
|
|
|
# Scale to range 0-1279
|
|
if amplitude > 0: # Avoid division by zero
|
|
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
|
|
|
|
return stdevs, amplitude
|
|
|
|
def CalcLife(row, stdev_range, stdevs):
|
|
half_range = stdev_range // 2
|
|
data_len = len(row)
|
|
|
|
# Calculate standard deviations with proper window alignment
|
|
for i in range(data_len):
|
|
# Calculate window boundaries
|
|
start = max(0, i - half_range)
|
|
end = min(data_len, i + half_range + 1)
|
|
|
|
# Get data within window
|
|
window_data = row[start:end]
|
|
|
|
# Calculate standard deviation if we have data
|
|
if len(window_data) > 0:
|
|
stdevs[i] = np.std(window_data)
|
|
|
|
# Find amplitude (max - min of standard deviations)
|
|
amplitude = np.max(stdevs) - np.min(stdevs)
|
|
|
|
# Scale to range 0-1279
|
|
if amplitude > 0: # Avoid division by zero
|
|
stdevs = ((stdevs - np.min(stdevs)) / amplitude * 1279).astype(np.float32)
|
|
|
|
return stdevs, amplitude
|
|
|
|
def FindCalibrationDate(device_ids, ddate):
|
|
PCD = 50 #% (Peak Contained Data %)
|
|
PHB = 50 #% (Peak Height from Base %)
|
|
MPW = 10 #? (Max Peak Width)
|
|
MPSD =10 #? (Minimum Presence signal Standard Deviation)
|
|
#Find first day with, for all devices:
|
|
#- enough radar data points collected
|
|
#-Single histogram peak containing more than PCD% of data and peak width (at PHB% height) is < MPW
|
|
#Stdev of Data larger > MPSD
|
|
return ddate
|
|
|
|
def FindThreshold(data, percent_list):
|
|
"""
|
|
Find the threshold value above which lies the specified percentage of points.
|
|
|
|
Args:
|
|
data: numpy array of values
|
|
percent_list: (percent_from, percent_to) percentages of points (0-100) that should lie above each returned threshold
|
|
|
|
Returns:
|
|
(threshold_from, threshold_to): the values above which percent_from % and percent_to % of the points lie
|
|
"""
|
|
percent_from, percent_to = percent_list
|
|
# Sort data in descending order
|
|
sorted_data = np.sort(data)[::-1]
|
|
|
|
# Calculate the index corresponding to the desired percentage
|
|
index_from = int((percent_from / 100) * len(data))
|
|
index_to = int((percent_to / 100) * len(data))
|
|
|
|
# Return the threshold value
|
|
return sorted_data[index_from], sorted_data[index_to]
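
# Illustrative sketch: FindThreshold returns the two values above which
# percent_from % and percent_to % of the samples lie, by sorting descending.
def _example_find_threshold():
    rng = np.random.default_rng(3)
    data = rng.normal(100, 15, 1000)
    thr_from, thr_to = FindThreshold(data, (10, 40))
    # ~10% of samples exceed thr_from and ~40% exceed thr_to, so thr_from > thr_to
    return thr_from, thr_to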
|
|
|
|
def ShowThresholdGraph(data, filename, threshold_low, threshold_high, title, AveragePercentSpendsThere, location):
|
|
"""
|
|
Create and save a threshold analysis graph with maximum curvature point.
|
|
"""
|
|
dpi=600
|
|
# Get min and max values
|
|
min_val = np.min(data)
|
|
max_val = np.max(data)
|
|
|
|
# Create 1000 threshold levels from max to min
|
|
thresholds = np.linspace(max_val, min_val, 1000)
|
|
threshold_percentages = np.linspace(0, 100, 1000)
|
|
|
|
# Calculate percentage of points above each threshold
|
|
points_above = []
|
|
total_points = len(data)
|
|
|
|
for thresh in thresholds:
|
|
above_count = np.sum(data > thresh)
|
|
percentage = (above_count / total_points) * 100
|
|
points_above.append(percentage)
|
|
|
|
points_above = np.array(points_above)
|
|
|
|
# Calculate derivatives and smooth them
|
|
first_derivative = np.gradient(points_above)
|
|
second_derivative = np.gradient(first_derivative)
|
|
|
|
#first_derivative = savgol_filter(np.gradient(points_above), window_length=51, polyorder=3)
|
|
#second_derivative = savgol_filter(np.gradient(first_derivative), window_length=51, polyorder=3)
|
|
|
|
|
|
# Find the point of maximum absolute second derivative
|
|
# Exclude edges (first and last 5% of points) to avoid edge effects
|
|
edge_margin = len(second_derivative) // 20 # 5% of points
|
|
valid_range = slice(edge_margin, -edge_margin)
|
|
max_curve_idx = edge_margin + np.argmax(np.abs(second_derivative[valid_range]))
|
|
|
|
max_curve_x = threshold_percentages[max_curve_idx]
|
|
max_curve_y = points_above[max_curve_idx]
|
|
max_curve_second_deriv = second_derivative[max_curve_idx]
|
|
|
|
# Calculate the actual threshold value for this point
|
|
threshold2 = max_val - (max_curve_x/100) * (max_val - min_val)
|
|
|
|
# Create subplot figure
|
|
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10), height_ratios=[3, 2])
|
|
|
|
# Plot 1: Original curve with thresholds
|
|
ax1.plot(threshold_percentages, points_above, 'b-', linewidth=2, label='Distribution')
|
|
ax1.grid(True, linestyle='--', alpha=0.7)
|
|
|
|
# Add original threshold line if provided
|
|
if threshold_low is not None:
|
|
threshold_percent = ((max_val - threshold_low) / (max_val - min_val)) * 100
|
|
percent_above = (np.sum(data > threshold_low) / total_points) * 100
|
|
|
|
ax1.axvline(x=threshold_percent, color='r', linestyle='--', label=f'Minimum % spent in {location}: {AveragePercentSpendsThere[0]:.3f}')
|
|
ax1.axhline(y=percent_above, color='r', linestyle='--')
|
|
|
|
#ax1.annotate(f'Threshold 1: {threshold_low:.3f}\nPoints above: {percent_above:.1f}%',
|
|
#xy=(threshold_percent, percent_above),
|
|
#xytext=(10, 10), textcoords='offset points',
|
|
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
|
|
#arrowprops=dict(arrowstyle='->'))
|
|
|
|
if threshold_high is not None:
|
|
threshold_percent = ((max_val - threshold_high) / (max_val - min_val)) * 100
|
|
percent_above = (np.sum(data > threshold_high) / total_points) * 100
|
|
|
|
ax1.axvline(x=threshold_percent, color='b', linestyle='--', label=f'Maximum % spent in {location}: {AveragePercentSpendsThere[1]:.3f}')
|
|
ax1.axhline(y=percent_above, color='b', linestyle='--')
|
|
|
|
#ax1.annotate(f'Threshold 1: {threshold_high:.3f}\nPoints above: {percent_above:.1f}%',
|
|
#xy=(threshold_percent, percent_above),
|
|
#xytext=(10, 10), textcoords='offset points',
|
|
#bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
|
|
#arrowprops=dict(arrowstyle='->'))
|
|
|
|
# Add maximum curvature point threshold
|
|
ax1.axvline(x=max_curve_x, color='g', linestyle='--', label=f'Threshold 2: {threshold2:.3f}')
|
|
ax1.axhline(y=max_curve_y, color='g', linestyle='--')
|
|
ax1.plot(max_curve_x, max_curve_y, 'go', markersize=10)
|
|
|
|
ax1.annotate(f'Threshold 2: {threshold2:.3f}\nPoints above: {max_curve_y:.1f}%',
|
|
xy=(max_curve_x, max_curve_y),
|
|
xytext=(10, -20), textcoords='offset points',
|
|
bbox=dict(boxstyle='round,pad=0.5', fc='lightgreen', alpha=0.5),
|
|
arrowprops=dict(arrowstyle='->'))
|
|
|
|
ax1.set_xlabel('Threshold Level (%)\n0% = Maximum, 100% = Minimum')
|
|
ax1.set_ylabel('Points Above Threshold (%)')
|
|
ax1.set_title(title)
|
|
ax1.set_xlim(0, 100)
|
|
ax1.set_ylim(0, 100)
|
|
ax1.legend()
|
|
|
|
# Plot 2: Rate of change
|
|
ax2.plot(threshold_percentages, first_derivative, 'g-', label='First derivative', alpha=0.7)
|
|
ax2.plot(threshold_percentages, second_derivative, 'r-', label='Second derivative', alpha=0.7)
|
|
ax2.grid(True, linestyle='--', alpha=0.7)
|
|
|
|
# Mark maximum curvature point on derivative plot
|
|
ax2.axvline(x=max_curve_x, color='g', linestyle='--')
|
|
# Plot point exactly on the second derivative curve
|
|
ax2.plot(max_curve_x, max_curve_second_deriv, 'go', markersize=10,
|
|
label=f'Max curvature at {max_curve_x:.1f}%')
|
|
|
|
ax2.set_xlabel('Threshold Level (%)')
|
|
ax2.set_ylabel('Rate of Change')
|
|
ax2.set_title('Rate of Change Analysis')
|
|
ax2.legend()
|
|
|
|
plt.tight_layout()
|
|
plt.savefig(filename, dpi=dpi, bbox_inches='tight')
|
|
plt.close()
|
|
|
|
return threshold2, max_curve_x, max_curve_y
|
|
|
|
def add_boundary_points(line_part_t, time_zone):
|
|
"""
|
|
Extend a time series with an end-of-day boundary point (23:59:59); adding the 00:00:00 start point is currently disabled.
|
|
|
|
Args:
|
|
line_part_t: List of tuples (timestamp, value)
|
|
time_zone: String representing the timezone (e.g., "America/Los_Angeles")
|
|
|
|
Returns:
|
|
List of tuples with added boundary points
|
|
"""
|
|
if not line_part_t:
|
|
return line_part_t
|
|
|
|
tz = pytz.timezone(time_zone)
|
|
|
|
# Get the date from the first point
|
|
first_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
|
|
date = first_dt.date()
|
|
|
|
last_dt = datetime.datetime.fromtimestamp(line_part_t[-1][0], tz)
|
|
last_date = last_dt.date()
|
|
|
|
|
|
# Create datetime objects for start and end of the day
|
|
start_dt = tz.localize(datetime.datetime.combine(date, datetime.datetime.min.time()))
|
|
end_dt = tz.localize(datetime.datetime.combine(last_date, datetime.datetime.max.time()))
|
|
|
|
# Convert to timestamps
|
|
start_ts = start_dt.timestamp()
|
|
end_ts = end_dt.timestamp()
|
|
|
|
result = list(line_part_t)
|
|
|
|
# Handle start point (00:00:00)
|
|
first_point_dt = datetime.datetime.fromtimestamp(line_part_t[0][0], tz)
|
|
time_diff = first_point_dt - start_dt
|
|
|
|
start_value = line_part_t[0][1]
|
|
|
|
# Add start point at the beginning
|
|
#result.insert(0, (start_ts, start_value))
|
|
|
|
# Handle end point (23:59:59)
|
|
last_point_dt = datetime.datetime.fromtimestamp(line_part_t[-1][0], tz)
|
|
end_value = line_part_t[-1][1]
|
|
# Add end point
|
|
result.append((end_ts, end_value))
|
|
|
|
return result
|
|
|
|
def calculate_life_and_average(my_data1, stdev_range=5):
|
|
# Convert data to numpy array for faster operations
|
|
data_array = np.array(my_data1)
|
|
|
|
# Calculate half range
|
|
stdev_range_h = stdev_range // 2
|
|
|
|
# Pre-calculate indices for the sliding window
|
|
indices = np.arange(len(data_array) - 2 * stdev_range_h)[:, None] + np.arange(2 * stdev_range_h + 1)
|
|
|
|
# Get sliding windows of data
|
|
windows = data_array[indices]
|
|
|
|
# Calculate average (using column 3)
|
|
average = np.mean(windows[:, :, 3], axis=1)
|
|
|
|
# Calculate life (using columns 2, 3, and 4)
|
|
deltas = windows[:, :, 3] - windows[:, :, 2] + windows[:, :, 4]
|
|
life = np.mean(deltas, axis=1)
|
|
|
|
return life.tolist(), average.tolist()
|
|
|
|
def TryJulia(prompt):
|
|
|
|
if len(prompt) > 0:
|
|
|
|
if prompt[0] == "#":
|
|
return prompt.upper()
|
|
|
|
if prompt not in utterances:
|
|
return ""
|
|
else:
|
|
intent = utterances[prompt]
|
|
action = intents[intent]
|
|
|
|
return action[0]
|
|
else:
|
|
return ""
|
|
|
|
def AskGPT(in_prompt, language_from, language_to):
|
|
|
|
if len(in_prompt) > 4:
|
|
|
|
prompt = in_prompt.lower()
|
|
|
|
if language_to.lower() not in language_from.lower():
|
|
prompt = in_prompt + " Answer in " + language_to
|
|
|
|
|
|
print(prompt)
|
|
|
|
#lets see if question is looking for OSM query
|
|
pattern = "what is only the node line for query for * on openstreetmap api? do not answer with url to nominatim, but with query!"
|
|
|
|
|
|
if match_with_wildcard(prompt, pattern):
|
|
differing_part = extract_differing_part(prompt, pattern)
|
|
|
|
if differing_part != "":
|
|
|
|
print(differing_part)
|
|
|
|
if differing_part in searches_dict:
|
|
response = searches_dict[differing_part]
|
|
print(response)
|
|
return response, language_to
|
|
else:
|
|
#check if it is one of the synonyms:
|
|
if differing_part in searches_dict["synonims"]:
|
|
differing_part = searches_dict["synonims"][differing_part]
|
|
if differing_part != "":
|
|
if differing_part in searches_dict:
|
|
response = searches_dict[differing_part]
|
|
print(response)
|
|
return response, language_to
|
|
|
|
hash_string = hashlib.sha256(str(prompt).encode('utf-8')).hexdigest()
|
|
#filename=os.path.join(cache_path, "chgpt_query_" + hash_string+".pkl")
|
|
|
|
julia_present = False
|
|
if prompt.startswith("julia"):
|
|
prompt = prompt[len("julia") + 1:]
|
|
julia_present = True
|
|
|
|
completion = ""
|
|
if julia_present == False:
|
|
completion = TryJulia(prompt)
|
|
#if completion == "":
|
|
# if os.path.exists(filename):
|
|
# #completion = pickle.load(open( filename, "rb" ))
|
|
# completion = (completion.choices[0].message.content.strip(), language_to)[0]
|
|
else:
|
|
completion = TryJulia(prompt)
|
|
|
|
|
|
if completion == "":
|
|
|
|
st = time.time()
|
|
#import wandb
|
|
|
|
#run = wandb.init(project='GPT-4 in Python')
|
|
#prediction_table = wandb.Table(columns=["prompt", "prompt tokens", "completion", "completion tokens", "model", "total tokens"])
|
|
|
|
print(time.time() - st)
|
|
|
|
openai.api_key = OPENAI_API_KEY
|
|
client = OpenAI(
|
|
# This is the default and can be omitted
|
|
api_key = OPENAI_API_KEY
|
|
)
|
|
|
|
completion = client.chat.completions.create(
|
|
messages=[
|
|
{
|
|
"role": "user",
|
|
"content": prompt,
|
|
}
|
|
],
|
|
model="gpt-3.5-turbo",
|
|
)
|
|
|
|
|
|
#with open(filename, 'wb') as handle:
|
|
#pickle.dump(completion, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
|
|
|
response = (completion.choices[0].message.content.strip(), language_to)
|
|
else:
|
|
response = (completion, language_to)
|
|
|
|
else:
|
|
response = ("question is too short", language_to)
|
|
|
|
|
|
print(response)
|
|
return response
|
|
|
|
def AskGPTPure(in_prompt):
|
|
|
|
if len(in_prompt) > 4:
|
|
|
|
prompt = in_prompt.lower()
|
|
|
|
print(prompt)
|
|
|
|
st = time.time()
|
|
|
|
print(time.time() - st)
|
|
|
|
openai.api_key = OPENAI_API_KEY
|
|
client = OpenAI(
|
|
# This is the default and can be omitted
|
|
api_key = OPENAI_API_KEY
|
|
)
|
|
|
|
completion = client.chat.completions.create(
|
|
messages=[
|
|
{
|
|
"role": "user",
|
|
"content": prompt,
|
|
}
|
|
],
|
|
model="gpt-3.5-turbo",
|
|
)
|
|
|
|
response = completion.choices[0].message.content.strip()
|
|
|
|
|
|
else:
|
|
response = "question is too short"
|
|
|
|
|
|
print(response)
|
|
return response
|
|
|
|
def get_last_n_days(n=14, timezone_str='America/Los_Angeles'):
|
|
# Get current UTC time
|
|
utc_now = datetime.datetime.now(pytz.UTC)
|
|
|
|
# Convert to the specified timezone
|
|
local_now = utc_now.astimezone(pytz.timezone(timezone_str))
|
|
|
|
# Get the current date in the specified timezone
|
|
current_date = local_now.date()
|
|
|
|
# Determine the last whole day
|
|
if local_now.hour > 0 or local_now.minute > 0 or local_now.second > 0:
|
|
# Yesterday in the specified timezone
|
|
last_whole_day = current_date - timedelta(days=1)
|
|
else:
|
|
# If it's exactly midnight, the last whole day is two days ago
|
|
last_whole_day = current_date - timedelta(days=2)
|
|
|
|
# Generate list of n days, ending with the last whole day
|
|
date_list = []
|
|
for i in range(n-1, -1, -1):
|
|
day = last_whole_day - timedelta(days=i)
|
|
date_list.append(day.strftime('%Y-%m-%d'))
|
|
|
|
return date_list
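
# Illustrative sketch: get_last_n_days yields ISO date strings ending with the
# last whole day in the given timezone (normally yesterday's local date).
def _example_get_last_n_days():
    dates = get_last_n_days(7, 'America/Los_Angeles')
    # seven consecutive 'YYYY-MM-DD' strings, oldest first
    return dates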
|
|
|
|
|
|
def numpy_to_json(arr, devices_list):
|
|
"""
|
|
Convert numpy array to JSON-serializable format
|
|
|
|
Args:
|
|
arr (numpy.ndarray): 2D numpy array to serialize
|
|
|
|
Returns:
|
|
str: JSON string containing array data and metadata
|
|
"""
|
|
if not isinstance(arr, np.ndarray):
|
|
raise TypeError("Input must be a numpy array")
|
|
|
|
array_dict = {
|
|
'dtype': str(arr.dtype),
|
|
'shape': arr.shape,
|
|
'devices_list': devices_list,
|
|
'data': arr.tolist() # Convert to nested Python lists
|
|
}
|
|
|
|
return json.dumps(array_dict)
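
# Illustrative sketch: round-tripping a small array through numpy_to_json.
# The dtype/shape metadata lets the consumer rebuild the original array.
def _example_numpy_to_json():
    arr = np.arange(6, dtype=np.float32).reshape(2, 3)
    payload = numpy_to_json(arr, devices_list=["dev-a", "dev-b"])
    decoded = json.loads(payload)
    restored = np.array(decoded['data'], dtype=decoded['dtype']).reshape(decoded['shape'])
    assert np.array_equal(arr, restored)
    return decoded['devices_list']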
|
|
|
|
def format_time_difference(minutes):
|
|
# Calculate days, hours, minutes
|
|
days = int(minutes // (24 * 60))
|
|
remaining_minutes = minutes % (24 * 60)
|
|
hours = int(remaining_minutes // 60)
|
|
mins = int(remaining_minutes % 60)
|
|
|
|
parts = []
|
|
|
|
# Add days if any
|
|
if days > 0:
|
|
parts.append(f"{days} day{'s' if days != 1 else ''}")
|
|
|
|
# Add hours if any
|
|
if hours > 0:
|
|
parts.append(f"{hours} hour{'s' if hours != 1 else ''}")
|
|
|
|
# Add minutes if any
|
|
if mins > 0 or (days == 0 and hours == 0):
|
|
parts.append(f"{mins} minute{'s' if mins != 1 else ''}")
|
|
|
|
# Combine the parts into a sentence
|
|
if len(parts) == 1:
|
|
return parts[0]
|
|
elif len(parts) == 2:
|
|
return f"{parts[0]} and {parts[1]}"
|
|
else:
|
|
return f"{parts[0]}, {parts[1]}, and {parts[2]}"
|
|
|
|
def RunCommand(commmand, args_dictionary, deployment_id):
|
|
|
|
to_return = ""
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
local_tz = pytz.timezone(time_zone_s)
|
|
|
|
filter_minutes = 5
|
|
dates = get_last_n_days(28, time_zone_s)
|
|
ddate = dates[0] #2025-02-02 req.params.get("date")
|
|
to_date = dates[-1]
|
|
date_s = datetime.datetime.now(pytz.UTC).astimezone(local_tz).date().strftime("%Y-%m-%d")
|
|
|
|
if commmand == "#STATUS#":
|
|
force_recreate_orig = False #True
|
|
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
|
|
filename = GeneratePresenceHistoryFiles(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
|
|
date1_obj = datetime.datetime.strptime(ddate, '%Y-%m-%d')
|
|
date2_obj = datetime.datetime.strptime(to_date, '%Y-%m-%d')
|
|
|
|
start_date = min(date1_obj, date2_obj)
|
|
end_date = max(date1_obj, date2_obj)
|
|
stretch_by = 30
|
|
|
|
# Generate list of all dates
|
|
maps_dates = [
|
|
(start_date + timedelta(days=x)).strftime('%Y-%m-%d')
|
|
for x in range((end_date - start_date).days + 1)
|
|
]
|
|
|
|
day_counter = 0
|
|
minutes_spent_there_list = []
|
|
minutes_locations_list = []
|
|
filename_4w = f"/{deployment_id}/{deployment_id}_{maps_dates[0]}_{maps_dates[-1]}_{filter_minutes}_{stretch_by}_4w_locations.png.bin"
|
|
for ddate in maps_dates:
|
|
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
Id2Location = {}
|
|
for device in devices_list:
|
|
Id2Location[device[1]] = device[2]
|
|
Id2Location[0] = "Outside/?"
|
|
|
|
filename_day = f"/{deployment_id}/{deployment_id}_{ddate}_{filter_minutes}_{stretch_by}_daily_locations.png.bin"
|
|
locations_list_s = ReadObjectMinIO("daily-maps", filename_day)
|
|
locations_list = ast.literal_eval(locations_list_s)
|
|
minutes_locations_list.append((ddate, locations_list))
|
|
#print(locations_list_s)
|
|
minutes_spent_there = {}
|
|
|
|
|
|
for loc in Id2Location:
|
|
minutes_spent_there[Id2Location[loc]] = 0
|
|
minutes_spent_there[Id2Location[0]] = 0
|
|
|
|
for loc in locations_list:
|
|
minutes_spent_there[Id2Location[loc[0]]] += loc[2]
|
|
|
|
for loc in minutes_spent_there:
|
|
minutes_spent_there[loc] = int(1000 * minutes_spent_there[loc] / 1440) / 10
|
|
minutes_spent_there_list.append((ddate, minutes_spent_there))
|
|
data_part = str(minutes_spent_there_list)
|
|
minutes_locations_list_str = str(minutes_locations_list)
|
|
obj_to_save = {"Location_indexes": str(Id2Location), "Locations": minutes_locations_list_str}
|
|
print(obj_to_save)
|
|
SaveObjectInBlob(filename_4w, obj_to_save)
|
|
print(data_part)
|
|
|
|
prompt = "Attached is 4 weeks of data representing % of time where person living alone is spending each day"
|
|
prompt = prompt + " Assess his last week compared to previous 3 weeks. Comment only on significant changes."
|
|
prompt = prompt + " Ignore days where data is all (or mostly) 0!"
|
|
prompt = prompt + " Consider that office and living room are equivalent for this individual. Entertainment is consumed on computer (office) and in living room TV."
|
|
prompt = prompt + " But he is also napping in living room. Comment on his sleeping pattern as well"
|
|
prompt = prompt + " Can you summarize all in 1 sentence?"
|
|
prompt = prompt + " " + data_part
|
|
result = AskGPTPure(prompt)
|
|
|
|
to_return = result
|
|
#to_return = "Your father appears to be fine. He was walking around the house 10 minutes ago and is currently in the living room. And I can smell coffee"
|
|
|
|
elif commmand == "#STATUS_F#":
|
|
to_return = "Your mother is doing well. She slept 8hr and 23min last night. She used the restroom twice last night. She is now in the kitchen. I can smell coffee."
|
|
elif commmand == "#HELP#":
|
|
to_return = "There is number of things you can ask me about. For example: 'how is my dad doing?' Or 'How is his environment' or any other question you like"
|
|
elif commmand == "#SLEEP#":
|
|
to_return = "Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#SLEEP_F#":
|
|
to_return = "Your mom slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#ENVIRONMENT#":
|
|
to_return = "The temperature in the house is 23 degrees Celsius, CO2 level is 662 ppm, and I can smell coffee brewing. Your dad slept approximately 8 hours last night, took a shower before bed, and got up 4 times during the night."
|
|
elif commmand == "#WEEK#":
|
|
to_return = "Showing his weekly activity"
|
|
elif commmand == "#WEEK_F#":
|
|
to_return = "Showing her weekly activity"
|
|
elif commmand == "#ACTIVITY#":
|
|
to_return = "Your dad has been less active this week than usual. He spent more time sitting in the living room and he got up later than usual by 38min. He also did not go outside as frequently and had less visitors. He only showered once this week."
|
|
elif commmand == "#ACTIVITY_F#":
|
|
to_return = "Your mom has been less active this week than usual. She spent more time sitting in the living room and she got up later than usual by 38min. She also did not go outside as frequently and had less visitors. She only showered once this week."
|
|
elif commmand == "#ACTIVITY_COMPARE#":
|
|
to_return = "Overall your dad is less active this year compared to last year. He slept longer in the mornings and had less visitors. Also his shower activity is reduced from typically 2 times a week to once a week."
|
|
elif commmand == "#ACTIVITY_COMPARE_F#":
|
|
to_return = "Overall your mom is less active this year compared to last year. She slept longer in the mornings and had less visitors. Also her shower activity is reduced from typically 2 times a week to once a week."
|
|
elif commmand == "#LOCATION#":
|
|
filterr = 5
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
#current_time = datetime.datetime.now()
|
|
current_time = datetime.datetime.now(datetime.timezone.utc)
|
|
last_location = details["last_location"]
|
|
detected_time = datetime.datetime.fromisoformat(details["last_detected_time"])
|
|
local_time = local_tz.localize(detected_time)
|
|
# Convert to UTC
|
|
detected_utc_time = local_time.astimezone(pytz.UTC)
|
|
|
|
time_diff = current_time - detected_utc_time
|
|
minutes = time_diff.total_seconds() / 60
|
|
time_sentence = format_time_difference(minutes)
|
|
if minutes == 0:
|
|
to_return = f"He is now in the {last_location}."
|
|
else:
|
|
to_return = f"He was last detected in the {last_location} {time_sentence} ago"
|
|
elif commmand == "#SHOWER#":
|
|
to_return = "In the last 7 days, your Dad took a shower on Friday, Sunday and Tuesday"
|
|
elif commmand == "#SHOWER_F#":
|
|
to_return = "The last time your mom took a shower was Yesterda at 9:33AM"
|
|
elif commmand == "#BATHROOM#":
|
|
to_return = "Last night your Dad used the restroom only once at 6.10am"
|
|
elif commmand == "#KITCHEN#":
|
|
to_return = "Your Dad only cooked Dinner on Wednesday and he turned off the stove afterwards"
|
|
elif commmand == "#MOLD#":
|
|
to_return = "I cannot smell any mold. Also, the humidity is very low. In any of the rooms never exceeded 27% RH in the last 7 days."
|
|
elif commmand == "#VISITORS#":
|
|
to_return = "Yes, on Tuesday, I could detect motion in both office and kitchen at the same time and CO2 levels in the living room exceeded 900ppm."
|
|
elif commmand == "#TEMPERATURE#":
|
|
filterr = 5
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
current_time = datetime.datetime.now(datetime.timezone.utc)
|
|
last_location = details["last_location"]
|
|
temperature = int(details["temperature"])
|
|
if "America" in time_zone_s:
|
|
temperature_sentence = f"{int(CelsiusToFahrenheit(temperature))} degrees Farenhight"
|
|
else:
|
|
temperature_sentence = f"{temperature} degrees Celsius."
|
|
to_return = f"The temperature in the {last_location} is {temperature_sentence}."
|
|
elif commmand == "#TEMPERATURE_B#":
|
|
to_return = "The temperature in the main bathroom is 80 degrees Farenhight."
|
|
elif commmand == "#OXYGEN#":
|
|
to_return = "His last oxygen level was at 95%."
|
|
elif commmand == "#OXYGEN_F#":
|
|
to_return = "Her last oxygen level was at 95%."
|
|
elif commmand == "#HEART_RATE#":
|
|
to_return = "His last heart rate was 74 bpm."
|
|
elif commmand == "#BLOOD_PRESSURE#":
|
|
to_return = "His latest blood pressure was measured 5 hours ago and it was 137 over 83."
|
|
elif commmand == "#BLOOD_PRESSURE_F#":
|
|
to_return = "Her latest blood pressure was measured 5 hours ago and it was 137 over 83."
|
|
elif commmand == "#EKG#":
|
|
to_return = "His latest HeartBeam EKG was done on Monday and it was within his baseline!"
|
|
elif commmand == "#EKG_F#":
|
|
to_return = "Her latest HeartBeam EKG was done on Monday and it was within her baseline!"
|
|
return to_return
|
|
|
|
def ScaleToCommon(data, sensor):
|
|
|
|
if sensor == "temperature":
|
|
new_min = 0
|
|
new_max = 100
|
|
elif sensor == "humidity":
|
|
new_min = 100
|
|
new_max = 200
|
|
elif sensor == "light":
|
|
new_min = 200
|
|
new_max = 300
|
|
elif sensor == "radar":
|
|
new_min = 300
|
|
new_max = 400
|
|
elif sensor == "s0":
|
|
new_min = 400
|
|
new_max = 500
|
|
elif sensor == "s1":
|
|
new_min = 500
|
|
new_max = 600
|
|
elif sensor == "s2":
|
|
new_min = 600
|
|
new_max = 700
|
|
elif sensor == "s3":
|
|
new_min = 700
|
|
new_max = 800
|
|
elif sensor == "s4":
|
|
new_min = 800
|
|
new_max = 900
|
|
elif sensor == "s5":
|
|
new_min = 900
|
|
new_max = 1000
|
|
elif sensor == "s6":
|
|
new_min = 1000
|
|
new_max = 1100
|
|
elif sensor == "s7":
|
|
new_min = 1100
|
|
new_max = 1200
|
|
elif sensor == "s8":
|
|
new_min = 1200
|
|
new_max = 1300
|
|
else: #s9
|
|
new_min = 1300
|
|
new_max = 1400
|
|
|
|
# Split timestamps and values into separate arrays
|
|
timestamps = np.array([x[0] for x in data])
|
|
values = np.array([x[1] for x in data])
|
|
|
|
# Get current min and max
|
|
if len(values) > 0:
|
|
current_min = np.min(values)
|
|
current_max = np.max(values)
|
|
    else:
        current_min = 0
        current_max = 0
|
|
|
|
# Scale the values using the min-max formula
|
|
if current_max - current_min > 0:
|
|
scaled_values = (values - current_min) * (new_max - new_min) / (current_max - current_min) + new_min
|
|
else:
|
|
mid_val = (new_max + new_min) / 2
|
|
scaled_values = np.full_like(values, mid_val)
|
|
|
|
# Zip back together with original timestamps
|
|
return list(zip(timestamps, scaled_values))
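
# Illustrative sketch (added for clarity, not called anywhere in this module): ScaleToCommon()
# min-max scales each sensor's readings into its own 100-unit band (temperature -> 0..100,
# humidity -> 100..200, ...) so several sensors can share one axis. The input shape below is
# an assumption taken from the function body: a list of (timestamp, value) pairs.
def _sketch_scale_to_common_usage():
    data = [(1700000000, 18.0), (1700000600, 22.0), (1700001200, 26.0)]
    scaled = ScaleToCommon(data, "humidity")
    # min (18) maps to 100, max (26) maps to 200, midpoint (22) maps to 150; timestamps are kept.
    return scaled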
|
|
|
|
def CreateLocationsStripe(locations_file, time_zone_s):
|
|
|
|
parts = locations_file.split("/")
|
|
parts1 = parts[2].split("_")
|
|
ddate = parts1[1]
|
|
deployment_id = parts1[0]
|
|
filter_minutes = parts1[2]
|
|
bw = False
|
|
chart_type = 4
|
|
force_recreate = True
|
|
motion = False
|
|
scale_global = False
|
|
fast = True
|
|
GenerateFullLocationMap(locations_file, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
|
|
|
|
|
|
def CelsiusToFahrenheit(C):
|
|
F = (C * 9/5) + 32
|
|
return F
|
|
|
|
def CelsiusToFahrenheitList(compressed_readings: List[Tuple[datetime.datetime, np.float64]]) -> List[Tuple[datetime.datetime, np.float64]]:
|
|
|
|
# Create a new list with converted temperatures
|
|
converted_readings = [
|
|
[reading[0], CelsiusToFahrenheit(reading[1])]
|
|
for reading in compressed_readings
|
|
]
|
|
|
|
return converted_readings
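
# Illustrative sketch (not called anywhere): the conversion is F = C * 9/5 + 32, applied
# pointwise to [timestamp, value] pairs by CelsiusToFahrenheitList().
def _sketch_celsius_to_fahrenheit():
    assert CelsiusToFahrenheit(0) == 32.0
    assert CelsiusToFahrenheit(20) == 68.0
    converted = CelsiusToFahrenheitList([[1700000000, 25.0]])
    # -> [[1700000000, 77.0]]
    return converted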
|
|
|
|
def GetPriviledgesOnly(user_name):
    with get_db_connection() as conn:
        # Use parameterized queries so user_name is never concatenated into the SQL text.
        if isinstance(user_name, int) or str(user_name).isdigit():
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_id = %s"
            params = (int(user_name),)
        else:
            sql = "SELECT access_to_deployments FROM public.person_details WHERE user_name = %s"
            params = (user_name,)

        with conn.cursor() as cur:
            cur.execute(sql, params)
            result = cur.fetchall()
            if result:  # fetchall() returns [] (never None) when there are no rows
                return result[0][0]
            else:
                return "0"
|
|
|
|
def GetPriviledgesAndUserId(user_name):
    with get_db_connection() as conn:
        sql = "SELECT access_to_deployments, user_id FROM public.person_details WHERE user_name = %s"

        with conn.cursor() as cur:
            cur.execute(sql, (user_name,))
            result = cur.fetchall()
            if result:  # fetchall() returns [] (never None) when there are no rows
                return result[0]
            else:
                return "[0,0]"
|
|
|
|
def AddToLog(message):
|
|
"""Add message to log"""
|
|
logger.info(message)
|
|
|
|
def FillFields(blob_data, record, form_type):
|
|
"""
|
|
Fill in the input fields in the HTML blob_data with values from the caretaker dictionary.
|
|
|
|
:param blob_data: str - The initial HTML string containing empty or placeholder input fields.
|
|
    :param record: dict - The dictionary containing values to populate the fields.
|
|
:return: str - The HTML string with the input fields filled with the appropriate values.
|
|
"""
|
|
# Ensure blob_data is a string
|
|
#blob_data = str(blob_data)
|
|
|
|
# Populate the fields
|
|
for field in record:
|
|
logger.debug(f"field= {field}")
|
|
if field == "user_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_user_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "deployment_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_deployment_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "device_id":
|
|
if record[field] is not None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
# Create a regex pattern to match the span with specific id
|
|
pattern = rf'(<span[^>]+id="editing_device_id"[^>]*>)([^<]*)(</span>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(3)}', blob_data)
|
|
elif field == "user_name":
|
|
if record[field] != None:
|
|
escaped_string = html.escape(record[field])
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="new_user_name"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
elif field == "location":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'location', record[field])
|
|
|
|
elif field == "gender":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'gender', record[field])
|
|
|
|
elif field == "race":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'race', record[field])
|
|
|
|
elif field == "time_zone_s":
|
|
if record[field] != None:
|
|
blob_data = SelectOption(blob_data, 'time_zone_s', record[field])
|
|
|
|
elif field == "time_edit" or field == "user_edit":
|
|
pass
|
|
else:
|
|
if record[field] != None:
|
|
escaped_string = html.escape(str(record[field]))
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]+value=")[^"]*(")'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)}{escaped_string}{m.group(2)}', blob_data)
|
|
|
|
# Add value attribute if it does not exist
|
|
pattern = rf'(<input[^>]+id="{field}"[^>]*)(>)'
|
|
blob_data = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped_string}"{m.group(2)}', blob_data)
|
|
|
|
return blob_data
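
# Illustrative sketch (not called anywhere): FillFields() relies on two regex passes per field,
# one that rewrites an existing value="..." attribute and one that appends a value attribute
# when none is present. The snippet below applies the same pattern to a minimal, hypothetical
# input field.
def _sketch_fill_field_regex():
    blob = '<input type="text" id="first_name">'
    escaped = html.escape("Alice")
    # Add a value attribute because this input has none yet.
    pattern = rf'(<input[^>]+id="first_name"[^>]*)(>)'
    filled = re.sub(pattern, lambda m: f'{m.group(1)} value="{escaped}"{m.group(2)}', blob)
    # -> '<input type="text" id="first_name" value="Alice">'
    return filled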
|
|
|
|
def convert_timestamps_lc(data, time_zone_s):
|
|
target_tz = pytz.timezone(time_zone_s)
|
|
return [[datetime.datetime.fromtimestamp(epoch, pytz.UTC).astimezone(target_tz), value]
|
|
for epoch, value in data]
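
# Illustrative sketch (not called anywhere): convert_timestamps_lc() turns epoch seconds into
# timezone-aware datetimes in the requested zone while keeping the paired values.
def _sketch_convert_timestamps_lc():
    converted = convert_timestamps_lc([[0, 42]], "UTC")
    # -> [[datetime.datetime(1970, 1, 1, 0, 0, tzinfo=<UTC>), 42]]
    return converted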
|
|
|
|
|
|
subbedToL = [("/wellget",1),("/wellget_cmp",1),("/well_hub",1)]
|
|
def on_connectL(client_, userdata, flags, rc):
|
|
print(MQTTSERVERL + " L. Connected with result code "+str(rc))
|
|
|
|
# Subscribing in on_connect() means that if we lose the connection and
|
|
# reconnect then subscriptions will be renewed.
|
|
client_.subscribe(subbedToL)
|
|
print("SubscribedL to: "+str(subbedToL))
|
|
|
|
def on_messageL(client_, userdata, msg): #message from GUI
|
|
print(msg.topic+" "+str(msg.payload))
|
|
#msga = msg.payload.decode("ascii")
|
|
#print(msg.timestamp)
|
|
#in_queue.append((str(time.time()), msg.topic, msg.payload))
|
|
|
|
def MQSendL(topic, content, qos=1):
|
|
print(topic, content[0:100])
|
|
#return MQSend(topic, content)
|
|
#currentTime = int(time.time())
|
|
try:
|
|
if "_cmp" in topic:
|
|
enc_msg = zlib.compress(content.encode('utf-8'))
|
|
else:
|
|
enc_msg = content
|
|
clientL.publish(topic, enc_msg, qos=qos, retain=False)
|
|
except Exception as err:
|
|
print ("Err2B:", err)
|
|
try:
|
|
clientL.disconnect()
|
|
#client.username_pw_set('telegraf', 'well18')
|
|
clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
|
|
except Exception as e:
|
|
print ("Err3b:", e)
|
|
|
|
def StoreFloorPlan(deployment_id, layout):
|
|
|
|
conn = get_db_connection()
|
|
cur = conn.cursor()
|
|
print(layout)
|
|
data = json.loads(layout)
|
|
|
|
# Extract the overlapping list
|
|
overlapping_list = str(data["overlapping"])
|
|
|
|
try:
|
|
sql = f"""
|
|
UPDATE public.deployment_details SET floor_plan = '{CleanObject(layout)}' WHERE deployment_id = {deployment_id};
|
|
"""
|
|
|
|
logger.debug(f"sql= {sql}")
|
|
cur.execute(sql)
|
|
conn.commit()
|
|
sql1 = f"""
|
|
INSERT INTO public.deployment_details (deployment_id, "overlapps")
|
|
VALUES ({deployment_id}, '{CleanObject(overlapping_list)}')
|
|
ON CONFLICT (deployment_id)
|
|
DO UPDATE SET "overlapps" = '{CleanObject(overlapping_list)}';
|
|
"""
|
|
logger.debug(f"sql= {sql1}")
|
|
cur.execute(sql1)
|
|
conn.commit()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
AddToLog("Written/updated!")
|
|
return 1
|
|
    except Exception as err:
        logger.error(f"StoreFloorPlan failed: {err}")
        cur.close()
        conn.close()
        return 0
|
|
|
|
def GetFloorPlan(deployment_id):

    conn = get_db_connection()

    try:
        sql = "SELECT floor_plan FROM public.deployment_details WHERE deployment_id = %s"
        logger.debug(f"sql= {sql}")
        with conn.cursor() as cur:
            cur.execute(sql, (deployment_id,))
            result = cur.fetchone()
            if result is not None:
                return result[0]
            else:
                return ""
    except Exception as err:
        logger.error(f"GetFloorPlan failed: {err}")
        return 0
    finally:
        conn.close()
|
|
|
|
# CORS Middleware
|
|
class CORSMiddleware:
|
|
def process_request(self, req, resp):
|
|
resp.set_header('Access-Control-Allow-Origin', '*')
|
|
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
|
|
resp.set_header('Access-Control-Allow-Headers', '*')
|
|
|
|
def process_response(self, req, resp, resource, req_succeeded):
|
|
if req.method == 'OPTIONS': # Handle preflight requests
|
|
resp.status = falcon.HTTP_200
|
|
|
|
# Add this class to your code
|
|
class RequestParser:
|
|
def __init__(self):
|
|
# Detect if we're running in debug/development mode
|
|
self.debug_mode = __name__ == "__main__" or os.environ.get('DEBUG', 'false').lower() in ('true', '1', 'yes')
|
|
logger.debug(f"RequestParser initialized in {'DEBUG' if self.debug_mode else 'PRODUCTION'} mode")
|
|
|
|
def process_request(self, req, resp):
|
|
"""Pre-process the request to ensure media is parsed early"""
|
|
logger.debug(f"RequestParser processing: {req.method} {req.path}")
|
|
|
|
# Initialize an empty form_data dict
|
|
req.context.form_data = {}
|
|
|
|
# Only process POST requests with the right content type
|
|
if req.method != 'POST' or not req.content_type or 'form-urlencoded' not in req.content_type:
|
|
logger.debug("RequestParser: Skipping (not a form POST)")
|
|
return
|
|
|
|
try:
|
|
# Different handling based on environment
|
|
if self.debug_mode:
|
|
self._process_debug(req)
|
|
else:
|
|
self._process_production(req)
|
|
|
|
except Exception as e:
|
|
logger.error(f"RequestParser error: {str(e)}")
|
|
logger.error(traceback.format_exc())
|
|
|
|
def _process_debug(self, req):
|
|
"""Process request in debug mode - optimized for local development"""
|
|
logger.debug("RequestParser: Using DEBUG mode processing")
|
|
|
|
# In debug mode, we can use Content-Length and know it's reliable
|
|
content_length = req.get_header('content-length')
|
|
|
|
if content_length:
|
|
# Content-Length is present
|
|
content_length = int(content_length)
|
|
logger.debug(f"RequestParser: Reading {content_length} bytes using Content-Length")
|
|
|
|
raw_body = req.stream.read(content_length)
|
|
if raw_body:
|
|
body_text = raw_body.decode('utf-8')
|
|
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
|
|
|
|
# Parse the form data
|
|
import urllib.parse
|
|
form_data = dict(urllib.parse.parse_qsl(body_text))
|
|
|
|
# Store in context
|
|
req.context.form_data = form_data
|
|
logger.debug(f"RequestParser: Parsed form data: {form_data}")
|
|
|
|
# Reset the stream with the original content
|
|
import io
|
|
req.stream = io.BytesIO(raw_body)
|
|
else:
|
|
logger.debug("RequestParser: No body data read")
|
|
else:
|
|
logger.debug("RequestParser (debug): No Content-Length header")
|
|
|
|
def _process_production(self, req):
|
|
"""Process request in production mode - optimized for OpenFaaS/faasd deployment"""
|
|
logger.debug("RequestParser: Using PRODUCTION mode processing")
|
|
|
|
# Simple direct read approach for production (OpenFaaS/faasd)
|
|
# We'll limit the read to 1MB for safety
|
|
MAX_SIZE = 1024 * 1024 # 1MB
|
|
|
|
# Just read directly from the stream without checking
|
|
raw_body = req.stream.read(MAX_SIZE)
|
|
if raw_body:
|
|
body_text = raw_body.decode('utf-8')
|
|
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
|
|
|
|
# Parse the form data
|
|
import urllib.parse
|
|
form_data = dict(urllib.parse.parse_qsl(body_text))
|
|
|
|
# Store in context
|
|
req.context.form_data = form_data
|
|
logger.debug(f"RequestParser: Parsed form data: {form_data}")
|
|
|
|
# Reset the stream with the original content
|
|
import io
|
|
req.stream = io.BytesIO(raw_body)
|
|
else:
|
|
logger.debug("RequestParser: No body data read")
|
|
|
|
|
|
def FindDeviceByRole(deployment_id, location_list):
|
|
|
|
#For purposes of activity report, Bedroom and Bathroom are determined in order of priority:
|
|
#Bedroom: "Bedroom Master", "Bedroom", "Bedroom Guest" (106, 56, 107)
|
|
#Bathroom: ""Bathroom Main","Bathroom","Bathroom Guest" (104, 103, 105)
|
|
|
|
#location_names_inverted = {"All":-1 ,"?": 0,"Office": 5,"Hallway": 6,"Garage": 7,"Outside": 8,"Conference Room": 9,"Room": 10,"Kitchen": 34,
|
|
# "Bedroom": 56,"Living Room": 78,"Bathroom": 102,"Dining Room": 103,"Bathroom Main": ,104,"Bathroom Guest": 105,
|
|
# "Bedroom Master": 106, "Bedroom Guest": 107, "Conference Room": 108, "Basement": 109, "Attic": 110, "Other": 200}
|
|
|
|
|
|
ttime = datetime.datetime.utcnow().timestamp()
|
|
|
|
devices_list, device_ids = GetProximityList(deployment_id, ttime)
|
|
|
|
if location_list != []:
|
|
for location in location_list:
|
|
for device in devices_list:
|
|
well_id = device[0]
|
|
device_id = device[1]
|
|
location_t = device[2]
|
|
if location_t == location:
|
|
return (device_id, location, well_id)
|
|
|
|
else:
|
|
conn = get_db_connection()
|
|
with conn.cursor() as cur:
|
|
|
|
#we need to find beneficiaries from list of deployments
|
|
#sql = f'SELECT device_id FROM public.devices where device_id in {device_ids} and other="other"'
|
|
sql = "SELECT device_id, location, well_id FROM public.devices WHERE device_id = ANY(%s) AND other = %s"
|
|
#print(sql)
|
|
cur.execute(sql, (device_ids, "other"))
|
|
result = cur.fetchall()#cur.fetchone()
|
|
if len(result) > 0:
|
|
return result[0]
|
|
else:
|
|
|
|
devices_list, device_ids = GetProximityList(deployment_id, ttime)
|
|
for device in devices_list:
|
|
well_id = device[0]
|
|
device_id = device[1]
|
|
location_t = device[2]
|
|
if "Bathroom" in location_t or "Bedroom" in location_t or "Kitchen" in location_t:
|
|
pass
|
|
else:
|
|
return (device_id, location_t, well_id)
|
|
|
|
return (0, 0, 0)
|
|
|
|
|
|
def ensure_date_order(from_date, to_date):
|
|
"""
|
|
Ensures that from_date is earlier than to_date.
|
|
If not, swaps the dates.
|
|
|
|
Args:
|
|
from_date: Date string in format 'YYYY-MM-DD'
|
|
to_date: Date string in format 'YYYY-MM-DD'
|
|
|
|
Returns:
|
|
Tuple of (from_date, to_date) in correct order
|
|
"""
|
|
# Compare the date strings
|
|
# This works because the 'YYYY-MM-DD' format allows for string comparison
|
|
if from_date > to_date:
|
|
# Swap the dates
|
|
return to_date, from_date
|
|
else:
|
|
# Dates are already in correct order
|
|
return from_date, to_date
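
# Illustrative sketch (not called anywhere): 'YYYY-MM-DD' strings compare correctly as plain
# strings, which is all ensure_date_order() needs to put a date range in order.
def _sketch_ensure_date_order():
    assert ensure_date_order("2025-03-10", "2025-01-05") == ("2025-01-05", "2025-03-10")
    assert ensure_date_order("2025-01-05", "2025-03-10") == ("2025-01-05", "2025-03-10")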
|
|
|
|
def signum(x):
|
|
return (x > 0) - (x < 0)
|
|
|
|
|
|
def get_week_days_and_dates(days_back, timezone_str="America/Los_Angeles"):
|
|
"""
|
|
    Generate weekday names and dates for the last days_back days, ending today, in the given timezone.
|
|
|
|
Args:
|
|
        days_back (int): Number of days to include, counting back from today
        timezone_str (str): Timezone string like "America/Los_Angeles"
|
|
|
|
Returns:
|
|
        list: List of tuples containing (date_string, weekday_name, day_of_month)
|
|
"""
|
|
# Get the timezone object
|
|
tz = pytz.timezone(timezone_str)
|
|
|
|
# Get current date in the specified timezone
|
|
today = datetime.datetime.now(tz).date()
|
|
|
|
# Generate dates from days_back days ago to today
|
|
result = []
|
|
for i in range(days_back-1, -1, -1): # days_back days ago to today (inclusive)
|
|
date = today - timedelta(days=i)
|
|
weekday_name = date.strftime("%A") # Full weekday name
|
|
date_string = date.strftime("%Y-%m-%d") # ISO format date
|
|
day_of_month = date.day
|
|
result.append((date_string, weekday_name, day_of_month))
|
|
|
|
return result
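
# Illustrative sketch (not called anywhere): for days_back=3 the function yields the last three
# calendar days in the given timezone, oldest first, each as (date_string, weekday_name,
# day_of_month). The exact values depend on the current date, so the comment below is only an
# example.
def _sketch_week_days_and_dates():
    days = get_week_days_and_dates(3, "America/Los_Angeles")
    # e.g. [('2025-01-06', 'Monday', 6), ('2025-01-07', 'Tuesday', 7), ('2025-01-08', 'Wednesday', 8)]
    return days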
|
|
|
|
|
|
def filter_short_groups_numpy_orig(presence_list, filter_size, device_id, dates_str):
|
|
"""
|
|
Optimized version using NumPy to remove groups of consecutive zeros
|
|
or consecutive non-zeros (based on sign) shorter than filter_size.
|
|
Mimics the iterative, shortest-first logic of filter_short_groupss.
|
|
|
|
Args:
|
|
presence_list: List of numbers (can include floats, ints, 0s).
|
|
filter_size: Minimum size of consecutive groups (by sign) to keep.
|
|
|
|
Returns:
|
|
Filtered list with short groups removed. Output contains 0s and 1s.
|
|
(Note: Differs slightly from filter_short_groupss if negative numbers
|
|
were present, as this version converts them to 0, not 2).
|
|
"""
|
|
st = time.time()
|
|
|
|
if not presence_list or filter_size <= 1:
|
|
# print(f"NumPy: Early exit/no processing time: {time.time() - st:.6f}s")
|
|
# Return a copy to avoid modifying the original list
|
|
return presence_list[:] if isinstance(presence_list, list) else list(presence_list)
|
|
|
|
# Work with a NumPy array for efficiency, ensure float type for consistency
|
|
result = np.array(presence_list, dtype=float)
|
|
n = len(result)
|
|
|
|
# Use a set to store hashable representations (tuples) of previous states for cycle detection
|
|
previous_states = set()
|
|
|
|
while True:
|
|
current_state_tuple = tuple(result)
|
|
if current_state_tuple in previous_states:
|
|
# print("NumPy: Cycle detected, breaking.")
|
|
break
|
|
previous_states.add(current_state_tuple)
|
|
|
|
# 1. Calculate the sign of each element (-1, 0, 1)
|
|
signs = np.sign(result)
|
|
|
|
# 2. Find indices where the sign changes
|
|
# np.diff calculates the difference between adjacent elements.
|
|
# A non-zero difference means the sign changed.
|
|
# np.where returns the indices *before* the change. Add 1 to get the start of the new run.
|
|
change_indices = np.where(np.diff(signs) != 0)[0] + 1
|
|
|
|
# 3. Define the boundaries of all consecutive runs (start and end indices)
|
|
# Include the start (0) and end (n) of the array.
|
|
boundaries = np.concatenate(([0], change_indices, [n]))
|
|
|
|
# 4. Identify short runs
|
|
short_runs_to_process = []
|
|
for i in range(len(boundaries) - 1):
|
|
start = boundaries[i]
|
|
end = boundaries[i+1] # Slicing is exclusive of the end index
|
|
length = end - start
|
|
|
|
if length > 0: # Ensure the run is not empty
|
|
# Determine the characteristic sign of the run (use the first element)
|
|
run_sign = signs[start]
|
|
|
|
if length < filter_size:
|
|
# --- Verification Step (Crucial) ---
|
|
# Check if the segment *still* consists of elements with the same sign.
|
|
# This handles cases where a previous modification might have altered
|
|
# part of what *was* a longer run.
|
|
current_segment_signs = np.sign(result[start:end])
|
|
if np.all(current_segment_signs == run_sign):
|
|
# If the run is short and its sign consistency is verified,
|
|
# add it to the list of candidates for modification.
|
|
short_runs_to_process.append({
|
|
'start': start,
|
|
'end': end,
|
|
'sign': run_sign,
|
|
'length': length
|
|
})
|
|
# --- End Verification ---
|
|
|
|
# 5. Check if any short runs were found
|
|
if not short_runs_to_process:
|
|
# No modifiable short runs found in this pass, the list is stable.
|
|
break
|
|
|
|
# 6. Sort the short runs: shortest first, then by start index for determinism
|
|
# This ensures we process the same run as the original iterative function would.
|
|
short_runs_to_process.sort(key=lambda r: (r['length'], r['start']))
|
|
|
|
# 7. Process ONLY the *first* (shortest) identified run in this pass
|
|
run_to_process = short_runs_to_process[0]
|
|
start = run_to_process['start']
|
|
end = run_to_process['end']
|
|
run_sign = run_to_process['sign']
|
|
|
|
# Determine the replacement value based on the sign of the run being removed
|
|
# Short runs of 0 become 1
|
|
# Short runs of non-zero (positive or negative) become 0
|
|
replacement_value = 1.0 if run_sign == 0 else 0.0
|
|
|
|
# 8. Apply the replacement to the segment using NumPy slicing
|
|
result[start:end] = replacement_value
|
|
# Loop continues because a change was made
|
|
|
|
print(f"filter_short_groups_numpy time: {time.time() - st:.6f}s")
|
|
if (time.time() - st) > 40:
|
|
print(presence_list)
|
|
# Convert back to a standard Python list for the return value
|
|
return result.tolist()
|
|
|
|
|
|
|
|
def filter_short_groups_numpy(presence_list, filter_size, device_id, dates_str):
|
|
"""
|
|
Optimized version using NumPy to remove groups of consecutive zeros
|
|
or consecutive non-zeros (based on sign) shorter than filter_size.
|
|
Mimics the iterative, shortest-first logic.
|
|
|
|
Optimization:
|
|
- Vectorized extraction of segment properties.
|
|
- Removed redundant sign verification within the segment analysis loop.
|
|
"""
|
|
# Start timer (optional, for benchmarking)
|
|
st = time.time()
|
|
|
|
if not presence_list or filter_size <= 1:
|
|
# print(f"NumPy Optimized: Early exit/no processing time: {time.time() - st:.6f}s")
|
|
return presence_list[:] if isinstance(presence_list, list) else list(presence_list)
|
|
|
|
result = np.array(presence_list, dtype=float)
|
|
n = len(result)
|
|
|
|
previous_states = set()
|
|
|
|
while True:
|
|
# Cycle detection
|
|
current_state_tuple = tuple(result)
|
|
if current_state_tuple in previous_states:
|
|
# print("NumPy Optimized: Cycle detected, breaking.")
|
|
break
|
|
previous_states.add(current_state_tuple)
|
|
|
|
# 1. Calculate the sign of each element (-1, 0, 1)
|
|
signs = np.sign(result)
|
|
|
|
# 2. Find indices where the sign changes
|
|
change_indices = np.where(np.diff(signs) != 0)[0] + 1
|
|
|
|
# 3. Define the boundaries of all consecutive runs
|
|
boundaries = np.concatenate(([0], change_indices, [n]))
|
|
|
|
# If there's only one segment (e.g., all zeros, all ones, or array is too short to have changes),
|
|
# or if the array was empty (n=0 leading to boundaries=[0,0]), no further processing is needed.
|
|
if len(boundaries) <= 2: # e.g., boundaries is [0, n] or [0,0]
|
|
break
|
|
|
|
# 4. Vectorized extraction of run properties
|
|
run_starts = boundaries[:-1]
|
|
run_ends = boundaries[1:]
|
|
run_lengths = run_ends - run_starts
|
|
# The sign of the first element of a run (from the 'signs' array computed at the
|
|
# start of this 'while' iteration) is representative of the entire run's sign,
|
|
# by definition of how 'boundaries' were created.
|
|
run_signs = signs[run_starts]
|
|
|
|
# 5. Identify short runs and collect their properties
|
|
short_runs_to_process = []
|
|
for i in range(len(run_starts)): # Iterate over all identified runs
|
|
# Ensure run_length is positive (should be, due to boundary logic, but good check)
|
|
if run_lengths[i] > 0 and run_lengths[i] < filter_size:
|
|
short_runs_to_process.append({
|
|
'start': run_starts[i],
|
|
'end': run_ends[i],
|
|
'sign': run_signs[i],
|
|
'length': run_lengths[i]
|
|
})
|
|
|
|
# 6. Check if any modifiable short runs were found
|
|
if not short_runs_to_process:
|
|
# No short runs found in this pass, the list is stable.
|
|
break
|
|
|
|
# 7. Sort the short runs: shortest first, then by start index for determinism
|
|
short_runs_to_process.sort(key=lambda r: (r['length'], r['start']))
|
|
|
|
# 8. Process ONLY the *first* (shortest) identified run in this pass
|
|
run_to_process = short_runs_to_process[0]
|
|
start = run_to_process['start']
|
|
end = run_to_process['end']
|
|
run_sign = run_to_process['sign']
|
|
|
|
# Determine the replacement value
|
|
replacement_value = 1.0 if run_sign == 0 else 0.0
|
|
|
|
# 9. Apply the replacement
|
|
result[start:end] = replacement_value
|
|
# A change was made, so the 'while True' loop continues (unless a cycle is detected next)
|
|
|
|
# End timer and print (optional)
|
|
# Your original print statements for timing:
|
|
print(f"filter_short_groups_numpy time: {time.time() - st:.6f}s")
|
|
# if (time.time() - st) > 40:
|
|
# print(presence_list) # This would print the original input on long runs
|
|
|
|
return result.tolist()
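
# Illustrative sketch (not called anywhere): the filter repeatedly flips the shortest run
# (grouped by sign) that is shorter than filter_size -- short zero runs become 1.0 and short
# non-zero runs become 0.0 -- until the signal is stable.
def _sketch_filter_short_groups_numpy():
    presence = [1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0]
    filtered = filter_short_groups_numpy(presence, 3, device_id=0, dates_str="")
    # The two-sample gap of zeros is shorter than 3, so it is filled in:
    # -> [1.0]*10 + [0.0]*5
    return filtered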
|
|
|
|
def filter_short_groups(presence_list, filter_size):
|
|
"""
|
|
Corrected version to perform the same task as filter_short_groupss,
|
|
including handling of non-zero/non-one values based on signum.
|
|
Iteratively removes the shortest group < filter_size by flipping its
|
|
signum representation (0->1, pos->0, neg->2).
|
|
|
|
Args:
|
|
presence_list: List of numbers (0s, 1s, or any other number).
|
|
filter_size: Minimum size of groups (based on signum) to keep.
|
|
Returns:
|
|
Filtered list with short groups removed, potentially containing 0, 1, 2.
|
|
"""
|
|
st = time.time()
|
|
|
|
if not presence_list or filter_size <= 1:
|
|
# print(f"filter_short_groups: Early exit/no processing time: {time.time() - st:.6f}s")
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
n = len(result)
|
|
|
|
# Using a set for faster cycle detection lookups
|
|
previous_states = set()
|
|
|
|
while True:
|
|
current_state_tuple = tuple(result)
|
|
if current_state_tuple in previous_states:
|
|
# print("Cycle detected in filter_short_groups, breaking.")
|
|
break
|
|
previous_states.add(current_state_tuple)
|
|
|
|
# --- Start of logic mimicking filter_short_groupss ---
|
|
changes_made_outer = False
|
|
|
|
# 1. Find all segments based on signum
|
|
segments = []
|
|
i = 0
|
|
while i < n:
|
|
start = i
|
|
# Use signum to define the characteristic value of the run
|
|
current_signum = signum(result[i])
|
|
|
|
# Find the end of the group based on *consistent signum*
|
|
while i < n and signum(result[i]) == current_signum:
|
|
i += 1
|
|
|
|
group_length = i - start
|
|
# Store the signum value associated with the run
|
|
segments.append((start, i - 1, current_signum, group_length))
|
|
|
|
# 2. Sort segments by length (ascending) to process shortest first
|
|
segments.sort(key=lambda x: x[3])
|
|
|
|
# 3. Process the segments (find the first short one to modify)
|
|
for start, end, run_signum, length in segments:
|
|
if length < filter_size:
|
|
# Verify the segment hasn't been fundamentally altered (signum-wise)
|
|
# This check mirrors filter_short_groupss's intent, using signum consistently.
|
|
is_still_original_signum_segment = True
|
|
for k_idx in range(start, end + 1):
|
|
if signum(result[k_idx]) != run_signum:
|
|
is_still_original_signum_segment = False
|
|
break
|
|
|
|
if is_still_original_signum_segment:
|
|
# Calculate replacement value based on signum (0->1, pos->0, neg->2)
|
|
replacement_value = 1 - run_signum
|
|
|
|
# Apply replacement
|
|
segment_modified = False
|
|
for j in range(start, end + 1):
|
|
# Use direct comparison as replacement values are integers (0, 1, 2)
|
|
if result[j] != replacement_value:
|
|
result[j] = replacement_value
|
|
segment_modified = True
|
|
|
|
if segment_modified:
|
|
changes_made_outer = True
|
|
# Break after making *one* change and restart the whole process
|
|
# (finding segments, sorting, finding shortest modifiable)
|
|
break # Break from the 'for segment in segments' loop
|
|
|
|
# --- End of logic mimicking filter_short_groupss ---
|
|
|
|
if not changes_made_outer:
|
|
# If we went through all segments and made no changes, we're done.
|
|
break
|
|
|
|
print(f"filter_short_groups time: {time.time() - st:.6f}s")
|
|
return result
|
|
|
|
|
|
def filter_short_groupss(presence_list, filter_size):
|
|
"""
|
|
Iteratively remove groups of consecutive 0s or 1s that are shorter than filter_size.
|
|
Continues until no more changes are made.
|
|
|
|
Args:
|
|
presence_list: List of 0s and 1s
|
|
filter_size: Minimum size of groups to keep
|
|
|
|
Returns:
|
|
Filtered list with short groups removed
|
|
"""
|
|
st = time.time()
|
|
if not presence_list or filter_size <= 1:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
|
|
# First identify all segments
|
|
segments = []
|
|
i = 0
|
|
n = len(result)
|
|
|
|
while i < n:
|
|
# Find the start of a group
|
|
start = i
|
|
current_value = signum(result[i])
|
|
|
|
# Find the end of the group
|
|
while i < n and signum(result[i]) == current_value:
|
|
i += 1
|
|
|
|
# Calculate group length
|
|
group_length = i - start
|
|
segments.append((start, i-1, current_value, group_length))
|
|
|
|
# Sort segments by length (ascending) to process shortest first
|
|
segments.sort(key=lambda x: x[3])
|
|
|
|
# Process the segments
|
|
for start, end, value, length in segments:
|
|
# If segment is too short, replace with opposite value
|
|
if length < filter_size:
|
|
# Verify the segment hasn't been modified by previous replacements
|
|
if all(result[j] == value for j in range(start, end+1)):
|
|
replacement = 1 - value # Toggle between 0 and 1
|
|
for j in range(start, end+1):
|
|
result[j] = replacement
|
|
changes_made = True
|
|
#print(start, end)
|
|
break # Break after making a change and restart
|
|
|
|
print("s", time.time()-st)
|
|
return result
|
|
|
|
def filter_short_segments(segments, filter_size):
|
|
"""
|
|
Iteratively remove segments that are shorter than filter_size,
|
|
replacing them with data from the previous segment.
|
|
|
|
Args:
|
|
segments: List of tuples (start_time, end_time, num_persons, duration)
|
|
filter_size: Minimum duration to keep a segment
|
|
|
|
Returns:
|
|
Filtered list of segments covering the entire time range
|
|
"""
|
|
if not segments or filter_size <= 0:
|
|
return segments.copy()
|
|
|
|
result = segments.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
|
|
i = 1 # Start from the second segment
|
|
while i < len(result):
|
|
_, _, _, duration = result[i]
|
|
|
|
if duration < filter_size:
|
|
# Get the previous segment's person count
|
|
if i > 0:
|
|
_, _, prev_persons, _ = result[i-1]
|
|
start, end, _, dur = result[i]
|
|
|
|
# Replace with previous person count
|
|
result[i] = (start, end, prev_persons, dur)
|
|
changes_made = True
|
|
|
|
# Check if we can merge with previous segment
|
|
if i > 0:
|
|
prev_start, prev_end, prev_persons, prev_dur = result[i-1]
|
|
curr_start, curr_end, curr_persons, curr_dur = result[i]
|
|
|
|
if prev_persons == curr_persons and prev_end + 1 == curr_start:
|
|
# Merge segments
|
|
merged = (prev_start, curr_end, prev_persons, prev_dur + curr_dur)
|
|
result[i-1] = merged
|
|
result.pop(i)
|
|
i -= 1 # Adjust index after removing an element
|
|
changes_made = True
|
|
|
|
i += 1
|
|
|
|
# Sort segments by start time to ensure proper order
|
|
result.sort(key=lambda x: x[0])
|
|
|
|
return result
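
# Illustrative sketch (not called anywhere): a too-short segment inherits the person count of
# the segment before it and is then merged with it once the two become contiguous runs of the
# same count. Segments are (start_time, end_time, num_persons, duration) tuples.
def _sketch_filter_short_segments():
    segments = [(0, 9, 1, 10), (10, 11, 2, 2), (12, 21, 1, 10)]
    merged = filter_short_segments(segments, 3)
    # The 2-unit spike of "2 persons" is absorbed -> [(0, 21, 1, 22)]
    return merged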
|
|
|
|
def filter_out_short_high_segments(segments, filter_size):
|
|
"""
|
|
Iteratively remove segments that are shorter than filter_size,
|
|
replacing them with data from the previous segment.
|
|
|
|
Args:
|
|
segments: List of tuples (start_time, end_time, num_persons, duration)
|
|
filter_size: Minimum duration to keep a segment
|
|
|
|
Returns:
|
|
Filtered list of segments covering the entire time range
|
|
"""
|
|
if not segments:
|
|
return segments.copy()
|
|
|
|
result = segments.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
|
|
i = 1 # Start from the second segment
|
|
while i < len(result):
|
|
_, _, _, duration = result[i]
|
|
|
|
if duration < filter_size:
|
|
# Get the previous segment's person count
|
|
if i > 0:
|
|
_, _, prev_persons, _ = result[i-1]
|
|
start, end, _, dur = result[i]
|
|
|
|
# Replace with previous person count
|
|
result[i] = (start, end, prev_persons, dur)
|
|
changes_made = True
|
|
|
|
# Check if we can merge with previous segment
|
|
if i > 0:
|
|
prev_start, prev_end, prev_persons, prev_dur = result[i-1]
|
|
curr_start, curr_end, curr_persons, curr_dur = result[i]
|
|
|
|
if prev_persons == curr_persons and prev_end + 1 == curr_start:
|
|
# Merge segments
|
|
merged = (prev_start, curr_end, prev_persons, prev_dur + curr_dur)
|
|
result[i-1] = merged
|
|
result.pop(i)
|
|
i -= 1 # Adjust index after removing an element
|
|
changes_made = True
|
|
|
|
i += 1
|
|
|
|
# Sort segments by start time to ensure proper order
|
|
result.sort(key=lambda x: x[0])
|
|
|
|
return result
|
|
|
|
def filter_out_short_same_groups_iterative(presence_list, filter_size):
|
|
"""
|
|
Iteratively remove groups of consecutive sames that are shorter than filter_size.
|
|
Continues until no more changes are made.
|
|
|
|
Args:
|
|
presence_list: List of values
|
|
filter_size: Minimum size of groups to keep
|
|
|
|
Returns:
|
|
Filtered list with short groups removed
|
|
"""
|
|
if not presence_list:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
|
|
|
|
# First identify all segments
|
|
segments = []
|
|
i = 0
|
|
n = len(result)
|
|
|
|
while i < n:
|
|
# Find the start of a group
|
|
start = i
|
|
current_value = result[i]
|
|
|
|
# Find the end of the group
|
|
while i < n and result[i] == current_value:
|
|
i += 1
|
|
|
|
# Calculate group length
|
|
group_length = i - start
|
|
segments.append((start, i-1, current_value, group_length))
|
|
|
|
|
|
result = filter_out_short_high_segments(segments, filter_size)
|
|
|
|
return result
|
|
|
|
def filter_out_short_highs_iterative(presence_list, filter_size):
|
|
"""
|
|
Iteratively remove groups of consecutive sames that are shorter than filter_size.
|
|
Continues until no more changes are made.
|
|
|
|
Args:
|
|
presence_list: List of values
|
|
filter_size: Minimum size of groups to keep
|
|
|
|
Returns:
|
|
Filtered list with short groups removed
|
|
"""
|
|
if not presence_list:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
|
|
|
|
# First identify all segments
|
|
segments = []
|
|
i = 0
|
|
n = len(result)
|
|
|
|
while i < n:
|
|
# Find the start of a group
|
|
start = i
|
|
current_value = result[i]
|
|
|
|
# Find the end of the group
|
|
while i < n and result[i] == current_value:
|
|
i += 1
|
|
|
|
# Calculate group length
|
|
group_length = i - start
|
|
segments.append((start, i-1, current_value, group_length))
|
|
|
|
|
|
result = filter_out_short_high_segments(segments, filter_size)
|
|
|
|
return result
|
|
|
|
def filter_short_groups_iterative_analog(presence_list, filter_size):
|
|
"""
|
|
Iteratively remove groups of consecutive similar values that are shorter than filter_size.
|
|
For non-zero values, replaces with 0. For zero values, needs context to determine replacement.
|
|
"""
|
|
if not presence_list or filter_size <= 1:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
|
|
# Identify all segments of consecutive similar values
|
|
segments = []
|
|
i = 0
|
|
n = len(result)
|
|
|
|
while i < n:
|
|
start = i
|
|
is_zero = (result[i] == 0)
|
|
|
|
# Find the end of the group with same characteristic (zero or non-zero)
|
|
while i < n and ((result[i] == 0) == is_zero):
|
|
i += 1
|
|
|
|
group_length = i - start
|
|
segments.append((start, i-1, is_zero, group_length))
|
|
|
|
# Process segments from shortest to longest
|
|
segments.sort(key=lambda x: x[3])
|
|
|
|
for start, end, is_zero, length in segments:
|
|
if length < filter_size:
|
|
# For short non-zero groups, replace with zeros
|
|
if not is_zero:
|
|
for j in range(start, end+1):
|
|
result[j] = 0
|
|
changes_made = True
|
|
break
|
|
else:
|
|
# For short zero groups, replace with average of surrounding non-zero values
|
|
# First, find surrounding values
|
|
left_value = 0
|
|
right_value = 0
|
|
|
|
# Look for non-zero value on the left
|
|
for j in range(start-1, -1, -1):
|
|
if result[j] != 0:
|
|
left_value = result[j]
|
|
break
|
|
|
|
# Look for non-zero value on the right
|
|
for j in range(end+1, n):
|
|
if result[j] != 0:
|
|
right_value = result[j]
|
|
break
|
|
|
|
# Calculate replacement value
|
|
if left_value > 0 and right_value > 0:
|
|
replacement = (left_value + right_value) / 2
|
|
elif left_value > 0:
|
|
replacement = left_value
|
|
elif right_value > 0:
|
|
replacement = right_value
|
|
else:
|
|
replacement = 0 # No surrounding non-zero values
|
|
|
|
# Apply replacement
|
|
for j in range(start, end+1):
|
|
result[j] = replacement
|
|
|
|
if replacement != 0: # Only mark as changed if we actually changed something
|
|
changes_made = True
|
|
break
|
|
return result
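
# Illustrative sketch (not called anywhere): short zero gaps are bridged with the average of
# the surrounding non-zero readings, while a leading gap with only one non-zero neighbour takes
# that neighbour's value.
def _sketch_filter_short_groups_iterative_analog():
    readings = [0, 0, 7, 7, 7, 7, 0, 0, 9, 9, 9, 9]
    smoothed = filter_short_groups_iterative_analog(readings, 3)
    # -> [7, 7, 7, 7, 7, 7, 8.0, 8.0, 9, 9, 9, 9]
    return smoothed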
|
|
|
|
|
|
def filter_short_high_groups_iterative_analog(presence_list, filter_size):
|
|
st = time.time()
|
|
"""
|
|
More efficient implementation that still handles cascading effects.
|
|
"""
|
|
if not presence_list or filter_size <= 1:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
i = 0
|
|
n = len(result)
|
|
|
|
# Use a single pass to find all non-zero segments
|
|
segments = []
|
|
while i < n:
|
|
# Skip zeros
|
|
if result[i] == 0:
|
|
i += 1
|
|
continue
|
|
|
|
# Found non-zero, find the end of this segment
|
|
start = i
|
|
while i < n and result[i] != 0:
|
|
i += 1
|
|
|
|
# Add segment to our list
|
|
segments.append((start, i))
|
|
|
|
# Process all short segments in one iteration
|
|
for start, end in segments:
|
|
length = end - start
|
|
if length < filter_size:
|
|
# Set all elements in this segment to zero
|
|
for j in range(start, end):
|
|
result[j] = 0
|
|
changes_made = True
|
|
# Don't break - process all short segments in this pass
|
|
|
|
# If we've made changes, we need to check again for newly formed short segments
|
|
print(f"filter_short_high_groups_iterative_analog time: {time.time() - st:.6f}s")
|
|
return result
|
|
|
|
def filter_short_high_groups_iterative_analog_orig(presence_list, filter_size):
|
|
"""
|
|
Iteratively remove groups of consecutive similar values that are shorter than filter_size.
|
|
For non-zero values, replaces with 0. For zero values, needs context to determine replacement.
|
|
"""
|
|
if not presence_list or filter_size <= 1:
|
|
return presence_list.copy()
|
|
st = time.time()
|
|
result = presence_list.copy()
|
|
changes_made = True
|
|
|
|
while changes_made:
|
|
changes_made = False
|
|
|
|
# Identify all segments of consecutive similar values
|
|
segments = []
|
|
i = 0
|
|
n = len(result)
|
|
|
|
while i < n:
|
|
start = i
|
|
is_zero = (result[i] == 0)
|
|
|
|
# Find the end of the group with same characteristic (zero or non-zero)
|
|
while i < n and ((result[i] == 0) == is_zero):
|
|
i += 1
|
|
|
|
group_length = i - start
|
|
segments.append((start, i-1, is_zero, group_length))
|
|
|
|
# Process segments from shortest to longest
|
|
segments.sort(key=lambda x: x[3])
|
|
|
|
for start, end, is_zero, length in segments:
|
|
if length < filter_size:
|
|
# For short non-zero groups, replace with zeros
|
|
if not is_zero:
|
|
for j in range(start, end+1):
|
|
result[j] = 0
|
|
changes_made = True
|
|
break
|
|
|
|
print(f"filter_short_high_groups_iterative_analog time: {time.time() - st:.6f}s")
|
|
#if (time.time() - st) > 40:
|
|
# print(presence_list)
|
|
return result
|
|
|
|
|
|
|
|
def filter_short_groupsWhat(presence_list, filter_size):
|
|
"""
|
|
Remove groups of consecutive 0s or 1s that are shorter than filter_size.
|
|
For short groups of 0s, replace with 1s.
|
|
For short groups of 1s, replace with 0s.
|
|
|
|
Args:
|
|
presence_list: List of 0s and 1s
|
|
filter_size: Minimum size of groups to keep
|
|
|
|
Returns:
|
|
Filtered list with short groups removed
|
|
"""
|
|
if not presence_list or filter_size <= 1:
|
|
return presence_list.copy()
|
|
|
|
result = presence_list.copy()
|
|
n = len(result)
|
|
|
|
# Find groups and process them
|
|
i = 0
|
|
while i < n:
|
|
# Find the start of a group
|
|
start = i
|
|
current_value = result[i]
|
|
|
|
# Find the end of the group
|
|
while i < n and result[i] == current_value:
|
|
i += 1
|
|
|
|
# Calculate group length
|
|
group_length = i - start
|
|
|
|
# If group is too short, replace with opposite value
|
|
if group_length < filter_size:
|
|
replacement = 1 - current_value # Toggle between 0 and 1
|
|
for j in range(start, i):
|
|
result[j] = replacement
|
|
|
|
return result
|
|
|
|
|
|
def GetOverlapps(deployment_id):

    with get_db_connection() as db_conn:
        with db_conn.cursor() as cur:
            sql = "SELECT overlapps FROM public.deployment_details WHERE deployment_id = %s"
            cur.execute(sql, (deployment_id,))
            result = cur.fetchone()
            if result is not None:
                return result[0]
|
|
|
|
|
|
def GetAmpitude(point_val, segment_lenght):
|
|
if point_val == 0:
|
|
return -segment_lenght
|
|
else:
|
|
return segment_lenght
|
|
|
|
def CreateZGraph(well_id, presence_list):
|
|
"""
|
|
return size and position of consecutive groups of 0s and 1s
|
|
|
|
Args:
|
|
presence_list: List of 0s and 1s
|
|
|
|
Returns:
|
|
list of times and lengths
|
|
"""
|
|
if not presence_list:
|
|
return presence_list.copy()
|
|
|
|
#if well_id == 290:
|
|
# print("Stop")
|
|
dekas_in_day = 6 * 1440
|
|
result = []
|
|
print(well_id)
|
|
#result will look like this: [(0,34),(34,-56),(92,6),...] where (A,B)
|
|
#A: is minute of section, B: height of section +=presence -=absence
|
|
#lets find point 0 first moving backward in time
|
|
segment_lenght = 0
|
|
point_zero_val = signum(presence_list[dekas_in_day])
|
|
for i in range(dekas_in_day-1, 0, -1):
|
|
if point_zero_val != signum(presence_list[i]):
|
|
segment_lenght = dekas_in_day -1 - i
|
|
break
|
|
x = 0
|
|
y = GetAmpitude(point_zero_val, segment_lenght)
|
|
result.append((x, y))
|
|
#x = x + segment_lenght
|
|
last_y = y
|
|
last_val = point_zero_val
|
|
last_source_minute = dekas_in_day + 1
|
|
for i in range(last_source_minute, len(presence_list)):
|
|
if last_val != signum(presence_list[i]):
|
|
segment_lenght = i - dekas_in_day - x
|
|
x = x + segment_lenght
|
|
y = last_y + GetAmpitude(last_val, segment_lenght)
|
|
result.append((x, y))
|
|
result.append((x, 0))
|
|
last_y = 0
|
|
last_val = signum(presence_list[i])
|
|
|
|
#last point i is NOT 1 + last above... it is last above so 2879!
|
|
segment_lenght = i - dekas_in_day - x
|
|
x = i - dekas_in_day #last point
|
|
y = GetAmpitude(last_val, segment_lenght)
|
|
result.append((x, y))
|
|
|
|
return result
|
|
|
|
|
|
# Add this function to your code
|
|
def get_form_data(req):
|
|
"""Helper function to get form data from either context or req.media"""
|
|
# First check if we pre-parsed the form data
|
|
if hasattr(req.context, 'form_data') and req.context.form_data:
|
|
logger.debug("Using pre-parsed form data from context")
|
|
return req.context.form_data
|
|
|
|
# Otherwise try to get from req.media (for json)
|
|
try:
|
|
if req.content_type and (
|
|
falcon.MEDIA_JSON in req.content_type or
|
|
falcon.MEDIA_URLENCODED in req.content_type
|
|
):
|
|
logger.debug("Attempting to get form data from req.media")
|
|
return req.media or {}
|
|
except Exception as e:
|
|
logger.error(f"Error getting req.media: {str(e)}")
|
|
|
|
logger.debug("No form data available, returning empty dict")
|
|
return {}
|
|
|
|
def DetectMultiple(temporary_map_day_plus, overlaps_str_lst):
|
|
"""
|
|
Written by Robert Zmrzli
|
|
Detects time intervals of multiple vs single/no presence and outputs
|
|
the signed duration of each interval at its end time.
|
|
|
|
Args:
|
|
temporary_map_day_plus: Map for each device radar reads that were detected to be above threshold
|
|
overlaps_lst: List of pairs of devices that have overlapping area
|
|
Returns:
|
|
A list of tuples representing the multiple presence timeline segments.
|
|
Each segment is represented by two tuples:
|
|
1. (end_minute, signed_duration): signed_duration is the length of the
|
|
interval ending at end_minute (+ multiple, - single/none).
|
|
2. (end_minute, 0): A marker for visualization.
|
|
"""
|
|
    if temporary_map_day_plus:
        minutes_in_data = len(next(iter(temporary_map_day_plus.values())))
    else:
        minutes_in_data = 0
|
|
events = []
|
|
min_time = 0
|
|
max_time = 0
|
|
|
|
|
|
#['267:273', '273:291']
|
|
seen_list = [0] * minutes_in_data
|
|
seen_where_list = [[] for _ in range(minutes_in_data)]
|
|
|
|
for location_id, data_list in temporary_map_day_plus.items():
|
|
for i in range(minutes_in_data):
|
|
if data_list[i] > 0: # Presence interval
|
|
seen_where_list[i].append(location_id)
|
|
|
|
seen_where_list_uf = seen_where_list.copy()
|
|
overlap_pairs = set()
|
|
for overlap_str in overlaps_str_lst:
|
|
nums = [int(x) for x in overlap_str.split(':')]
|
|
# Add both orderings of the pair for easier checking
|
|
overlap_pairs.add((nums[0], nums[1]))
|
|
overlap_pairs.add((nums[1], nums[0]))
|
|
|
|
# Process each sub-list in seen_where_list
|
|
for i in range(len(seen_where_list)):
|
|
locations = seen_where_list[i]
|
|
|
|
# Skip empty lists and lists with only 0 or 1 item
|
|
if len(locations) <= 1:
|
|
continue
|
|
|
|
has_non_overlapping_pair = False
|
|
|
|
for j in range(len(locations)):
|
|
for k in range(j+1, len(locations)):
|
|
loc1, loc2 = locations[j], locations[k]
|
|
|
|
# If this pair is not in our overlap_pairs, then they don't overlap
|
|
if (loc1, loc2) not in overlap_pairs:
|
|
has_non_overlapping_pair = True
|
|
break
|
|
|
|
if has_non_overlapping_pair:
|
|
break
|
|
|
|
# If all pairs overlap (no non-overlapping pairs found), clear the list
|
|
if not has_non_overlapping_pair:
|
|
seen_where_list[i] = []
|
|
|
|
|
|
variations = []
|
|
variation_index = {}
|
|
|
|
for i in range(minutes_in_data):
|
|
if len(seen_where_list[i]) > 1: # Presence interval
|
|
if seen_where_list[i] not in variations:
|
|
variations.append(seen_where_list[i])
|
|
variation_index[str(seen_where_list[i])] = len(variations) - 1
|
|
|
|
seen_list[i] = variation_index[str(seen_where_list[i])]
|
|
|
|
|
|
return seen_list, seen_where_list_uf
|
|
|
|
def minutes_to_time(minutes):
|
|
"""
|
|
Convert minutes in a day (0-1439) to HH:MM format
|
|
|
|
Args:
|
|
minutes (int): Minutes since midnight (0-1439)
|
|
|
|
Returns:
|
|
str: Time in HH:MM format
|
|
"""
|
|
# Ensure the input is within valid range
|
|
#if not 0 <= minutes <= 1439:
|
|
# raise ValueError("Minutes must be between 0 and 1439")
|
|
|
|
# Calculate hours and remaining minutes
|
|
minutes = minutes % 1440
|
|
hours = minutes // 60
|
|
mins = minutes % 60
|
|
|
|
# Format as HH:MM with leading zeros
|
|
return f"{hours:02d}:{mins:02d}"
|
|
|
|
def decas_to_time(decas):
|
|
"""
|
|
Convert decas in a day (0-8639) to HH:MM format
|
|
|
|
Args:
|
|
        decas (int): Decas (10-second slots) since midnight (0-8639)
|
|
|
|
Returns:
|
|
        str: Time in HH:MM:SS format
|
|
"""
|
|
# Ensure the input is within valid range
|
|
#if not 0 <= minutes <= 1439:
|
|
# raise ValueError("Minutes must be between 0 and 1439")
|
|
|
|
# Calculate hours and remaining minutes
|
|
decas = decas % 8640
|
|
hours = decas // (6 * 60)
|
|
mins = (decas // 6) % 60
|
|
    secs = 10 * (decas % 6)  # 6 decas per minute, 10 seconds each
|
|
|
|
# Format as HH:MM with leading zeros
|
|
return f"{hours:02d}:{mins:02d}:{secs:02d}"
|
|
|
|
def ClearOverlaps(temporary_map_day_plus, overlaps_str_lst):
|
|
"""
|
|
Detects reads that came from same person read by multiple devices that overlap, and removes weaker reads
|
|
Args:
|
|
temporary_map_day_plus: Map for each device radar reads that were detected to be above threshold
|
|
overlaps_lst: List of pairs of devices that have overlapping area
|
|
Returns:
|
|
An original temporary_map_day_plus with some reads removed
|
|
"""
|
|
## Get the number of minutes
|
|
#for location_id, data_list in temporary_map_day_plus.items():
|
|
#decas_in_data = len(data_list)
|
|
#break
|
|
|
|
if temporary_map_day_plus:
|
|
decas_in_data = len(next(iter(temporary_map_day_plus.values())))
|
|
else:
|
|
decas_in_data = 0
|
|
|
|
# Create seen_where_list with device-signal pairs
|
|
seen_where_list = [[] for _ in range(decas_in_data)]
|
|
for location_id, data_list in temporary_map_day_plus.items():
|
|
for i in range(decas_in_data):
|
|
if data_list[i] > 0: # Presence interval
|
|
#if i == (8721):
|
|
# print("stop")
|
|
seen_where_list[i].append((location_id, data_list[i]))
|
|
|
|
# Parse overlap pairs
|
|
overlap_pairs = set()
|
|
for overlap_str in overlaps_str_lst:
|
|
nums = [int(x) for x in overlap_str.split(':')]
|
|
overlap_pairs.add((nums[0], nums[1]))
|
|
overlap_pairs.add((nums[1], nums[0]))
|
|
|
|
# Process each time slot
|
|
for i in range(len(seen_where_list)):
|
|
locations = seen_where_list[i]
|
|
|
|
if len(locations) <= 1:
|
|
continue
|
|
|
|
#if i == (5713 + 8640):
|
|
# print("stop")
|
|
#if i == (8721):
|
|
# print("stop")
|
|
# Create a new list to store the filtered results
|
|
filtered_list = []
|
|
|
|
# Make a copy of locations to process
|
|
to_process = locations.copy()
|
|
|
|
# Process each device and decide whether to keep it
|
|
while to_process:
|
|
current = to_process.pop(0)
|
|
device_id, signal_strength = current
|
|
|
|
should_keep = True
|
|
devices_to_remove = []
|
|
|
|
# Compare with all other devices (including those already in filtered_list)
|
|
for other in locations:
|
|
other_device_id, other_signal_strength = other
|
|
|
|
# Skip if comparing with itself
|
|
if device_id == other_device_id:
|
|
continue
|
|
|
|
# Check if these devices overlap
|
|
if (device_id, other_device_id) in overlap_pairs:
|
|
# They overlap, keep only the stronger signal
|
|
if signal_strength < other_signal_strength:
|
|
# Other device is stronger, don't keep current
|
|
should_keep = False
|
|
break
|
|
elif signal_strength == other_signal_strength and device_id > other_device_id:
|
|
# For equal signals, use device_id as tiebreaker
|
|
should_keep = False
|
|
break
|
|
|
|
# If we should keep this device, add it to filtered list
|
|
if should_keep:
|
|
filtered_list.append(current)
|
|
|
|
# Update the original list with filtered results
|
|
|
|
#if i == (8721):
|
|
# print("stop")
|
|
seen_where_list[i] = filtered_list
|
|
|
|
# Create a new temporary_map_day_plus with the filtered data
|
|
result = {}
|
|
for location_id, data_list in temporary_map_day_plus.items():
|
|
result[location_id] = [0] * decas_in_data
|
|
|
|
# Fill in the filtered data
|
|
for i in range(decas_in_data):
|
|
#if len(seen_where_list[i]) > 1:
|
|
#if i == (8721):
|
|
# print("stop")
|
|
#print(i, decas_to_time(i), seen_where_list[i])
|
|
|
|
for device_id, signal_strength in seen_where_list[i]:
|
|
result[device_id][i] = signal_strength
|
|
|
|
return result
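
# Illustrative sketch (not called anywhere): when two devices are declared as overlapping
# ("267:273") and both see presence in the same 10-second slot, only the stronger read
# survives; devices that do not overlap anything keep their reads untouched. The tiny
# two-slot map below is a hypothetical input.
def _sketch_clear_overlaps():
    day_map = {267: [40, 0], 273: [90, 55], 300: [10, 0]}
    cleaned = ClearOverlaps(day_map, ["267:273"])
    # slot 0: 267 (40) loses to 273 (90); 300 overlaps nothing and is kept.
    # -> {267: [0, 0], 273: [90, 55], 300: [10, 0]}
    return cleaned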
|
|
|
|
# Path handling middleware
|
|
class StripPathMiddleware:
|
|
def process_request(self, req, resp):
|
|
# Strip the '/function/well-api' prefix if present
|
|
path = req.path
|
|
logger.info(f"Original request path: {path}")
|
|
|
|
# Define patterns to match different URL formats
|
|
patterns = [
|
|
r'^/function/well-api', # Standard OpenFaaS path
|
|
r'^/api/well_api', # API path
|
|
]
|
|
|
|
for pattern in patterns:
|
|
if re.match(pattern, path):
|
|
# Strip the matched prefix
|
|
path = re.sub(pattern, '', path)
|
|
# Ensure path starts with a slash
|
|
if not path.startswith('/'):
|
|
path = '/' + path
|
|
# Update the request path
|
|
req.path = path
|
|
logger.info(f"Modified request path: {path}")
|
|
break
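
# Illustrative sketch (not called anywhere): StripPathMiddleware removes known gateway prefixes
# with re.sub() so the router only ever sees the bare resource path.
def _sketch_strip_path_prefix():
    assert re.sub(r'^/function/well-api', '', '/function/well-api/layout') == '/layout'
    assert re.sub(r'^/api/well_api', '', '/api/well_api/layout') == '/layout'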
|
|
|
|
|
|
def optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type):
|
|
last_device_id = None
|
|
|
|
# Pre-compute seconds per minute
|
|
seconds_per_deka = 10
|
|
|
|
# Check if we need to process all data or just specific types
|
|
process_all = data_type in ("all", "z-graph", "multiple")
|
|
|
|
for radar_read in myz_data:
|
|
local_time = radar_read[0]
|
|
device_id = radar_read[1]
|
|
|
|
# Calculate deca once
|
|
deca = int((local_time - start_time).total_seconds() / seconds_per_deka)
|
|
|
|
# Use cached lookups when possible
|
|
if device_id != last_device_id:
|
|
last_device_id = device_id
|
|
|
|
# Check if we've cached this device info
|
|
if device_id not in device_lookup_cache:
|
|
well_id = id2well_id[device_id]
|
|
radar_threshold_group_st = device_id_2_threshold[well_id]
|
|
threshold_sig, threshold = radar_threshold_group_st
|
|
threshold_sig = threshold_sig.split("_")[0]
|
|
|
|
# Cache the values
|
|
device_lookup_cache[device_id] = {
|
|
'well_id': well_id,
|
|
'threshold_sig': threshold_sig,
|
|
'threshold': threshold
|
|
}
|
|
else:
|
|
# Use cached values
|
|
cached = device_lookup_cache[device_id]
|
|
well_id = cached['well_id']
|
|
threshold_sig = cached['threshold_sig']
|
|
threshold = cached['threshold']
|
|
|
|
days_decas = len(temporary_map_day_plus[well_id])
|
|
else:
|
|
# Use already loaded values from last iteration
|
|
cached = device_lookup_cache[device_id]
|
|
well_id = cached['well_id']
|
|
threshold_sig = cached['threshold_sig']
|
|
threshold = cached['threshold']
|
|
days_decas = len(temporary_map_day_plus[well_id])
|
|
|
|
# Get radar value using cached index
|
|
radar_val = radar_read[2 + device_field_indexes[threshold_sig]]
|
|
|
|
# Process data if needed
|
|
if process_all and radar_val > threshold and deca < days_decas:
|
|
temporary_map_day_plus[well_id][deca] = radar_val
|
|
#if well_id == 269:
|
|
# print(local_time)
|
|
|
|
return temporary_map_day_plus
|
|
|
|
def optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_threshold,
|
|
device_field_indexes, presence_map, data_type):
|
|
last_device_id = 0
|
|
|
|
# Cache for threshold_sig calculation which is expensive due to dictionary lookups and string splitting
|
|
threshold_sig_cache = {}
|
|
field_index_cache = {}
|
|
|
|
for radar_read in my_data:
|
|
local_time = radar_read[0]
|
|
device_id = radar_read[1]
|
|
|
|
# Calculate deca once
|
|
deca = int((local_time - start_time).total_seconds() / 10)
|
|
|
|
# Device changed - update values that depend on device
|
|
if device_id != last_device_id:
|
|
last_device_id = device_id
|
|
well_id = id2well_id[device_id]
|
|
|
|
# Calculate days_decas exactly like original
|
|
if data_type == "raw" or data_type == "all":
|
|
days_decas = len(presence_map['raw'][well_id])
|
|
else:
|
|
days_decas = len(presence_map['presence'][well_id])
|
|
|
|
# Calculate threshold_sig with caching
|
|
if device_id not in threshold_sig_cache:
|
|
radar_threshold_group_st = device_id_2_threshold[well_id]
|
|
threshold_sig, threshold = radar_threshold_group_st
|
|
threshold_sig = threshold_sig.split("_")[0]
|
|
threshold_sig_cache[device_id] = (threshold_sig, threshold)
|
|
else:
|
|
threshold_sig, threshold = threshold_sig_cache[device_id]
|
|
|
|
# Calculate field index with caching
|
|
if threshold_sig not in field_index_cache:
|
|
field_index = 2 + device_field_indexes[threshold_sig]
|
|
field_index_cache[threshold_sig] = field_index
|
|
else:
|
|
field_index = field_index_cache[threshold_sig]
|
|
else:
|
|
# Use values from previous iteration for same device
|
|
well_id = id2well_id[device_id]
|
|
|
|
# Calculate days_decas exactly like original
|
|
if data_type == "raw" or data_type == "all":
|
|
days_decas = len(presence_map['raw'][well_id])
|
|
else:
|
|
days_decas = len(presence_map['presence'][well_id])
|
|
|
|
# Use cached values
|
|
threshold_sig, threshold = threshold_sig_cache[device_id]
|
|
field_index = field_index_cache[threshold_sig]
|
|
|
|
# Get radar value using cached field index
|
|
if field_index >= len(radar_read):
|
|
radar_val = radar_read[-1]
|
|
else:
|
|
radar_val = radar_read[field_index]
|
|
|
|
# Process presence data
|
|
if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
if radar_val > threshold:
|
|
if deca < days_decas:
|
|
presence_map['presence'][well_id][deca] = 1
|
|
|
|
# Process raw data if needed
|
|
if data_type == "raw" or data_type == "all":
|
|
if deca < days_decas:
|
|
presence_map['raw'][well_id][deca] = radar_val
|
|
|
|
return presence_map
|
|
|
|
def CompressList(presence_devices_map):
|
|
|
|
for key in presence_devices_map:
|
|
presence_map_list = presence_devices_map[key]
|
|
presence_map_list_compressed = Compress(presence_map_list)
|
|
presence_devices_map[key] = presence_map_list_compressed
|
|
|
|
return presence_devices_map
|
|
|
|
def Compress(presence_map_list):
|
|
|
|
presence_map_list_compressed = []
|
|
l = len(presence_map_list)
|
|
if l > 1:
|
|
last_data_point = presence_map_list[0]
|
|
presence_map_list_compressed.append([0, last_data_point])
|
|
for i in range(1, l):
|
|
data_point = presence_map_list[i]
|
|
if data_point != last_data_point:
|
|
presence_map_list_compressed.append([i - 1, last_data_point])
|
|
presence_map_list_compressed.append([i, data_point])
|
|
last_data_point = data_point
|
|
presence_map_list_compressed.append([i, data_point])
|
|
return presence_map_list_compressed
|
|
|
|
def Decompress(pers_in_deka):
|
|
|
|
last = pers_in_deka[-1]
|
|
last_index = 1 + last[1]
|
|
result = [0] * last_index
|
|
for points in pers_in_deka:
|
|
start_deca = points[0]
|
|
end_deca = points[1]
|
|
value_deca = points[2]
|
|
for i in range(start_deca, 1+end_deca):
|
|
result[i] = value_deca
|
|
return result
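# Illustrative only (hypothetical helper, never called): the two run-length formats used above
# are not direct inverses. Compress() emits [index, value] pairs marking the edges of each run
# (and may repeat the final pair when the last sample starts a new run), while Decompress()
# expects [start_deca, end_deca, value] triplets.
def _run_length_formats_demo():
    sample = [0, 0, 1, 1, 1, 0]
    pairs = Compress(sample)         # -> [[0, 0], [1, 0], [2, 1], [4, 1], [5, 0], [5, 0]]
    triplets = [[0, 1, 0], [2, 4, 1], [5, 5, 0]]
    restored = Decompress(triplets)  # -> [0, 0, 1, 1, 1, 0]
    return pairs, restored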
|
|
|
|
def store_to_file(my_list, filename):
|
|
|
|
try:
|
|
with open(filename, 'w') as f:
|
|
json.dump(my_list, f, indent=4) # indent for pretty printing
|
|
print(f"List saved to {filename} using JSON")
|
|
except IOError:
|
|
print(f"Error: Could not write to file {filename}")
|
|
except TypeError as e:
|
|
print(f"Error: Could not serialize list to JSON. {e}") # e.g. if list contains unsupported types like sets
|
|
|
|
def find_custom_header(headers, name):
|
|
"""Helper to find a custom header value (case-insensitive name)."""
|
|
if not headers: return None
|
|
for header in headers:
|
|
if header.get('name', '').lower() == name.lower(): return header.get('value')
|
|
return None
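# Small usage sketch (made-up header list) of the case-insensitive lookup above, matching the
# Telnyx custom_headers shape of [{'name': ..., 'value': ...}, ...].
def _custom_header_demo():
    headers = [{"name": "X-TTS-Payload", "value": "Door alarm triggered"}]
    return find_custom_header(headers, "x-tts-payload")  # -> 'Door alarm triggered'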
|
|
|
|
def encode_state(parts):
|
|
"""Joins parts with a pipe and base64 encodes the result."""
|
|
plain_state = "|".join(map(str, parts))
|
|
base64_state = base64.b64encode(plain_state.encode('utf-8')).decode('ascii')
|
|
# Assuming 'logger' is your app's logger instance
|
|
logger.debug(f"Encoded state: '{plain_state}' -> '{base64_state}'")
|
|
return base64_state
|
|
|
|
def decode_state(b64_state):
|
|
"""Decodes a base64 state and splits it by pipe."""
|
|
if not b64_state: return []
|
|
try:
|
|
decoded_plain = base64.b64decode(b64_state).decode('utf-8')
|
|
parts = decoded_plain.split('|')
|
|
logger.debug(f"Decoded state: '{b64_state}' -> '{decoded_plain}' -> {parts}")
|
|
return parts
|
|
except Exception as e:
|
|
logger.error(f"Failed to decode client_state '{b64_state}': {e}")
|
|
return []
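# Round-trip sketch (illustrative values) of the pipe-delimited, base64-encoded client_state
# helpers above, as used by the webhook state machines further below.
def _client_state_roundtrip_demo():
    encoded = encode_state(["WAITING_DTMF", "tts", "hello world"])
    return decode_state(encoded)  # -> ['WAITING_DTMF', 'tts', 'hello world']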
|
|
|
|
def send_telnyx_command(action_path, params, api_key):
|
|
"""
|
|
Sends a command to the Telnyx Call Control API actions endpoint.
|
|
This function should REPLACE your existing send_telnyx_command.
|
|
"""
|
|
if not api_key:
|
|
logger.error(f"CMDFAIL ('{action_path}'): API_KEY not available.")
|
|
return None
|
|
|
|
ccid = params.get("call_control_id")
|
|
if not ccid:
|
|
logger.error(f"CMDFAIL ('{action_path}'): call_control_id missing in params.")
|
|
return None
|
|
|
|
# Correct endpoint construction for V2 actions
|
|
endpoint = f"{TELNYX_API_BASE_URL}/calls/{ccid}/{action_path}"
|
|
|
|
# Body should not contain call_control_id for actions API
|
|
body = {k: v for k, v in params.items() if k != 'call_control_id'}
|
|
|
|
headers = {
|
|
"Authorization": f"Bearer {api_key}",
|
|
"Content-Type": "application/json",
|
|
"Accept": "application/json"
|
|
}
|
|
|
|
logger.info(f"SENDCMD ('{action_path}')")
|
|
logger.debug(f" Endpoint: POST {endpoint}")
|
|
logger.debug(f" JSON Payload: {json.dumps(body, indent=2)}")
|
|
|
|
try:
|
|
response = requests.post(endpoint, json=body, headers=headers, timeout=10)
|
|
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
|
|
logger.info(f"CMDOK ('{action_path}'): Telnyx accepted. Status: {response.status_code}")
|
|
return response.json()
|
|
except requests.exceptions.HTTPError as e:
|
|
logger.error(f"CMDFAIL ('{action_path}'): Telnyx rejected. Status: {e.response.status_code}")
|
|
try:
|
|
logger.error(f" Telnyx Err Detail: {json.dumps(e.response.json(), indent=2)}")
|
|
except json.JSONDecodeError:
|
|
logger.error(f" Raw Err Body: {e.response.text[:500]}")
|
|
except requests.exceptions.RequestException as e:
|
|
logger.exception(f"CMDFAIL ('{action_path}'): Network error")
|
|
|
|
return None
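# Minimal usage sketch (hypothetical call_control_id; defining the function has no side effects):
# issuing a hangup through the helper above. The helper builds the endpoint from the
# call_control_id and strips it from the JSON body itself.
def _hangup_command_example():
    params = {
        "call_control_id": "v2:EXAMPLE_CALL_CONTROL_ID",  # hypothetical value
        "client_state": encode_state(["demo_hup"]),
    }
    return send_telnyx_command("actions/hangup", params, TELNYX_API_KEY)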
|
|
|
|
def StoreToDB(data):
|
|
event_type = data.get('event_type')
|
|
sql = ""
|
|
|
|
payload_json = json.dumps(data["payload"])
|
|
call_session_id = data["payload"]['call_session_id']
|
|
if event_type == "call.initiated":
|
|
timee = data.get('occurred_at')
|
|
sql = f"""
|
|
INSERT INTO public.alarms_voice (
|
|
index,
|
|
"time",
|
|
call_session_id,
|
|
initiated
|
|
)
|
|
VALUES (
|
|
(SELECT COALESCE(MAX(index), 0) + 1 FROM public.alarms_voice), -- Auto-increment index
|
|
'{timee}'::timestamptz, -- occurred_at value
|
|
'{call_session_id}', -- call_session_id value
|
|
'{payload_json}'
|
|
); """
|
|
|
|
elif event_type == "call.answered":
|
|
sql = f"""
|
|
UPDATE public.alarms_voice
|
|
SET answered = '{payload_json}'
|
|
WHERE call_session_id = '{call_session_id}';"""
|
|
elif event_type == "call.playback.started":
|
|
sql = f"""
|
|
UPDATE public.alarms_voice
|
|
SET playback_started = '{payload_json}'
|
|
WHERE call_session_id = '{call_session_id}';"""
|
|
elif event_type == "call.playback.ended":
|
|
sql = f"""
|
|
UPDATE public.alarms_voice
|
|
SET playback_ended = '{payload_json}'
|
|
WHERE call_session_id = '{call_session_id}';"""
|
|
elif event_type == "call.hangup":
|
|
sql = f"""
|
|
UPDATE public.alarms_voice
|
|
SET hangup = '{payload_json}'
|
|
WHERE call_session_id = '{call_session_id}';"""
|
|
|
|
if sql != "":
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
print(sql)
|
|
cur.execute(sql)
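# Sketch only: StoreToDB() above interpolates webhook-supplied values straight into the SQL
# text, which is fragile and open to SQL injection. A parameterized variant of the
# call.initiated branch (assuming the same get_db_connection() helper) could look like this
# hypothetical alternative:
def store_initiated_event_parameterized(data):
    payload_json = json.dumps(data["payload"])
    call_session_id = data["payload"]["call_session_id"]
    occurred_at = data.get("occurred_at")
    sql = """
        INSERT INTO public.alarms_voice (index, "time", call_session_id, initiated)
        VALUES (
            (SELECT COALESCE(MAX(index), 0) + 1 FROM public.alarms_voice),
            %s::timestamptz, %s, %s
        );
    """
    with get_db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(sql, (occurred_at, call_session_id, payload_json))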
|
|
|
|
|
|
|
|
def handle_telnyx_webhook(webhook_data, remote_addr, request_id):
|
|
"""Process Telnyx webhook events"""
|
|
logger.info(f"Processing Telnyx webhook from {remote_addr}, Request-ID: {request_id}")
|
|
|
|
try:
|
|
data = webhook_data.get('data', {})
|
|
event_type = data.get('event_type')
|
|
record_type = data.get('record_type')
|
|
payload = data.get('payload', {})
|
|
|
|
logger.info(f"Event: {event_type}, Record Type: {record_type}")
|
|
|
|
if not event_type or not record_type:
|
|
logger.error("Missing event_type or record_type in webhook data")
|
|
return False
|
|
|
|
call_control_id = payload.get('call_control_id')
|
|
call_session_id = payload.get('call_session_id')
|
|
|
|
# Voice Event Handling
|
|
if record_type == 'event':
|
|
logger.info(f"Processing voice event: {event_type}")
|
|
|
|
StoreToDB(data)
|
|
|
|
if event_type == 'call.initiated':
|
|
logger.info(f"Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
|
|
elif event_type == 'call.answered':
|
|
logger.info(f"Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
|
|
|
|
# Get custom headers and log them
|
|
custom_headers = payload.get('custom_headers', [])
|
|
logger.debug(f"Custom headers: {json.dumps(custom_headers)}")
|
|
|
|
# Check for audio URL
|
|
audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
|
|
tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
|
|
|
|
logger.info(f"Audio URL: {audio_url}, TTS Payload: {tts_payload}")
|
|
|
|
# Play audio if URL is provided
|
|
if ENABLE_AUDIO_PLAYBACK and audio_url:
|
|
logger.info(f"Playing audio: {audio_url}")
|
|
client_state = create_client_state("answered", call_control_id, CLIENT_STATE_PREFIX)
|
|
|
|
play_params = {
|
|
"call_control_id": call_control_id,
|
|
"client_state": client_state,
|
|
"audio_url": audio_url
|
|
}
|
|
|
|
result = send_telnyx_command("actions/playback_start", play_params, TELNYX_API_KEY)
|
|
logger.info(f"Play command result: {result}")
|
|
return True
|
|
|
|
elif tts_payload:
|
|
logger.info(f"Speaking text: {tts_payload}")
|
|
client_state = create_client_state("answered", call_control_id, CLIENT_STATE_PREFIX)
|
|
|
|
speak_params = {
|
|
"payload": tts_payload,
|
|
"voice": DEFAULT_TTS_VOICE,
|
|
"language": DEFAULT_TTS_LANGUAGE,
|
|
"call_control_id": call_control_id,
|
|
"client_state": client_state
|
|
}
|
|
|
|
result = send_telnyx_command("actions/speak", speak_params, TELNYX_API_KEY)
|
|
logger.info(f"Speak command result: {result}")
|
|
return True
|
|
|
|
else:
|
|
logger.warning("No audio URL or TTS payload found in call. Hanging up.")
|
|
hangup_params = {
|
|
"call_control_id": call_control_id,
|
|
"client_state": create_client_state("nohdr_hup", call_control_id, CLIENT_STATE_PREFIX)
|
|
}
|
|
send_telnyx_command("actions/hangup", hangup_params, TELNYX_API_KEY)
|
|
return True
|
|
|
|
# Handle other voice events
|
|
elif event_type in ['call.speak.ended', 'call.playback.ended']:
|
|
status = payload.get('status')
|
|
ended_event_type = event_type.split('.')[-2]
|
|
logger.info(f"Call {ended_event_type} ended: Status={status}")
|
|
|
|
# Hang up after media finished playing
|
|
hangup_params = {
|
|
"call_control_id": call_control_id,
|
|
"client_state": create_client_state(f"{ended_event_type}_hup", call_control_id, CLIENT_STATE_PREFIX)
|
|
}
|
|
send_telnyx_command("actions/hangup", hangup_params, TELNYX_API_KEY)
|
|
return True
|
|
|
|
elif event_type == 'call.hangup':
|
|
logger.info(f"Call hung up: Cause={payload.get('cause')}")
|
|
return True
|
|
|
|
else:
|
|
logger.info(f"Other voice event: {event_type}")
|
|
return True
|
|
|
|
# SMS Event Handling
|
|
elif record_type == 'message':
|
|
logger.info(f"Processing SMS event: {event_type}")
|
|
# SMS handling code...
|
|
return True
|
|
|
|
else:
|
|
logger.warning(f"Unknown record type: {record_type}")
|
|
return False
|
|
|
|
except Exception as e:
|
|
logger.exception(f"Error in handle_telnyx_webhook: {e}")
|
|
return False
|
|
|
|
# Assume these are defined globally or accessible (e.g., from app_args or .env)
|
|
# logger = logging.getLogger(...)
|
|
# ENABLE_AUDIO_PLAYBACK = True / False
|
|
# CLIENT_STATE_PREFIX = "app_state"
|
|
# DEFAULT_TTS_VOICE = "female"
|
|
# DEFAULT_TTS_LANGUAGE = "en-US"
|
|
# TELNYX_API_KEY = "YOUR_API_KEY"
|
|
# DTMF_GATHER_TIMEOUT_SECONDS = 15 # Wait 15 seconds for DTMF input
|
|
|
|
# Placeholder for your DB function
|
|
# def StoreToDB(data):
|
|
# app_logger.debug(f"Placeholder: Storing to DB: {json.dumps(data)[:100]}") # Use app_logger
|
|
|
|
# (Your existing find_custom_header, create_client_state, send_telnyx_command should be here)
|
|
# Make sure send_telnyx_command uses app_logger
|
|
|
|
def handle_telnyx_webhook2(webhook_data, remote_addr, request_id):  # uses the module-level logger
|
|
"""Process Telnyx webhook events with IVR logic."""
|
|
logger.info(f"Processing Telnyx webhook from {remote_addr}, Request-ID: {request_id}")
|
|
|
|
try:
|
|
data = webhook_data.get('data', {})
|
|
event_type = data.get('event_type')
|
|
record_type = data.get('record_type')
|
|
payload = data.get('payload', {})
|
|
|
|
logger.info(f"Event: {event_type}, Record Type: {record_type}")
|
|
|
|
if not event_type or not record_type:
|
|
logger.error("Missing event_type or record_type in webhook data")
|
|
return False # Indicate failure to process
|
|
|
|
call_control_id = payload.get('call_control_id')
|
|
call_session_id = payload.get('call_session_id')
|
|
# Attempt to decode client_state if present
|
|
b64_client_state_rcvd = payload.get("client_state")
|
|
plain_client_state_rcvd = ""
|
|
if b64_client_state_rcvd:
|
|
try:
|
|
plain_client_state_rcvd = base64.b64decode(b64_client_state_rcvd).decode('utf-8')
|
|
logger.info(f" Decoded Client State Received: '{plain_client_state_rcvd}'")
|
|
except Exception as e:
|
|
logger.warning(f" Could not decode client_state: {b64_client_state_rcvd}, Error: {e}")
|
|
plain_client_state_rcvd = "undecodable_state"
|
|
|
|
|
|
# Store all events to DB if needed
|
|
StoreToDB(data) # StoreToDB expects the inner 'data' record (event_type + payload)
|
|
|
|
# Voice Event Handling
|
|
if record_type == 'event':
|
|
logger.info(f"Processing voice event: {event_type}, CCID: {call_control_id}")
|
|
|
|
# --- Initial Call Setup ---
|
|
if event_type == 'call.initiated':
|
|
logger.info(f" Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
|
|
# No action needed here, wait for call.answered
|
|
|
|
elif event_type == 'call.answered':
|
|
logger.info(f" Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
|
|
custom_headers = payload.get('custom_headers', [])
|
|
logger.debug(f" Custom headers: {json.dumps(custom_headers)}")
|
|
|
|
audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
|
|
tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
|
|
logger.info(f" X-Audio-Url: {audio_url}, X-TTS-Payload: {tts_payload}")
|
|
|
|
# This state means the main message is about to be played.
|
|
# After it ends, we'll play the options prompt.
|
|
next_client_state = create_client_state("main_media_played", call_control_id, app_args.client_state_prefix) # Use app_args
|
|
|
|
action_taken = False
|
|
if app_args.enable_audio_playback and audio_url: # Use app_args
|
|
logger.info(f" -> Playing main audio: {audio_url}")
|
|
play_params = {"call_control_id": call_control_id, "client_state": next_client_state, "audio_url": audio_url}
|
|
send_telnyx_command("actions/playback_start", play_params, app_args.api_key) # Use app_args
|
|
action_taken = True
|
|
elif tts_payload:
|
|
logger.info(f" -> Speaking main TTS: {tts_payload}")
|
|
speak_params = {"payload": tts_payload, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language, "call_control_id": call_control_id, "client_state": next_client_state} # Use app_args
|
|
send_telnyx_command("actions/speak", speak_params, app_args.api_key) # Use app_args
|
|
action_taken = True
|
|
|
|
if not action_taken:
|
|
logger.warning(" -> No audio URL or TTS payload for main message. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("no_main_media_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
|
|
# --- Handling End of Main Media Playback ---
|
|
elif event_type in ['call.speak.ended', 'call.playback.ended']:
|
|
status = payload.get('status')
|
|
ended_event_type_root = event_type.split('.')[1] # speak or playback
|
|
|
|
logger.info(f" Call {ended_event_type_root} ended: Status={status}, Current Decoded State='{plain_client_state_rcvd}'")
|
|
|
|
# Check if the main media just finished playing
|
|
if plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_main_media_played"):
|
|
logger.info(" -> Main media finished. Playing DTMF options prompt.")
|
|
options_prompt_tts = "press 0 to repeat the message or press pound to hang up."
|
|
# This state means the options prompt is playing, and we're waiting for DTMF.
|
|
# gather_using_speak will trigger call.gather.ended
|
|
next_client_state = create_client_state("waiting_dtmf", call_control_id, app_args.client_state_prefix)
|
|
|
|
gather_params = {
|
|
"call_control_id": call_control_id,
|
|
"client_state": next_client_state,
|
|
"payload": options_prompt_tts,
|
|
"voice": app_args.default_tts_voice,
|
|
"language": app_args.default_tts_language,
|
|
"valid_digits": "0#", # Only accept 0 or #
|
|
"max_digits": 1, # Expect only one digit
|
|
"timeout_millis": app_args.dtmf_timeout_seconds * 1000, # N seconds timeout
|
|
"terminating_digits": "#" # # will also terminate gather immediately
|
|
}
|
|
send_telnyx_command("actions/gather_using_speak", gather_params, app_args.api_key)
|
|
|
|
elif plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_replaying_main_media"):
|
|
logger.info(" -> Replayed main media finished. Playing DTMF options prompt again.")
|
|
# Same logic as above for playing options prompt
|
|
options_prompt_tts = "press 0 to repeat the message or press pound to hang up."
|
|
next_client_state = create_client_state("waiting_dtmf", call_control_id, app_args.client_state_prefix)
|
|
gather_params = {
|
|
"call_control_id": call_control_id, "client_state": next_client_state,
|
|
"payload": options_prompt_tts, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language,
|
|
"valid_digits": "0#", "max_digits": 1, "timeout_millis": app_args.dtmf_timeout_seconds * 1000, "terminating_digits": "#"
|
|
}
|
|
send_telnyx_command("actions/gather_using_speak", gather_params, app_args.api_key)
|
|
|
|
else:
|
|
logger.warning(f" -> {ended_event_type_root} ended, but client_state ('{plain_client_state_rcvd}') doesn't match expected flow for options. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state(f"{ended_event_type_root}_unexpected_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
|
|
# --- Handling DTMF Input Result ---
|
|
elif event_type == 'call.gather.ended':
|
|
digits_received = payload.get('digits')
|
|
gather_status = payload.get('status') # e.g., 'completed_by_terminating_digit', 'timeout', 'call_hangup'
|
|
logger.info(f" Call Gather Ended: Digits='{digits_received}', Status='{gather_status}', Current Decoded State='{plain_client_state_rcvd}'")
|
|
|
|
if plain_client_state_rcvd.startswith(f"{app_args.client_state_prefix}_waiting_dtmf"):
|
|
if digits_received == "0":
|
|
logger.info(" -> DTMF '0' received. Replaying main message.")
|
|
# Replay the original message. We need to fetch it again from custom headers.
|
|
# This assumes the call.gather.ended payload still contains the original custom_headers.
|
|
# If not, we might need to store the original TTS/Audio URL in the client_state.
|
|
# For simplicity, let's assume custom_headers are still available or we re-evaluate.
|
|
# A more robust way would be to store the main message type/content in the client_state
|
|
# when transitioning from main_media_played.
|
|
|
|
# Let's try to get custom_headers from the current payload.
|
|
# Telnyx usually includes original call details in subsequent events.
|
|
custom_headers = payload.get('custom_headers', []) # This might not be reliable for original headers
|
|
|
|
# A BETTER APPROACH: Store original media info in client_state or retrieve from DB
|
|
# For this example, we'll try to re-evaluate based on what might be in custom_headers
|
|
# of the *call.gather.ended* event, which is NOT guaranteed to be the original ones.
|
|
# This part needs careful testing or a strategy to pass original media info.
|
|
|
|
# Simplified: Assume we need to re-fetch original custom headers if they are not in this payload.
|
|
# For now, let's just log and assume we'd need a mechanism to get original X-TTS-Payload/X-Audio-Url
|
|
logger.warning(" -> Replay logic needs access to original X-TTS-Payload/X-Audio-Url. This example will try to use current custom_headers if any, or a default.")
|
|
|
|
original_audio_url = find_custom_header(custom_headers, 'X-Audio-Url') # May not be original
|
|
original_tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload') # May not be original
|
|
|
|
next_client_state = create_client_state("replaying_main_media", call_control_id, app_args.client_state_prefix)
|
|
action_taken = False
|
|
if app_args.enable_audio_playback and original_audio_url:
|
|
logger.info(f" -> Replaying audio: {original_audio_url}")
|
|
play_params = {"call_control_id": call_control_id, "client_state": next_client_state, "audio_url": original_audio_url}
|
|
send_telnyx_command("actions/playback_start", play_params, app_args.api_key)
|
|
action_taken = True
|
|
elif original_tts_payload:
|
|
logger.info(f" -> Replaying TTS: {original_tts_payload}")
|
|
speak_params = {"payload": original_tts_payload, "voice": app_args.default_tts_voice, "language": app_args.default_tts_language, "call_control_id": call_control_id, "client_state": next_client_state}
|
|
send_telnyx_command("actions/speak", speak_params, app_args.api_key)
|
|
action_taken = True
|
|
|
|
if not action_taken:
|
|
logger.error(" -> Could not find original media to replay. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("replay_fail_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
|
|
elif digits_received == "#" or (gather_status == 'completed_by_terminating_digit' and payload.get('terminating_digit') == '#'):
|
|
logger.info(" -> DTMF '#' received or terminating digit. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_pound_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
elif gather_status == 'timeout':
|
|
logger.info(" -> DTMF gather timed out. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_timeout_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
else:
|
|
logger.warning(f" -> Gather ended with unhandled digits '{digits_received}' or status '{gather_status}'. Hanging up.")
|
|
hangup_params = {"call_control_id": call_control_id, "client_state": create_client_state("dtmf_unhandled_hup", call_control_id, app_args.client_state_prefix)}
|
|
send_telnyx_command("actions/hangup", hangup_params, app_args.api_key)
|
|
else:
|
|
logger.warning(f" -> Gather ended, but client_state ('{plain_client_state_rcvd}') doesn't match waiting_dtmf. Ignoring.")
|
|
|
|
|
|
elif event_type == 'call.hangup':
|
|
app_logger.info(f" Call Hangup Event: Cause='{payload.get('cause')}', SIPCause='{payload.get('sip_hangup_cause')}', Source='{payload.get('hangup_source')}'")
|
|
# Call is already over, no command to send.
|
|
|
|
# Log other voice events not explicitly handled above for visibility
|
|
elif event_type not in ['call.initiated', 'call.answered', 'call.speak.ended', 'call.playback.ended', 'call.gather.ended', 'call.hangup', 'call.speak.started', 'call.playback.started']:
|
|
logger.info(f" Other Voice Event: Type='{event_type}'. Payload: {json.dumps(payload, indent=2)}")
|
|
|
|
|
|
# --- SMS Event Handling (Placeholder from your snippet) ---
|
|
elif record_type == 'message':
|
|
logger.info(f"Processing SMS event: {event_type}")
|
|
# Your existing SMS handling code would go here...
|
|
# For now, just acknowledge
|
|
logger.info(" -> SMS ACK (204)")
|
|
return True # acknowledge SMS events; the HTTP 2xx is sent by the caller
|
|
|
|
else:
|
|
logger.warning(f"Unknown record type: {record_type}")
|
|
# Acknowledge to prevent retries from Telnyx
|
|
logger.info(" -> Unknown Record Type ACK (204)")
|
|
return True # acknowledge unknown record types so Telnyx does not retry
|
|
|
|
# If we reached here for a voice event and didn't send a command through send_telnyx_command,
|
|
# it means we are just acknowledging the event.
|
|
logger.info(" -> Voice Event Processed (no immediate command sent or command sent async). ACK (204) to Telnyx.")
|
|
return True # always acknowledge the webhook; the HTTP 2xx is sent by the caller
|
|
|
|
except Exception as e:
|
|
logger.exception(f"Error in handle_telnyx_webhook: {e}")
|
|
# Still try to ACK Telnyx if possible, but log the error.
|
|
# Depending on the error, Telnyx might retry if it doesn't get a 2xx.
|
|
return "Internal Server Error", 500
|
|
|
|
def handle_telnyx_webhook3(webhook_data, remote_addr, request_id):
|
|
"""
|
|
Processes Telnyx webhook events with full IVR logic for repeating messages.
|
|
This function should be added to your well-api.py.
|
|
"""
|
|
logger.info(f"Processing webhook in handle_telnyx_webhook3 from {remote_addr}, Request-ID: {request_id}")
|
|
|
|
# --- ADAPT THIS SECTION to your app's config management ---
|
|
# This example assumes config values are accessible as global constants or from a dict.
|
|
# Replace these with your actual config access method (e.g., self.config['...'])
|
|
config = {
|
|
'api_key': TELNYX_API_KEY,
|
|
'dtmf_timeout_seconds': 10,
|
|
'initial_silence_ms': 500,
|
|
'replay_silence_ms': 100,
|
|
'default_tts_voice': 'female',
|
|
'default_tts_language': 'en-US',
|
|
'client_state_prefix': 'well_api_state',
|
|
'inbound_greeting': 'Thank you for calling. We will be with you shortly.'
|
|
}
|
|
# --- END ADAPTATION SECTION ---
|
|
|
|
try:
|
|
StoreToDB(webhook_data.get('data', {})) # Store first; StoreToDB expects the inner 'data' record
|
|
|
|
data = webhook_data.get('data', {})
payload = data.get('payload', {})
|
|
event_type, record_type, ccid = data.get('event_type'), data.get('record_type'), payload.get('call_control_id')
|
|
logger.info(f"EVENT '{event_type}' ({record_type})" + (f", CCID: {ccid}" if ccid else ""))
|
|
|
|
if record_type != 'event':
|
|
logger.info(f" -> Non-voice event ('{record_type}') received. Ignoring in this handler.")
|
|
return True
|
|
|
|
b64_client_state = payload.get("client_state")
|
|
decoded_parts = decode_state(b64_client_state)
|
|
state_name = decoded_parts[0] if decoded_parts else None
|
|
if state_name: logger.info(f" State Name Received: '{state_name}'")
|
|
|
|
current_api_key = config['api_key']
|
|
|
|
# --- State Machine Logic ---
|
|
if event_type == 'call.answered':
|
|
if payload.get('direction') == 'incoming':
|
|
logger.info(" -> Inbound call detected. Playing generic greeting and hanging up.")
|
|
next_state = encode_state(['INBOUND_GREETING_HUP'])
|
|
speak_params = {"payload": config['inbound_greeting'], "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
|
|
send_telnyx_command("actions/speak", speak_params, current_api_key)
|
|
else: # Outgoing call
|
|
audio_url = find_custom_header(payload.get('custom_headers'), 'X-Audio-Url')
|
|
tts_payload = find_custom_header(payload.get('custom_headers'), 'X-TTS-Payload')
|
|
media_type = "audio" if audio_url else "tts" if tts_payload else "none"
|
|
media_value = audio_url or tts_payload
|
|
if media_value:
|
|
logger.info(f" -> Outbound call. Playing {config['initial_silence_ms']}ms silence buffer.")
|
|
next_state = encode_state(['INIT_PLAY_MAIN', media_type, media_value])
|
|
send_telnyx_command("actions/play_silence", {"milliseconds": str(config['initial_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
|
|
else:
|
|
logger.warning(" -> Outbound call, but no audio/tts payload. Hanging up.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
|
|
elif event_type == 'call.playback.ended':
|
|
if state_name == 'INIT_PLAY_MAIN': # Silence ended
|
|
logger.info(" -> Silence buffer ended. Playing main message.")
|
|
_, media_type, media_value = decoded_parts
|
|
next_state = encode_state(['MAIN_MEDIA_PLAYED', media_type, media_value])
|
|
if media_type == "audio":
|
|
send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
|
|
elif media_type == "tts":
|
|
params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
|
|
send_telnyx_command("actions/speak", params, current_api_key)
|
|
elif state_name == 'REPLAY_SILENCE': # Replay silence ended
|
|
logger.info(" -> Replay silence ended. Replaying main message.")
|
|
_, media_type, media_value = decoded_parts
|
|
next_state = encode_state(['REPLAYING_MEDIA', media_type, media_value])
|
|
if media_type == "audio":
|
|
send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
|
|
elif media_type == "tts":
|
|
params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
|
|
send_telnyx_command("actions/speak", params, current_api_key)
|
|
elif state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']: # Actual audio file ended
|
|
logger.info(f" -> Main audio playback finished. Playing options menu.")
|
|
_, media_type, media_value = decoded_parts
|
|
next_state = encode_state(['WAITING_DTMF', media_type, media_value])
|
|
options_prompt = "press 0 to repeat the message or press pound to hang up."
|
|
gather_params = {
|
|
"payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
|
|
"valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
|
|
"call_control_id": ccid, "client_state": next_state
|
|
}
|
|
send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
|
|
else:
|
|
logger.warning(f" -> Playback ended with unhandled state '{state_name}'. Hanging up.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
|
|
elif event_type == 'call.speak.ended':
|
|
if state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']:
|
|
logger.info(f" -> Main message TTS finished. Playing options menu.")
|
|
_, media_type, media_value = decoded_parts
|
|
next_state = encode_state(['WAITING_DTMF', media_type, media_value])
|
|
options_prompt = "press 0 to repeat the message or press pound to hang up."
|
|
gather_params = {
|
|
"payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
|
|
"valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
|
|
"call_control_id": ccid, "client_state": next_state
|
|
}
|
|
send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
|
|
elif state_name == 'INBOUND_GREETING_HUP':
|
|
logger.info(" -> Inbound greeting finished. Hanging up.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
else:
|
|
logger.warning(f" -> Speak ended with unhandled state '{state_name}'. Hanging up.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
|
|
elif event_type == 'call.dtmf.received':
|
|
digit = payload.get('digit')
|
|
logger.info(f" DTMF Received: Digit='{digit}'")
|
|
if digit == '#':
|
|
logger.info(" -> '#' received. Terminating call immediately.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
|
|
elif event_type == 'call.gather.ended':
|
|
logger.info(f" -> Gather ended. Digits received: '{payload.get('digits')}', Status: '{payload.get('status')}'")
|
|
if state_name == 'WAITING_DTMF':
|
|
digits = payload.get('digits')
|
|
_, media_type, media_value = decoded_parts
|
|
if digits == "0":
|
|
logger.info(f" -> '0' pressed. Playing {config['replay_silence_ms']}ms silence before replay.")
|
|
next_state = encode_state(['REPLAY_SILENCE', media_type, media_value])
|
|
send_telnyx_command("actions/play_silence", {"milliseconds": str(config['replay_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
|
|
else:
|
|
logger.info(" -> Gather ended with non-repeat condition. Hanging up.")
|
|
send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
|
|
else:
|
|
logger.warning(f" -> Gather ended with unhandled state '{state_name}'.")
|
|
|
|
elif event_type == 'call.hangup':
|
|
logger.info(f" Call Hangup Event: Cause='{payload.get('cause')}'")
|
|
else:
|
|
logger.info(f" -> Unhandled Voice Event: '{event_type}' with state '{state_name}'.")
|
|
|
|
return True # Return app-specific success
|
|
except Exception as e:
|
|
logger.exception(f"Error in handle_telnyx_webhook3: {e}")
|
|
return False
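# Outbound-call state flow implemented by handle_telnyx_webhook3 above (for reference):
#   call.answered                                  -> INIT_PLAY_MAIN (short silence buffer)
#   playback.ended, state INIT_PLAY_MAIN           -> MAIN_MEDIA_PLAYED (audio or TTS)
#   playback/speak.ended, state MAIN_MEDIA_PLAYED
#                         or REPLAYING_MEDIA       -> WAITING_DTMF (gather_using_speak menu)
#   gather.ended with digit '0', state WAITING_DTMF-> REPLAY_SILENCE -> REPLAYING_MEDIA
#   gather timeout or '#'                          -> hangup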
|
|
|
|
|
|
def FilterDevicesByDeviceId(devices_list, device_id_str):
|
|
"""
|
|
Filter devices list to include only the specified device_id.
|
|
|
|
Parameters:
|
|
devices_list: tuple of (device_details_list, device_ids_list)
|
|
device_id_str: string representation of device_id to filter by
|
|
|
|
Returns:
|
|
tuple: filtered (device_details_list, device_ids_list)
|
|
"""
|
|
try:
|
|
target_device_id = int(device_id_str)
|
|
except ValueError:
|
|
return ([], [])
|
|
|
|
device_details_list, device_ids_list = devices_list
|
|
|
|
filtered_details = []
|
|
filtered_ids = []
|
|
|
|
for i, device_details in enumerate(device_details_list):
|
|
device_id = device_details[1] # device_id is second element (index 1)
|
|
if device_id == target_device_id:
|
|
filtered_details.append(device_details)
|
|
filtered_ids.append(device_ids_list[i])
|
|
|
|
return (filtered_details, filtered_ids)
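# Illustrative call (made-up tuples) showing the shape FilterDevicesByDeviceId expects:
# devices_list is a (device_details_list, device_ids_list) pair with device_id at index 1
# of every details tuple.
def _filter_devices_demo():
    devices = ([(201, 5001, "aa:bb"), (202, 5002, "cc:dd")], [5001, 5002])
    return FilterDevicesByDeviceId(devices, "5002")  # -> ([(202, 5002, 'cc:dd')], [5002])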
|
|
|
|
|
|
def FilterSensorsBySensorType(sensor_type):
|
|
"""
|
|
Filter s_table to include only the specified sensor type.
|
|
|
|
Parameters:
|
|
sensor_type: string name of sensor type (e.g., 'temperature', 'radar', 'voc0', etc.)
|
|
|
|
Returns:
|
|
list: filtered s_table containing only the mapped sensor name
|
|
"""
|
|
# Map user-friendly sensor names to their s_table equivalents
|
|
sensor_mapping = {
|
|
'temperature': 'avg_temperature',
|
|
'humidity': 'avg_humidity',
|
|
'pressure': 'pressure_amplitude',
|
|
'light': 'max_light',
|
|
'radar': 'radar',
|
|
'voc0': 'sensor_min_s0',
|
|
'voc1': 'sensor_min_s1',
|
|
'voc2': 'sensor_min_s2',
|
|
'voc3': 'sensor_min_s3',
|
|
'voc4': 'sensor_min_s4',
|
|
'voc5': 'sensor_min_s5',
|
|
'voc6': 'sensor_min_s6',
|
|
'voc7': 'sensor_min_s7',
|
|
'voc8': 'sensor_min_s8',
|
|
'voc9': 'sensor_min_s9'
|
|
}
|
|
|
|
# Get the actual sensor name used in s_table
|
|
mapped_sensor = sensor_mapping.get(sensor_type, sensor_type)
|
|
|
|
# Return the mapped sensor name if it's valid, otherwise empty list
|
|
if mapped_sensor in sensor_mapping.values() or mapped_sensor == sensor_type:
|
|
return [mapped_sensor]
|
|
|
|
return []
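# Note: this helper returns aggregate column names (e.g. 'temperature' -> 'avg_temperature'),
# while get_deployment_query_filtered() below branches on the bare s_table names
# ('temperature', 'radar', 'voc0', ...). The get_sensors_map handler therefore passes
# [sensor] directly and currently leaves this helper unused.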
|
|
|
|
|
|
def CreateSensorsMapFast(map_file, devices_list, selected_date, bw, time_zone_s, radar_part, group_by, filtered_s_table):
|
|
"""
|
|
Create a sensor map with filtered devices and sensors.
|
|
Based on CreateMapFast but with filtering support.
|
|
|
|
Parameters:
|
|
map_file: output file path
|
|
devices_list: filtered devices list
|
|
selected_date: date string
|
|
bw: black and white flag
|
|
time_zone_s: timezone string
|
|
radar_part: radar part specification
|
|
group_by: grouping strategy
|
|
filtered_s_table: filtered sensor table
|
|
|
|
Returns:
|
|
tuple: (success_boolean, vocs_scaled_array)
|
|
"""
|
|
global Id2MACDict
|
|
|
|
st = time.time()
|
|
if radar_part == "s28":
|
|
radar_part = "(s2+s3+s4+s5+s6+s7+s8)/7"
|
|
|
|
try:
|
|
lower_than200 = 0
|
|
larger_than200 = 0
|
|
ids_list = []
|
|
|
|
for details in devices_list[0]:
|
|
well_id = details[0]
|
|
ids_list.append(details[1])
|
|
if well_id < 200:
|
|
lower_than200 += 1
|
|
else:
|
|
larger_than200 += 1
|
|
|
|
if lower_than200 > 0 and larger_than200 > 0:
|
|
return False, []
|
|
|
|
if larger_than200 > 0:
|
|
sensors_c = len(filtered_s_table)
|
|
else: # old sensors not supported
|
|
return False, []
|
|
|
|
devices_c = len(devices_list[0])
|
|
devices_list_str = ",".join(map(str, devices_list[1]))
|
|
image_file = map_file
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s)
|
|
temp_offset = -16
|
|
|
|
# Use filtered sensor table for queries
|
|
if sensors_c > 1:
|
|
sql = get_deployment_query_filtered(
|
|
devices_list_str,
|
|
time_from_str,
|
|
time_to_str,
|
|
ids_list,
|
|
radar_part,
|
|
temp_offset,
|
|
filtered_s_table
|
|
)
|
|
else:
|
|
sql = get_deployment_single_query(
|
|
devices_list_str,
|
|
time_from_str,
|
|
time_to_str,
|
|
ids_list,
|
|
radar_part,
|
|
temp_offset,
|
|
filtered_s_table[0]
|
|
)
|
|
|
|
print(sql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
day_data = cur.fetchall()
|
|
if day_data == None:
|
|
return False, []
|
|
|
|
stretch_by = 10
|
|
minutes = 1440
|
|
stripes = devices_c * sensors_c
|
|
arr_source_template = np.full((stripes, minutes + 4), -0.001, dtype=float)
|
|
arr_stretched_template = np.zeros((int(stripes * stretch_by), minutes, 3), dtype=np.uint8)
|
|
|
|
# Use filtered sensor table
|
|
arr_source = fast_fill_array_from_timescale_filtered(
|
|
day_data,
|
|
time_from_str,
|
|
devices_list[1],
|
|
arr_source_template,
|
|
filtered_s_table,
|
|
time_zone_s
|
|
)
|
|
|
|
arr_source = AddLimits_optimized_filtered(arr_source, devices_c, sensors_c, filtered_s_table, percentile=100)
|
|
scaled_day = CalcExtremes(arr_source, minutes, stripes)
|
|
arr_stretched, vocs_scaled = FillImage_optimized(scaled_day, devices_c, sensors_c, arr_stretched_template, group_by, bw)
|
|
SaveImageInBlob(image_file, arr_stretched)
|
|
return True, vocs_scaled
|
|
|
|
except Exception as e:
|
|
AddToLog(traceback.format_exc())
|
|
return False, []
|
|
|
|
|
|
def get_deployment_query_filtered(devices_list_str, time_from_str, time_to_str, ids_list, radar_part, temp_offset, filtered_s_table):
|
|
"""
|
|
Generate a filtered TimeScaleDB query for specific sensors only.
|
|
|
|
Parameters:
|
|
devices_list_str (str): Comma-separated string of device IDs
|
|
time_from_str (str): Start time for the query
|
|
time_to_str (str): End time for the query
|
|
ids_list (list): List of device IDs in priority order for sorting
|
|
radar_part (str): Radar column name
|
|
temp_offset (float): Temperature offset
|
|
filtered_s_table (list): List of sensor names to include
|
|
|
|
Returns:
|
|
str: Generated SQL query
|
|
"""
|
|
# Generate the CASE statement for ordering
|
|
case_statements = []
|
|
for index, device_id in enumerate(ids_list, start=1):
|
|
case_statements.append(f"WHEN {device_id} THEN {index}")
|
|
case_order = "\n ".join(case_statements)
|
|
|
|
# Build sensor-specific SELECT clauses
|
|
sensor_selects = []
|
|
sensor_aggregates = []
|
|
radar_needed = False
|
|
|
|
for sensor in filtered_s_table:
|
|
if sensor == "temperature":
|
|
sensor_selects.append(f"sr.avg_temperature+ {temp_offset} as avg_temperature")
|
|
sensor_aggregates.append("AVG(temperature) AS avg_temperature")
|
|
elif sensor == "humidity":
|
|
sensor_selects.append("sr.avg_humidity")
|
|
sensor_aggregates.append("AVG(humidity) AS avg_humidity")
|
|
elif sensor == "pressure":
|
|
sensor_selects.append("sr.pressure_amplitude")
|
|
sensor_aggregates.append("AVG(pressure) AS pressure_amplitude")
|
|
elif sensor == "light":
|
|
sensor_selects.append("sr.max_light")
|
|
sensor_aggregates.append("MAX(light) AS max_light")
|
|
elif sensor == "radar":
|
|
sensor_selects.append("rr.radar")
|
|
radar_needed = True
|
|
elif sensor.startswith("voc"):
|
|
# Extract sensor number (e.g., voc0 -> 0)
|
|
sensor_num = sensor.replace("voc", "")
|
|
sensor_selects.append(f"sr.{sensor}")
|
|
sensor_aggregates.append(f"MIN(s{sensor_num}) AS {sensor}")
|
|
|
|
# Build the query
|
|
if radar_needed and sensor_aggregates:
|
|
# Need both sensor readings and radar readings
|
|
sql = f"""
|
|
SELECT
|
|
COALESCE(sr.minute, rr.minute) as minute,
|
|
COALESCE(sr.device_id, rr.device_id) as device_id,
|
|
{', '.join(sensor_selects)}
|
|
FROM (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{', '.join(sensor_aggregates)}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) sr
|
|
FULL OUTER JOIN (
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS radar
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
) rr
|
|
ON sr.minute = rr.minute AND sr.device_id = rr.device_id
|
|
ORDER BY
|
|
CASE COALESCE(sr.device_id, rr.device_id)
|
|
{case_order}
|
|
END,
|
|
COALESCE(sr.minute, rr.minute);
|
|
"""
|
|
elif radar_needed:
|
|
# Only radar needed
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
MAX({radar_part}) AS radar
|
|
FROM
|
|
radar_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
else:
|
|
# Only sensor readings needed
|
|
sql = f"""
|
|
SELECT
|
|
time_bucket('1 minute', time) AS minute,
|
|
device_id,
|
|
{', '.join(sensor_aggregates)}
|
|
FROM
|
|
sensor_readings
|
|
WHERE
|
|
device_id IN ({devices_list_str})
|
|
AND time >= '{time_from_str}'
|
|
AND time < '{time_to_str}'
|
|
GROUP BY
|
|
minute,
|
|
device_id
|
|
ORDER BY
|
|
CASE device_id
|
|
{case_order}
|
|
END,
|
|
minute;
|
|
"""
|
|
|
|
return sql
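# Example invocation (hypothetical device ids and time window) of the builder above,
# selecting only temperature and radar for two devices:
def _filtered_query_example():
    return get_deployment_query_filtered(
        "5001,5002",
        "2025-01-27 00:00:00+00",
        "2025-01-28 00:00:00+00",
        [5001, 5002],
        "(s2+s3+s4+s5+s6+s7+s8)/7",
        -16,
        ["temperature", "radar"],
    )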
|
|
|
|
|
|
|
|
def fast_fill_array_from_timescale_filtered(day_data, time_from_str, devices_list, arr_source, filtered_s_table, timezone_str="Europe/Berlin"):
|
|
"""
|
|
Optimized array filling for filtered sensors.
|
|
|
|
Parameters:
|
|
day_data: query results
|
|
time_from_str: start time string
|
|
devices_list: list of device IDs
|
|
arr_source: array to fill
|
|
filtered_s_table: list of sensor names to process
|
|
timezone_str: timezone string (currently unused by this function)
|
|
|
|
Returns:
|
|
numpy array: filled array
|
|
"""
|
|
# Convert start time to timezone-aware datetime
|
|
start_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
# Create device index mapping
|
|
device_to_index = {device_id: idx for idx, device_id in enumerate(devices_list)}
|
|
|
|
# Pre-process data into a more efficient structure
|
|
device_data = defaultdict(list)
|
|
for record in day_data:
|
|
if record[0] and record[1]: # If time and device_id exist
|
|
device_data[record[1]].append(record)
|
|
|
|
# Build column mapping based on filtered sensors
|
|
columns = {}
|
|
col_idx = 2 # Start after time and device_id
|
|
|
|
for sensor in filtered_s_table:
|
|
columns[sensor] = col_idx
|
|
col_idx += 1
|
|
|
|
# Process each device's data
|
|
for device_id, records in device_data.items():
|
|
if device_id not in device_to_index:
|
|
continue
|
|
|
|
base_idx = device_to_index[device_id] * len(filtered_s_table)
|
|
|
|
# Convert records to numpy array for faster processing
|
|
records_array = np.array(records, dtype=object)
|
|
|
|
# Calculate all minute deltas at once
|
|
times = records_array[:, 0]
|
|
minute_deltas = np.array([(t - start_time).total_seconds() / 60 for t in times], dtype=int)
|
|
|
|
# Filter valid minute deltas
|
|
valid_mask = (minute_deltas >= 0) & (minute_deltas < arr_source.shape[1] - 4)
|
|
if not np.any(valid_mask):
|
|
continue
|
|
|
|
minute_deltas = minute_deltas[valid_mask]
|
|
records_array = records_array[valid_mask]
|
|
|
|
# Process each filtered sensor
|
|
for sensor_idx, sensor_name in enumerate(filtered_s_table):
|
|
if sensor_name in columns:
|
|
row_idx = base_idx + sensor_idx
|
|
values = records_array[:, columns[sensor_name]]
|
|
|
|
# Filter out None values
|
|
valid_values = ~np.equal(values, None)
|
|
if not np.any(valid_values):
|
|
continue
|
|
|
|
# Update array in bulk
|
|
arr_source[row_idx, minute_deltas[valid_values]] = values[valid_values]
|
|
|
|
return arr_source
|
|
|
|
|
|
def AddLimits_optimized_filtered(arr_source, devices_c, sensors_c, filtered_s_table, percentile):
|
|
"""
|
|
Vectorized version of AddLimits for filtered sensors.
|
|
|
|
Parameters:
|
|
arr_source: array of shape (devices_c * sensors_c, 1444)
|
|
devices_c: number of devices
|
|
sensors_c: number of sensors per device
|
|
filtered_s_table: list of sensor names
|
|
percentile: parameter for clean_data_vectorized
|
|
"""
|
|
total_sensors = devices_c * sensors_c
|
|
|
|
# Create arrays of sensor legal values for filtered sensors
|
|
min_vals = []
|
|
max_vals = []
|
|
windows = []
|
|
|
|
for sensor_name in filtered_s_table:
|
|
if sensor_name in sensor_legal_values:
|
|
min_vals.append(sensor_legal_values[sensor_name][0])
|
|
max_vals.append(sensor_legal_values[sensor_name][1])
|
|
windows.append(sensor_legal_values[sensor_name][2])
|
|
else:
|
|
# Default values if sensor not found
|
|
min_vals.append(0)
|
|
max_vals.append(1000)
|
|
windows.append(1)
|
|
|
|
# Repeat for each device
|
|
min_vals = np.tile(min_vals, devices_c)
|
|
max_vals = np.tile(max_vals, devices_c)
|
|
windows = np.tile(windows, devices_c)
|
|
|
|
# Process rows that need cleaning (window > 2)
|
|
clean_mask = windows > 2
|
|
if np.any(clean_mask):
|
|
for window in np.unique(windows[clean_mask]):
|
|
rows_to_clean = np.where(clean_mask & (windows == window))[0]
|
|
|
|
for row_idx in rows_to_clean:
|
|
arr_source[row_idx, :1440] = clean_data_vectorized(
|
|
arr_source[row_idx, :1440],
|
|
window,
|
|
percentile
|
|
)
|
|
|
|
# Set min/max values for all rows
|
|
arr_source[:, 1440] = min_vals
|
|
arr_source[:, 1441] = max_vals
|
|
|
|
return arr_source
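# Layout reminder for the two filtered helpers above: arr_source has one row per
# (device, sensor) stripe and 1444 columns, where columns 0..1439 hold per-minute values
# and columns 1440/1441 are overwritten here with each sensor's legal min/max (presumably
# so the downstream scaling in CalcExtremes() works against fixed bounds rather than the
# observed range alone).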
|
|
#==================================== ADD FUNCTIONS BEFORE ============================================
|
|
|
|
# Main API class
|
|
class WellApi:
|
|
def on_get_healthz(self, req, resp):
|
|
"""Health check endpoint"""
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_TEXT
|
|
resp.text = "OK"
|
|
|
|
def on_get(self, req, resp, path=""):
|
|
"""Handle GET requests"""
|
|
global s_table_temp
|
|
|
|
logger.debug(f"GET request to path: {path}")
|
|
logger.debug(f"Sent variables: {req.params}")
|
|
logger.debug(f"All headers: {dict(req.headers)}")
|
|
if path == "" or path == "/":
|
|
# Serve the main portal page
|
|
blob_data = read_file("well_portal.html")
|
|
if blob_data:
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
else:
|
|
# Fall back to JSON response if file not found
|
|
resp.media = {"message": "Hello from OpenFaaS Serverless Web Server!", "method": "GET"}
|
|
return
|
|
elif path == "favicon.ico":
|
|
favicon_path = "favicon.ico"
|
|
if os.path.isfile(favicon_path):
|
|
resp.content_type = 'image/x-icon'
|
|
resp.data = read_file(favicon_path, type_="BIN")
|
|
resp.status = HTTP_200
|
|
else:
|
|
resp.status = falcon.HTTP_404
|
|
return
|
|
elif path == "health":
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"status": "healthy"})
|
|
return
|
|
|
|
# Authentication and authorization
|
|
token = req.params.get('token')
|
|
user_name = req.params.get('user_name')
|
|
ps = req.params.get('ps')
|
|
|
|
if ps:
|
|
#was token sent in ps field? This allows for token and ps be populated by token or ps
|
|
user_info = verify_token(ps)
|
|
if user_info["username"] == user_name:
|
|
token = ps
|
|
else:
|
|
#is this valid password?
|
|
privileges, user_id = ValidUser(user_name, ps)
|
|
if privileges == "0":
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
else:
|
|
token = generate_token(user_name)
|
|
|
|
|
|
|
|
user_info = verify_token(token)
|
|
|
|
if user_info is None or user_info["username"] != user_name:
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
|
|
get_function_name = req.params.get('name')
|
|
logger.debug(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] - {__name__}.GET_API->{get_function_name}")
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
|
|
if token and user_name:
|
|
user_info = verify_token(token)
|
|
if user_info is None or user_info["username"] != user_name:
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
|
|
get_function_name = req.params.get('name')
|
|
logger.debug(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] - {__name__}.GET_API->{get_function_name}")
|
|
|
|
if get_function_name == "deployment_add":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_deployment.html")
|
|
caretaker = {'deployment_id': 0, 'beneficiary_id': user_id, 'caretaker_id': user_id, 'owner_id': user_id, 'installer_id': user_id, 'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "devices_list":
|
|
st = time.time()
|
|
user_name = req.params.get('user_name')
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
first_s = req.params.get('first')
|
|
last_s = req.params.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
last = 1000000
|
|
blob_data = read_file("my_devices.html")
|
|
|
|
devices = GetVisibleDevices(privileges)
|
|
users = GetUsersFromDeployments(privileges)
|
|
blob_data = UpdateDevicesTable(blob_data, devices, users)
|
|
blob_data = UpdateDeploymentsSelector(blob_data, users)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
#print(blob_data)
|
|
return
|
|
|
|
elif get_function_name == "deployment_edit":
|
|
deployment_id = req.params.get('deployment_id')
|
|
blob_data = read_file("edit_deployment.html")
|
|
|
|
deployment = DeploymentDetails(deployment_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, deployment, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "caretaker_add":
|
|
|
|
blob_data = read_file("edit_caretaker.html")
|
|
caretaker = {'user_id': 0, 'role_ids': '2', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "caretaker_edit":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_caretaker.html")
|
|
|
|
caretaker = UserDetails(user_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, caretaker, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
elif get_function_name == "device_add":
|
|
|
|
blob_data = read_file("edit_device.html")
|
|
device = {'device_id': 0, 'device_mac': '', 'well_id': '', 'description': '', 'location': '', 'close_to': '', 'radar_threshold': '["s3_max",12]', 'temperature_calib': '0.0,1.0,0.0', 'humidity_calib': '0.0,1.0,0.0'}
|
|
blob_data = FillFields(blob_data, device, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "device_edit":
|
|
mac = req.params.get('mac')
|
|
|
|
blob_data = read_file("edit_device.html")
|
|
|
|
device_det = DeviceDetails(mac)
|
|
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
|
|
device_det['radar_threshold'] = '["s3_max",12]'
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, device_det, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
|
|
|
|
elif get_function_name == "beneficiary_edit":
|
|
user_id = req.params.get('user_id')
|
|
blob_data = read_file("edit_beneficiary.html")
|
|
|
|
beneficiary = UserDetails(user_id)
|
|
#blob_data = blob_data.decode("utf-8")
|
|
blob_data = FillFields(blob_data, beneficiary, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "beneficiary_add":
|
|
|
|
blob_data = read_file("edit_beneficiary.html")
|
|
beneficiary = {'user_id': 0, 'role_ids': '1', 'access_to_deployments': '', 'email': '', 'user_name': '', 'first_name': '', 'last_name': '', 'address_street': '', 'address_city': '', 'address_zip': '', 'address_state': '', 'address_country': '', 'phone_number': '', 'picture': '/', 'key': ''}
|
|
blob_data = FillFields(blob_data, beneficiary, 1)
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif get_function_name == "get_image_file":
|
|
#image represents day in local time
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
group_by = req.params.get("group_by")
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
radar_part = req.params.get("radar_part")
|
|
map_type = int(req.params.get("map_type"))
|
|
|
|
bw = req.params.get("bw") == "true"
|
|
unique_identifier = req.params.get("unique_identifier")
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{group_by}_{radar_part}_{map_type}_{bw}_dayly_image.png"
|
|
|
|
#print(check_file_exists(filename))
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
#ddate is in Local Time
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
#time that describes new devices in deployment_history is in UTC therefore timee is in UTC
|
|
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
#file_date is in Local time, so we are comparing that and current Local (to install) Date
|
|
if force_recreate:
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
devices_list = GetProximityList(deployment_id, timee)
|
|
s_table_temp = s_table
|
|
stored, vocs_scaled = CreateMapFast(filename, devices_list, ddate, bw, time_zone_s, radar_part, group_by) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
if stored != True:
|
|
AddToLog("Map not created")
|
|
#logger.warning("Map not created")
|
|
resp.media = package_response("Map not created", HTTP_401)
|
|
return
|
|
else:
|
|
AddToLog("Map created")
|
|
#lets send over MQTT vocs_scaled
|
|
json_data = numpy_to_json(vocs_scaled, devices_list)
|
|
MQSendL("/"+unique_identifier, json_data)
|
|
#print(time.time() - st)
|
|
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
if debug:
|
|
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
|
|
else:
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
sys.stdout.flush()
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif get_function_name == "get_sensors_map":
|
|
# Get filtering parameters
|
|
device_id_str = req.params.get('device_id')
|
|
sensor = req.params.get('sensor')
|
|
|
|
# Get standard image parameters
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_", "-")
|
|
group_by = ""#req.params.get("group_by")
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
force_recreate = "true"
|
|
radar_part = req.params.get("radar_part")
|
|
bw = req.params.get("bw") == "true"
|
|
unique_identifier = req.params.get("unique_identifier")
|
|
|
|
# Create filename with filtering parameters
|
|
filter_suffix = ""
|
|
if device_id_str:
|
|
filter_suffix += f"_dev{device_id_str}"
|
|
if sensor:
|
|
filter_suffix += f"_sens{sensor}"
|
|
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{radar_part}_{bw}{filter_suffix}_sensors_map.png"
|
|
|
|
# Check if file exists and needs recreation
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
# Convert date to UTC epoch for device queries
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s) + 5 # add so date boundary is avoided
|
|
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
|
|
if force_recreate:
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
|
|
# Get initial device list
|
|
devices_list = GetProximityList(deployment_id, timee)
|
|
|
|
# Apply device filtering if specified
|
|
if device_id_str:
|
|
filtered_devices = FilterDevicesByDeviceId(devices_list, device_id_str)
|
|
else:
|
|
filtered_devices = devices_list
|
|
|
|
# Apply sensor filtering if specified
|
|
if sensor:
|
|
filtered_s_table = [sensor]#FilterSensorsBySensorType(sensor)
|
|
else:
|
|
filtered_s_table = s_table
|
|
|
|
# Validate we have devices and sensors to process
|
|
if not filtered_devices[0] or not filtered_s_table:
|
|
AddToLog("No devices or sensors match the specified filters")
|
|
resp.media = package_response("No devices or sensors match the specified filters", HTTP_400)
|
|
return
|
|
|
|
# Create the filtered map
|
|
stored, vocs_scaled = CreateSensorsMapFast(
|
|
filename,
|
|
filtered_devices,
|
|
ddate,
|
|
bw,
|
|
time_zone_s,
|
|
radar_part,
|
|
group_by,
|
|
filtered_s_table
|
|
)
|
|
|
|
if stored != True:
|
|
AddToLog("Sensors map not created")
|
|
resp.media = package_response("Sensors map not created", HTTP_401)
|
|
return
|
|
else:
|
|
AddToLog("Sensors map created")
|
|
# Send over MQTT vocs_scaled
|
|
json_data = numpy_to_json(vocs_scaled, filtered_devices)
|
|
MQSendL("/" + unique_identifier, json_data)
|
|
|
|
# Read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
if debug:
|
|
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
|
|
else:
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
sys.stdout.flush()
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif get_function_name == "get_full_location_map":
|
|
|
|
raw = req.params.get("raw") == "true"
|
|
|
|
if raw:
|
|
#function=request_deployment_map_new
|
|
#token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6InJvYnN0ZXIiLCJleHAiOjE3MzgxNzYzNTZ9.5wzC2dVQhKlMygHPZfombTINbltNq8vxdilLIugNTtA&
|
|
#user_name=robster&
|
|
#date=2025-01-27&
|
|
#deployment_id=21&
|
|
#map_type=2
|
|
chart_type = 8
|
|
else:
|
|
chart_type = int(req.params.get("map_type"))
|
|
|
|
#image represents day in local time
|
|
logger.debug("get_full_location_map")
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
|
|
to_date = ddate
|
|
|
|
try:
|
|
to_date = req.params.get("to_date")
|
|
to_date = to_date.replace("_","-")
|
|
except:
|
|
pass
|
|
|
|
if to_date != ddate:
|
|
chart_type = int(req.params.get("map_type"))
|
|
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
force_recreate_orig = force_recreate
|
|
scale_global = req.params.get("scale_global") == "true"
|
|
fast = req.params.get("fast") == "true"
|
|
bw = req.params.get("bw") == "true"
|
|
motion = req.params.get("motion") == "true"
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
|
|
filter_minutes = int(req.params.get("filter"))
|
|
|
|
if "flavor" in req.params: #this is to be used only when creating
|
|
flavor = int(req.params.get("flavor"))
|
|
else:
|
|
flavor = 0
|
|
|
|
if bw:
|
|
bw_s = "BW"
|
|
else:
|
|
bw_s = "CLR"
|
|
|
|
if fast:
|
|
fast_s = "FAST"
|
|
else:
|
|
fast_s = "SLOW"
|
|
|
|
if motion:
|
|
motion_s = "M"
|
|
else:
|
|
motion_s = "S"
|
|
|
|
if scale_global:
|
|
scl_s = "scl"
|
|
else:
|
|
scl_s = "nscl"
|
|
|
|
|
|
if chart_type == 5 or chart_type == 7:
|
|
#now_date = req.params.get("now_date")
|
|
#now_date = now_date.replace("_","-")
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{to_date}_{filter_minutes}_history_image.png"
|
|
else:
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
#file_exists1, time_modified_utc1 = check_file_exists(filename+".bin")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else: #same date
|
|
current_time = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
time_passed = current_time - time_modified_local
|
|
#if time_passed.seconds > 300: #recreate if older than 5 minutes
|
|
# force_recreate = True
|
|
|
|
else:
|
|
force_recreate = True
|
|
|
|
if force_recreate:
|
|
ddate = ddate.replace("_","-")
|
|
#filter_minutes = 5
|
|
#filename = os.path.join(scriptDir+"/daily_maps/"+deployment, proximity_string+"_"+deployment+"_"+ddate+"_dayly_image.png")
|
|
|
|
filename = filename.replace('\\','/')
|
|
if chart_type == 4: #"collapsed":
|
|
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast, time_zone_s, filter_minutes)
|
|
elif chart_type == 5: #"history":
|
|
GeneratePresenceHistory(filename, force_recreate, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
elif chart_type == 7: #"history full chart":
|
|
filename = GeneratePresenceHistoryChart(filename, force_recreate_orig, deployment_id, filter_minutes, ddate, to_date, ddate, time_zone_s)
|
|
elif chart_type == 8: #"set for mobile"
|
|
GenerateFullLocationMapLabelsOut(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
|
|
else:
|
|
GenerateFullLocationMap(filename, deployment_id, ddate, force_recreate, chart_type, bw, motion, scale_global, fast,time_zone_s, filter_minutes)
|
|
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif get_function_name == "get_presence_map":
|
|
#image represents day in local time
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = req.params.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
scale_global = req.params.get("scale_global") == "true"
|
|
fast = req.params.get("fast") == "true"
|
|
bw = req.params.get("bw") == "true"
|
|
motion = req.params.get("motion") == "true"
|
|
timee = StringToEpoch(ddate, time_zone_s)
|
|
chart_type = int(req.params.get("map_type"))
|
|
filter_minutes = int(req.params.get("filter"))
|
|
|
|
if bw:
|
|
bw_s = "BW"
|
|
else:
|
|
bw_s = "CLR"
|
|
|
|
if fast:
|
|
fast_s = "FAST"
|
|
else:
|
|
fast_s = "SLOW"
|
|
|
|
if motion:
|
|
motion_s = "M"
|
|
else:
|
|
motion_s = "S"
|
|
|
|
if scale_global:
|
|
scl_s = "scl"
|
|
else:
|
|
scl_s = "nscl"
|
|
|
|
|
|
filename = f"/{deployment_id}/{deployment_id}_{ddate}_{bw_s}_{motion_s}_{scl_s}_{chart_type}_Flocation_image.png"
|
|
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(filename)
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = MapFileToDate(filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
|
|
if force_recreate:
|
|
ddate = ddate.replace("_","-")
|
|
days = 7
|
|
|
|
filename = filename.replace('\\','/')
|
|
if chart_type == 6: #"AI Locations":
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
st = time.time()
|
|
if CreatePresenceMap(filename, devices_list, ddate, 1, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s) == 0: #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
print(ddate, "Not found")
|
|
else:
|
|
print(ddate, time.time() - st)
|
|
#lets read and send image from blob
|
|
image_bytes, content_type = GetBlob(filename)
|
|
|
|
if image_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='Image not found',
|
|
description=f'Image {filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = image_bytes
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif get_function_name == "download":
|
|
|
|
deployment_id = req.params.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
date_from = req.params.get("date_from")
|
|
date_to = req.params.get("date_to")
|
|
date_from = date_from.replace("_","-")
|
|
date_to = date_to.replace("_","-")
|
|
consolidated_by = req.params.get("consolidated_by")
|
|
force_recreate = req.params.get("re_create") == "true"
|
|
radar_part = req.params.get("radar_part")
|
|
zip_filename = f"/{deployment_id}/{deployment_id}_{date_from}_{date_to}_{consolidated_by}_data.zip"
|
|
|
|
#print(check_file_exists(filename))
|
|
if not force_recreate:
|
|
file_exists, time_modified_utc = check_file_exists(zip_filename, bucket_name="data-downloads")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = CSVFileToDate(zip_filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate = True
|
|
else:
|
|
force_recreate = True
|
|
|
|
#ddate is in Local Time
|
|
dates = DatesSpan(date_from, date_to)
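#For each local date in the span, reuse that day's CSV from the data-downloads bucket if it is still fresh,
#otherwise recreate it; all per-day CSVs are then zipped into zip_filename below.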
|
|
to_zip = []
|
|
for ddate in dates:
|
|
force_recreate_csv = force_recreate
|
|
csv_dayly_filename = f"/{deployment_id}/{deployment_id}_{ddate}_{consolidated_by}_data.csv"
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
|
|
if not force_recreate_csv:
|
|
#time that describes new devices in deployment_history is in UTC therefore timee is in UTC
|
|
file_exists, time_modified_utc = check_file_exists(csv_dayly_filename, bucket_name="data-downloads")
|
|
if file_exists:
|
|
time_modified_local = time_modified_utc.astimezone(pytz.timezone(time_zone_s))
|
|
time_modified_date = time_modified_local.date()
|
|
file_date = CSVFileToDate(csv_dayly_filename)
|
|
if time_modified_date <= file_date:
|
|
force_recreate_csv = True
|
|
else:
|
|
force_recreate_csv = True
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
#file_date is in Local time, so we are comparing that and current Local (to install) Date
|
|
if force_recreate_csv:
|
|
st = time.time()
|
|
vocs_scaled = {}
|
|
|
|
devices_list = GetProximityList(deployment_id, timee)
|
|
temp_offset = -16
|
|
file_stored = CreateDailyCSV(csv_dayly_filename, devices_list, ddate, vocs_scaled, time_zone_s, radar_part, consolidated_by, temp_offset) #"[bit] 1=same sensors together, 2=same device together, 4=1 der, 8=2 der
|
|
to_zip.append(file_stored)
|
|
else:
|
|
to_zip.append(csv_dayly_filename)
|
|
|
|
if to_zip:
|
|
success = zip_blobs(
|
|
blob_paths=to_zip,
|
|
zip_blob_name=zip_filename,
|
|
bucket_name="data-downloads",
|
|
minio_client=miniIO_blob_client
|
|
)
|
|
|
|
if success:
|
|
print("Files successfully zipped")
|
|
else:
|
|
print("Error occurred while zipping files")
|
|
|
|
#pack CSV files from BLOB into ZIP
|
|
#lets read and send image from blob
|
|
zip_bytes, content_type = GetBlob(zip_filename, bucket_name="data-downloads")
|
|
if debug:
|
|
resp.media = package_response(f'Log: {debug_string}', HTTP_200)
|
|
else:
|
|
if zip_bytes is None:
|
|
raise falcon.HTTPNotFound(
|
|
title='File not found',
|
|
description=f'File {zip_filename} could not be found or retrieved'
|
|
)
|
|
|
|
# Set response content type and body
|
|
resp.content_type = content_type
|
|
resp.data = zip_bytes
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
resp.media = package_response("Use POST method for this endpoint", HTTP_400)
|
|
|
|
# Default response for unmatched paths
|
|
#resp.media = package_response(f"Path: /{path}", HTTP_200)
|
|
|
|
def on_post(self, req, resp, path=""):
|
|
#ToDo make sure that any read/write data functions are authorized for this user_name
|
|
global threshold_cache, device_lookup_cache
|
|
|
|
"""Handle POST requests"""
|
|
logger.debug(f"on_post called with path: {path}")
|
|
logger.debug(f"Request method: {req.method}")
|
|
logger.debug(f"Request path: {req.path}")
|
|
logger.debug(f"Request query string: {req.query_string}")
|
|
logger.debug(f"Request headers: {req.headers}")
|
|
logger.debug(f"Request content type: {req.content_type}")
|
|
|
|
|
|
# First, check if this is a Telnyx webhook request
|
|
is_telnyx_webhook = (
|
|
req.content_type and 'application/json' in req.content_type and
|
|
req.headers.get('USER-AGENT') == 'telnyx-webhooks'
|
|
)
|
|
|
|
if is_telnyx_webhook:
|
|
logger.info("Processing Telnyx webhook request")
|
|
try:
|
|
# Read the raw request body
|
|
raw_body = req.stream.read().decode('utf-8')
|
|
logger.debug(f"Raw webhook request body: {raw_body}")
|
|
|
|
if not raw_body:
|
|
logger.error("Empty request body received from Telnyx")
|
|
resp.status = falcon.HTTP_400
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Empty request body"})
|
|
return
|
|
|
|
# Parse JSON
|
|
webhook_data = json.loads(raw_body)
|
|
logger.debug(f"Parsed webhook data: {json.dumps(webhook_data)}")
|
|
|
|
# Get remote address and request ID
|
|
remote_addr = req.headers.get('X-REAL-IP') or req.headers.get('X-FORWARDED-FOR') or 'unknown'
|
|
request_id = req.headers.get("X-Request-Id") or req.headers.get("Telnyx-Request-Id") or req.headers.get("X-CALL-ID") or "N/A"
|
|
|
|
# Process the webhook
|
|
handle_telnyx_webhook(webhook_data, remote_addr, request_id)
|
|
|
|
# Set response status - always acknowledge webhooks with 204 No Content
|
|
resp.status = falcon.HTTP_204
|
|
return
|
|
|
|
except json.JSONDecodeError as e:
|
|
logger.error(f"Failed to decode JSON from webhook request body: {e}")
|
|
resp.status = falcon.HTTP_400
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Invalid JSON payload"})
|
|
return
|
|
except Exception as e:
|
|
logger.exception(f"Error processing webhook: {e}")
|
|
resp.status = falcon.HTTP_500
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Internal Server Error"})
|
|
return
|
|
|
|
# If we get here, it's not a Telnyx webhook, so process as normal
|
|
try:
|
|
# For non-webhook requests, get form data
|
|
form_data = get_form_data(req)
|
|
logger.debug(f"Form data: {form_data}")
|
|
|
|
except Exception as e:
|
|
logger.exception(f"Error in on_post: {e}")
|
|
resp.status = falcon.HTTP_500
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Internal Server Error"})
return
|
|
|
|
# Get form data using our helper function - but don't read stream again
|
|
#form_data = get_form_data(req)
|
|
logger.debug(f"Form data: {form_data}")
|
|
|
|
|
|
try:
|
|
|
|
# Get basic parameters
|
|
function = form_data.get('function')
|
|
user_name = form_data.get('user_name')
|
|
logger.debug(f"Function: {function}, User: {user_name}")
|
|
|
|
if function != "credentials":
|
|
token = form_data.get('token')
|
|
ps = form_data.get('ps')
|
|
|
|
if ps != "" and ps != None:
|
|
#Was the token sent in the ps field? This allows either field (token or ps) to carry the token or the password.
|
|
user_info = verify_token(ps)
|
|
if user_info != None:
|
|
if user_info["username"] == user_name:
|
|
token = ps
|
|
else:
|
|
#is this valid password?
|
|
privileges, user_id = ValidUser(user_name, ps)
|
|
if privileges == "0":
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
else:
|
|
token = generate_token(user_name)
|
|
|
|
user_info = verify_token(token)
|
|
|
|
if user_info == None or user_info["username"] != user_name:
|
|
resp.media = package_response("Log-Out", HTTP_401)
|
|
return
|
|
|
|
|
|
#with get_db_connection() as db_conn:
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
|
|
# Handle credentials function - most common case
|
|
if function == "credentials":
|
|
|
|
clientId = form_data.get('clientId')
|
|
nonce = form_data.get('nonce')
|
|
ps = form_data.get('ps')
|
|
|
|
if not user_name:
|
|
resp.media = package_response("Required field 'user_name' is missing", HTTP_400)
|
|
return
|
|
|
|
if not clientId:
|
|
resp.media = package_response("Required field 'clientId' is missing", HTTP_400)
|
|
return
|
|
|
|
if not nonce:
|
|
resp.media = package_response("Required field 'nonce' is missing", HTTP_400)
|
|
return
|
|
|
|
if not ps:
|
|
resp.media = package_response("Required field 'ps' is missing", HTTP_400)
|
|
return
|
|
|
|
|
|
|
|
if False:
|
|
pass
|
|
else:
|
|
#lets check for real
|
|
privileges, user_id = ValidUser(user_name, ps)
|
|
if privileges == "0":
|
|
access_token = 0
|
|
privileges = 0
|
|
else:
|
|
access_token = generate_token(user_name)
|
|
|
|
if privileges == "-1":
|
|
max_role = -1
|
|
else:
|
|
max_role = GetMaxRole(user_name)
|
|
if "2" in max_role:
|
|
max_role = 2
|
|
else:
|
|
max_role = 1
|
|
|
|
token_payload = {'access_token': access_token, 'privileges': privileges, 'user_id': user_id, 'max_role': max_role}
|
|
resp.media = package_response(token_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
# Handle token-protected functions
|
|
elif function == "messages_age":
|
|
|
|
macs = form_data.get('macs')
|
|
|
|
with get_db_connection() as conn:
|
|
|
|
#print (sqlr)
|
|
with conn.cursor() as cur:
|
|
|
|
devices = MACsStrToDevIds(cur, macs)
|
|
|
|
devices_string = ",".join(f"{device_id}" for mac, device_id in devices)
|
|
|
|
|
|
sqlr = f"""
|
|
SELECT
|
|
device_id,
|
|
GREATEST(
|
|
radar_last_time,
|
|
sensor_last_time
|
|
) AS latest_time
|
|
FROM
|
|
(SELECT unnest(ARRAY[{devices_string}]) AS device_id) d
|
|
LEFT JOIN LATERAL (
|
|
SELECT time AS radar_last_time
|
|
FROM radar_readings
|
|
WHERE device_id = d.device_id
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) r ON true
|
|
LEFT JOIN LATERAL (
|
|
SELECT time AS sensor_last_time
|
|
FROM sensor_readings
|
|
WHERE device_id = d.device_id
|
|
ORDER BY time DESC
|
|
LIMIT 1
|
|
) s ON true;"""
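#The two LATERAL joins fetch the newest radar_readings.time and sensor_readings.time per requested device;
#GREATEST() keeps the later of the two, so each result row is (device_id, latest_time), with NULL only when
#a device has no readings in either table.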
|
|
logger.debug(f"sqlr= {sqlr}")
|
|
cur.execute(sqlr)
|
|
times_list = cur.fetchall()
|
|
result = {}
|
|
for i in range(len(times_list)):
|
|
if times_list[i][1] is not None:
|
|
result[devices[i][0]] = times_list[i][1].timestamp()
|
|
else:
|
|
result[devices[i][0]] = 0
|
|
|
|
dataa = {}
|
|
dataa['Command'] = "REPORT"
|
|
dataa['body'] = result
|
|
dataa['time'] = time.time()
|
|
#json_data = json.dumps(dataa)
|
|
payload = {'ok': True, 'response': dataa}
|
|
resp.media = package_response(payload)
|
|
logger.warning(f"Responded: {str(payload)}")
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "voice_ask":
|
|
|
|
question = form_data.get('question')
|
|
deployment_id = form_data.get('deployment_id')
|
|
|
|
if ('language_from' in form_data):
|
|
language_from = form_data.get('language_from').strip()
|
|
else:
|
|
language_from = "English"
|
|
|
|
if ('language_to' in form_data):
|
|
language_to = form_data.get('language_to').strip()
|
|
else:
|
|
language_to = "English"
|
|
|
|
|
|
result, language = AskGPT(question, language_from, language_to)
|
|
|
|
if result[0] == "#":
|
|
result = RunCommand(result, {}, deployment_id)
|
|
|
|
dataa = {}
|
|
dataa['Command'] = "REPORT"
|
|
dataa['body'] = result
|
|
dataa['name'] = ""
|
|
dataa['reflected'] = ""
|
|
dataa['language'] = language
|
|
dataa['time'] = time.time()
|
|
#json_data = json.dumps(dataa)
|
|
payload = {'ok': True, 'response': dataa}
|
|
resp.media = package_response(payload)
|
|
logger.warning(f"Responded: {str(payload)}")
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
|
|
elif function == "calibrate_thresholds":
|
|
#This uses the current date to calibrate the radar presence thresholds.
#Make sure the data is well defined (has a clear absence/presence signature) for all rooms on the chosen day.
#Format of the radar_threshold field: [gates_to_use_Presence_list, p_threshold]
#We need to automate this functionality!
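#Illustrative example (assumed from the devices_list tuples used elsewhere in this file):
#a radar_threshold of '["s3_max", 12]' would mean "use the s3_max gate with a presence threshold of 12".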
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
ddate = form_data.get("date")
|
|
ddate = ddate.replace("_","-")
|
|
selected_date = ddate
|
|
|
|
|
|
|
|
stdev_range = int(form_data.get("stdev_range"))
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
selected_date = FindCalibrationDate(device_ids, ddate)
|
|
|
|
devices_c = len(devices_list) #number of devices; len(devices_list[0]) would count the fields of the first device tuple
|
|
|
|
time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s, stdev_range)
|
|
fields = ["radar_s_min", "radar_s_max", "radar_m_max", "radar_stdev"]
|
|
cnt = 0
|
|
ids_list = []
|
|
for details in devices_list:
|
|
ids_list.append(details[1])
|
|
devices_list_str = ",".join(map(str, ids_list))
|
|
device_to_index = {device: idx for idx, device in enumerate(ids_list)}
|
|
|
|
minutes = 1440
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
for device_index in range(devices_c):
|
|
well_id = devices_list[device_index][0]
|
|
device_id = devices_list[device_index][1]
|
|
location = devices_list[device_index][2]
|
|
|
|
sql = get_device_radar_s28_only_query(time_from_str, time_to_str, device_id)
|
|
print(sql)
|
|
|
|
#sql1 = get_deployment_radar_only_colapsed_query(str(device_id), time_from_str, time_to_str, [device_id])
|
|
#print(sql1)
|
|
st = time.time()
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()
|
|
|
|
timestamps, stationary, motion = process_raw_data(my_data)
|
|
print(type(stationary))
|
|
# Find threshold above which 20% of points lie
|
|
AveragePercentSpendsThere = AveragePercentPerLocation[Consolidataed_locations[location]]
|
|
threshold_high, threshold_low = FindThreshold(stationary, AveragePercentSpendsThere)
|
|
file_save = f"threshold_graph_{location}.png"
|
|
title = f"{well_id}_{location}"
|
|
|
|
threshold2, x_percent, y_percent = ShowThresholdGraph(stationary, file_save, threshold_low, threshold_high, title, AveragePercentSpendsThere, location)
|
|
|
|
print(f"Maximum curvature point found at:")
|
|
print(f"Threshold value: {threshold2:.3f}")
|
|
print(f"X: {x_percent:.1f}% of range")
|
|
print(f"Y: {y_percent:.1f}% of points above")
|
|
|
|
ShowArray(stationary, threshold2, filename=f"stationary_{devices_list[device_index][0]}.png", title=f"stationary_{devices_list[device_index][0]}_{devices_list[device_index][2]}", style='line')
|
|
|
|
|
|
##threshold
|
|
##presence_mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
|
|
|
|
### Save visualization to file
|
|
##visualize_detection(timestamps, stationary, motion, presence_mask,
|
|
## baseline, threshold)
|
|
|
|
#cur.execute(sql1)
|
|
#my_data1 = cur.fetchall()#cur.fetchone()
|
|
#print(time.time() - st)
|
|
#if my_data == None or my_data1 == None:
|
|
#logger.warning(f"No data found for device_id {device_id}")
|
|
#else:
|
|
#print(type(my_data))
|
|
##minute,
|
|
##device_id,
|
|
##s_min as radar_s_min,
|
|
##s_max as radar_s_max,
|
|
##m_max as radar_m_max
|
|
|
|
#values = [tup[1] for tup in my_data] #10 sec (RAW) data
|
|
|
|
#hist, bins = np.histogram(values, bins=1000, range=(0, 100))
|
|
#TR, BR = FindZeroIntersection(hist, bins, f'raw_{device_id}_histogram.png', device_id)
|
|
#if True:#device_id == 560:
|
|
#plot(values, filename=f"radar_{device_id}_s28.png", title=f"Radar s28 {device_id}", style='line')
|
|
#plot(hist, filename=f"radar_{device_id}_s28_hist.png", title=f"Radar s28 {device_id} histogram", style='line')
|
|
|
|
##life = [tup[3] - tup[2] + tup[4] for tup in my_data1]
|
|
#life, average = calculate_life_and_average(my_data1, stdev_range) #5 min data
|
|
#lhist, lbins = np.histogram(life, bins=1000)
|
|
#TLIFE, BLIFE = FindZeroIntersection(lhist, lbins, f'life_{device_id}_histogram.png', device_id)
|
|
|
|
#StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE)
|
|
##for now not needed...
|
|
##ahist, abins = np.histogram(average, bins=1000)
|
|
##dummy1, dummy = FindZeroIntersection(ahist, abins)
|
|
#if True:#device_id == 560:
|
|
#plot(average, filename=f"average_{device_id}.png", title=f"Average {device_id}", style='line')
|
|
#plot(life, filename=f"life_{device_id}.png", title=f"Life {device_id}", style='line')
|
|
#plot(lhist, filename=f"life_{device_id}_hist.png", title=f"life {device_id} histogram", style='line')
|
|
##plot(ahist, filename=f"average_{device_id}_hist.png", title=f"average {device_id} histogram", style='line')
|
|
|
|
|
|
sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list)
|
|
print(sql)
|
|
my_data = []
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = cur.fetchall()#cur.fetchone()
|
|
#print(result)
|
|
if my_data == None:
|
|
return False
|
|
|
|
fields_n = len(fields)
|
|
stripes = devices_c * fields_n #radar_min and radar_max
|
|
print(my_data)
|
|
base_minute = ConvertToBase(time_from_str, time_zone_s)
|
|
#base_minute = my_data[0][0]# min(record[0] for record in my_data)
|
|
#remember: base_minute is offset (smaller) by the number of minutes in stdev_range
|
|
st = time.time()
|
|
wave_m = np.zeros((stripes, 1440+2*stdev_range, 1), dtype=np.float32)
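#wave_m layout: one stripe per (device, field) pair, fields being radar_s_min, radar_s_max, radar_m_max, radar_stdev.
#Row index y = device_idx * fields_n + field_idx, column x = minutes elapsed since base_minute,
#and the width is 1440 minutes plus stdev_range padding on each side.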
|
|
|
|
for record in my_data:
|
|
#(minute,device_id,s28_min,s28_max) = record
|
|
minute, device_id = record[0:2]
|
|
values = record[2:] # All the min/max values
|
|
x = int((minute - base_minute).total_seconds()/60)
|
|
|
|
device_idx = device_to_index[device_id]
|
|
#value[0] are mins, value[1] are maxes
|
|
#when trying to illustrate presence, use s28_max, when absence (night leaving bed) use s28s_min
|
|
for field_idx, value in enumerate(values):
|
|
# Calculate y position
|
|
y = device_idx * fields_n + field_idx
|
|
wave_m[y, x] = value
|
|
|
|
print(time.time()-st)
|
|
|
|
#We need to reliably determine presence and LIFE (motion) in every 5 minutes of data.
#Presence is determined by the average value being significantly different from the last known base.
#The last known base is the average value during extended periods (>= H hours) of low stdev, while it is determined that
#the person is moving elsewhere and only one person is in the monitored area.
|
|
|
|
#lets calculate stdevs
|
|
for device_index in range(devices_c):
|
|
y = device_index * fields_n
|
|
row = wave_m[y]
|
|
stdevs = np.zeros((1440+2*stdev_range, 1), dtype=np.float32)
|
|
stdevs, amplitude = CalcStdevs(row, stdev_range, stdevs)
|
|
wave_m[y+3] = stdevs
|
|
plot(stdevs, filename=f"radar{device_index}_stdevs.png", title=f"Radar Stdevs {device_index}", style='line')
|
|
|
|
minutes = 1440
|
|
|
|
|
|
device_index = 0
|
|
y = 0
|
|
for device in devices_list:
|
|
wave = wave_m[y][stdev_range: stdev_range + minutes]
|
|
plot(wave,
|
|
filename="radar_wave_min.png",
|
|
title="Radar Signal Min",
|
|
style='line')
|
|
# Create histogram with 1000 bins
|
|
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
|
|
|
|
#bin_centers = (bins[:-1] + bins[1:]) / 2
|
|
hist_line = hist # These are your y values
|
|
|
|
# Plot with proper axis labels
|
|
plot(hist_line,
|
|
filename="radar_histogram_min.png",
|
|
title="Radar Signal Histogram Min (1000 bins)",
|
|
style='line')
|
|
|
|
wave = wave_m[y+1]
|
|
plot(wave,
|
|
filename="radar_wave_max.png",
|
|
title="Radar Signal",
|
|
style='line')
|
|
# Create histogram with 1000 bins
|
|
hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
|
|
|
|
#bin_centers = (bins[:-1] + bins[1:]) / 2
|
|
hist_line = hist # These are your y values
|
|
|
|
# Plot with proper axis labels
|
|
plot(hist_line,
|
|
filename="radar_histogram_max.png",
|
|
title="Radar Signal Histogram Max(1000 bins)",
|
|
style='line')
|
|
|
|
print(wave)
|
|
device_index += 1
y = device_index * fields_n #advance to the next device's block of stripes
|
|
|
|
#lets see this map
|
|
stretch_by = 5
|
|
arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as an image; 3 = RGB channels
|
|
st = time.time()
|
|
for yy in range(stripes):
|
|
rgb_row = []
|
|
row = wave_m[yy]
|
|
for x in range(minutes):
|
|
value = 1280 * row[x] / 100
|
|
rgb_row.append(BestColor(value))
|
|
for stretch_index in range(stretch_by):
|
|
y = yy * stretch_by + stretch_index
|
|
arr_stretched[y, :] = rgb_row
|
|
|
|
print(time.time()-st)
|
|
filename = f"{deployment_id}/{deployment_id}_{ddate}_min_max_radar.png"
|
|
SaveImageInBlob(filename, arr_stretched, [])
|
|
|
|
|
|
return
|
|
elif function == "get_time_deltas":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
sensor = form_data.get('sensor')
|
|
selected_date = form_data.get('date')
|
|
date_to = form_data.get('to_date')
|
|
radar_part = ""
|
|
sensor_data = {}
|
|
if date_to == None:
|
|
date_to = selected_date
|
|
|
|
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
|
|
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
|
|
|
|
# Determine direction and swap dates if necessary
|
|
if start_date > end_date:
|
|
selected_date, date_to = date_to, selected_date
|
|
|
|
device_id = form_data.get('device_id')
|
|
|
|
data_type = form_data.get('data_type')
|
|
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
|
|
|
|
|
|
all_slices = {}
|
|
|
|
cleaned_values = {}
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
st = time.time()
|
|
cleaned_values = [
|
|
(line_part[i][0], (line_part[i][0] - line_part[i-1][0]).total_seconds() * 1000)
|
|
for i in range(1, len(line_part))
|
|
]
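#cleaned_values pairs each reading's timestamp with the gap to the previous reading in milliseconds,
#e.g. readings at 10:00:00 and 10:00:10 yield (10:00:10, 10000.0); the first reading has no entry.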
|
|
print(time.time()-st)
|
|
|
|
if True:
|
|
# Create CSV content as a string
|
|
csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
|
|
|
|
for i in range(len(line_part)):
|
|
timestamp, value = line_part[i]
|
|
|
|
if i == 0:
|
|
# First record has no previous record to compare
|
|
time_diff_seconds = 0
|
|
time_diff_ms = 0
|
|
else:
|
|
# Calculate time difference from previous record
|
|
prev_timestamp = line_part[i-1][0]
|
|
time_diff = timestamp - prev_timestamp
|
|
time_diff_seconds = time_diff.total_seconds()
|
|
time_diff_ms = time_diff_seconds * 1000
|
|
|
|
# Format the row
|
|
row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
|
|
csv_content += row
|
|
|
|
# Write to file
|
|
with open(f'time_differences_{sensor}_{device_id}.csv', 'w', encoding='utf-8') as f:
|
|
f.write(csv_content)
|
|
|
|
print(f"CSV file 'time_differences_{sensor}_{device_id}.csv' created successfully!")
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
|
|
|
|
sensor_data[sensor] = line_part_t
|
|
dataa = {}
|
|
all_slices = {}
|
|
all_slices[device_id] = sensor_data
|
|
dataa['Function'] = "time_deltas"
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['device_id'] = device_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_sensor_deltas":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
sensor = form_data.get('sensor')
|
|
selected_date = form_data.get('date')
|
|
date_to = form_data.get('to_date')
|
|
radar_part = ""
|
|
sensor_data = {}
|
|
if date_to == None:
|
|
date_to = selected_date
|
|
|
|
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
|
|
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
|
|
|
|
# Determine direction and swap dates if necessary
|
|
if start_date > end_date:
|
|
selected_date, date_to = date_to, selected_date
|
|
|
|
device_id = form_data.get('device_id')
|
|
|
|
data_type = form_data.get('data_type')
|
|
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
|
|
|
|
|
|
all_slices = {}
|
|
|
|
cleaned_values = {}
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
st = time.time()
|
|
cleaned_values = [
|
|
(line_part[i][0], (line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds())
|
|
for i in range(1, len(line_part))
|
|
if (line_part[i][0] - line_part[i-1][0]).total_seconds() > 0
|
|
and abs((line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds()) <= 100
|
|
]
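#cleaned_values holds the first derivative: (value change) / (seconds elapsed) between consecutive readings,
#skipping pairs with a zero-length gap and discarding rates whose absolute value exceeds 100.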
|
|
print(time.time()-st)
|
|
|
|
if False:
|
|
# Create CSV content as a string
|
|
csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
|
|
|
|
for i in range(len(line_part)):
|
|
timestamp, value = line_part[i]
|
|
|
|
if i == 0:
|
|
# First record has no previous record to compare
|
|
time_diff_seconds = 0
|
|
time_diff_ms = 0
|
|
else:
|
|
# Calculate time difference from previous record
|
|
prev_timestamp = line_part[i-1][0]
|
|
time_diff = timestamp - prev_timestamp
|
|
time_diff_seconds = time_diff.total_seconds()
|
|
time_diff_ms = time_diff_seconds * 1000
|
|
|
|
# Format the row
|
|
row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
|
|
csv_content += row
|
|
|
|
# Write to file
|
|
with open(f'time_differences_{sensor}_{device_id}.csv', 'w', encoding='utf-8') as f:
|
|
f.write(csv_content)
|
|
|
|
print(f"CSV file 'time_differences_{sensor}_{device_id}.csv' created successfully!")
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
|
|
|
|
sensor_data[sensor] = line_part_t
|
|
dataa = {}
|
|
all_slices = {}
|
|
all_slices[device_id] = sensor_data
|
|
dataa['Function'] = "time_deltas"
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['device_id'] = device_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "request_single_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
date_to = form_data.get('to_date')
|
|
if date_to == None:
|
|
date_to = selected_date
|
|
|
|
|
|
start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
|
|
end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
|
|
|
|
# Determine direction and swap dates if necessary
|
|
if start_date > end_date:
|
|
selected_date, date_to = date_to, selected_date
|
|
|
|
devices_list = form_data.get('devices_list')
|
|
radar_details = {}
|
|
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
|
|
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
|
|
sensor_list_loc = [form_data.get('sensor_list')]
|
|
is_nested, device_details = check_and_parse(devices_list)
|
|
if not is_nested:
|
|
device_ids_list = [device_details[1]]
|
|
well_ids_list = [device_details[0]]
|
|
else:
|
|
device_ids_list = list(map(lambda x: x[1], device_details))
|
|
well_ids_list = list(map(lambda x: x[0], device_details))
|
|
|
|
data_type = form_data.get('data_type')
|
|
epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
_, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
|
|
|
|
#we need to
|
|
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
|
|
days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
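#days spanned by the requested window; used below to pick the resolution (raw data under 3 days,
#1m buckets under 14 days, 10m buckets otherwise).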
|
|
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
radar_part = ""
|
|
if len(device_details) > 4:
|
|
device_id2_mac = {device_details[1]: device_details[4]}
|
|
#epoch_to = '1730592010' #small sample to test
|
|
#radar_part = form_data.get('radar_part') we need to find what radar part is configured in device settings
|
|
radar_part_all = device_details[5]
|
|
if len(radar_part_all) > 1:
|
|
radar_part = radar_part_all[0]
|
|
#we need only column name and not min or max here
|
|
if "_" in radar_part:
|
|
radar_parts = radar_part.split("_")
|
|
radar_part = radar_parts[0]
|
|
radar_details[device_details[1]] = radar_part_all
|
|
for device_id in device_ids_list:
|
|
|
|
sensor_data = {}
|
|
for sensor in sensor_list_loc:
|
|
st = time.time()
|
|
if days < 3:
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
elif days < 14:
|
|
bucket_size = "1m"
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
else:
|
|
bucket_size = "10m"
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
window = sensor_legal_values[sensor][2]
|
|
|
|
if False:
|
|
# Create CSV content as a string
|
|
csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
|
|
|
|
for i in range(len(line_part)):
|
|
timestamp, value = line_part[i]
|
|
|
|
if i == 0:
|
|
# First record has no previous record to compare
|
|
time_diff_seconds = 0
|
|
time_diff_ms = 0
|
|
else:
|
|
# Calculate time difference from previous record
|
|
prev_timestamp = line_part[i-1][0]
|
|
time_diff = timestamp - prev_timestamp
|
|
time_diff_seconds = time_diff.total_seconds()
|
|
time_diff_ms = time_diff_seconds * 1000
|
|
|
|
# Format the row
|
|
row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
|
|
csv_content += row
|
|
|
|
# Write to file
|
|
with open('time_differences.csv', 'w', encoding='utf-8') as f:
|
|
f.write(csv_content)
|
|
|
|
print("CSV file 'time_differences.csv' created successfully!")
|
|
|
|
#print("@1", time.time() - st)
|
|
#first = 3300
|
|
#last = 3400
|
|
#line_part = line_part[first:last]
|
|
line_part_t = []
|
|
#st = time.time()
|
|
#line_part_t = [tuple(x[:2]) for x in line_part]
|
|
#print(time.time() - st)
|
|
#st = time.time()
|
|
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
|
|
#print(time.time() - st)
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#print("@2", time.time() - st)
|
|
|
|
#Lets add point in minute 0 and minute 1439
|
|
|
|
#st = time.time()
|
|
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
|
|
#print("@3", time.time() - st)
|
|
|
|
sensor_data[sensor] = cleaned_values
|
|
|
|
|
|
if len(device_details) > 4:
|
|
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
else:
|
|
all_slices[device_id] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "single_slicedata"
|
|
dataa['devices_list'] = devices_list
|
|
dataa['all_slices'] = all_slices
|
|
dataa['radar_details'] = radar_details
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['well_id'] = well_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "get_sensor_bucketed_data_by_room_sensor":
|
|
# Inputs:
#   user_name and token
#   deployment_id - selects the deployment set (all rooms and devices) and supplies the timezone
#   date - one day in the format YYYY-MM-DD
#   sensor - temperature/radar/etc.; see the full list (tells which sensor's data to retrieve)
#            "voc" for overall smell uses s4 (a lower reading means a stronger smell; max=0, find the min for 100%)
#            "radar" returns s28
#   radar_part - optional, applies only to radar (tells which segment of the radar data to retrieve)
#   bucket_size - one of ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
#   location - room name (has to be unique)
#   data_type - ML
# Output: JSON structure with the following info
#   chart_data with rooms : [list]
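#Illustrative request fields (hypothetical values; names match the form_data reads below):
#  function=get_sensor_bucketed_data_by_room_sensor, user_name=..., token=...,
#  deployment_id=21, date=2025-01-27, sensor=temperature, radar_part=, bucket_size=15m,
#  location=Bedroom Master, data_type=ML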
|
|
deployment_id = form_data.get('deployment_id')
|
|
selected_date = form_data.get('date')
|
|
sensor = form_data.get('sensor') # one sensor
|
|
radar_part = form_data.get('radar_part')
|
|
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
|
|
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
|
|
location = form_data.get('location')
|
|
data_type = form_data.get('data_type')
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s)
|
|
|
|
# obtain devices_list for deployment_id
|
|
selected_date = selected_date.replace("_","-")
|
|
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
|
|
sensor_data = {}
|
|
units = "°C"
|
|
if "America" in time_zone_s:
|
|
units = "°F"
|
|
# see https://www.w3schools.com/cssref/css_colors.php
|
|
sensor_props = {"temperature": ["red", units],
|
|
"humidity": ["blue", "%"],
|
|
"voc": ["orange", "PPM"],
|
|
"co2": ["orange", "PPM"],
|
|
"pressure": ["magenta", "Bar"],
|
|
"radar": ["cyan", "%"],
|
|
"light": ["yellow", "Lux"]}
|
|
|
|
current_time_la = datetime.datetime.now(pytz.timezone(time_zone_s))
|
|
formatted_time = current_time_la.strftime('%Y-%m-%dT%H:%M:%S') #"2025-02-06T20:09:00"
|
|
|
|
result_dictionary = {
|
|
"last_report_at": formatted_time,
|
|
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
|
|
"units": sensor_props[sensor][1] if sensor in s_table else "?"
|
|
}
|
|
#sensor_mapping = {"co2": "s4", "voc": "s9"}
|
|
#sensor = sensor_mapping.get(sensor, sensor)
|
|
|
|
chart_data = []
|
|
# example data in each element of devices_list is (266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
for well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to in devices_list:
|
|
loc_and_desc = location_name
|
|
if description != None and description != "":
|
|
loc_and_desc = loc_and_desc + " " + description
|
|
|
|
if loc_and_desc == location:
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
window = sensor_legal_values[sensor][2]
|
|
line_part_t = []
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
|
|
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
|
|
if sensor == "temperature":
|
|
if units == "°F":#"America" in time_zone_s:
|
|
compressed_readings = CelsiusToFahrenheitList(compressed_readings)
|
|
|
|
sensor_data[sensor] = compressed_readings
|
|
chart_data.append({'name': location_name, 'data': compressed_readings})
|
|
result_dictionary['chart_data'] = chart_data
|
|
payload = result_dictionary
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "get_sensor_data_by_deployment_id":
|
|
# Inputs:
#   user_name and token
#   deployment_id - selects the deployment set (all rooms and devices)
#   date - one day in the format YYYY-MM-DD
#   sensor - temperature/radar/etc.; see the full list (tells which sensor's data to retrieve)
#   radar_part - optional, applies only to radar (tells which segment of the radar data to retrieve)
#   bucket_size - one of ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
#   data_type - ML
# Output: JSON structure with the following info
#   chart_data with rooms : [list]
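#The response's chart_data ends up as a list with one {'name': location_name, 'data': [...]} entry per device
#in the deployment, where data holds the cleaned, bucketed readings keyed to local time (see the loop below).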
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
sensor = form_data.get('sensor') # one sensor
|
|
radar_part = form_data.get('radar_part')
|
|
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
|
|
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
|
|
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
|
|
data_type = form_data.get('data_type')
|
|
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
# obtain devices_list for deployment_id
|
|
selected_date = selected_date.replace("_","-")
|
|
#timee = LocalDateToUTCEpoch(selected_date, time_zone_s)+5
|
|
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
|
|
sensor_data = {}
|
|
# see https://www.w3schools.com/cssref/css_colors.php
|
|
sensor_props = {"temperature": ["red", "°C"],
|
|
"humidity": ["blue", "%"],
|
|
"voc": ["orange", "PPM"],
|
|
"co2": ["orange", "PPM"],
|
|
"pressure": ["magenta", "Bar"],
|
|
"radar": ["cyan", "%"],
|
|
"light": ["yellow", "Lux"]}
|
|
result_dictionary = {
|
|
"last_report_at": "2025-02-06T20:09:00",
|
|
"color": sensor_props[sensor][0] if sensor in s_table else "grey",
|
|
"units": sensor_props[sensor][1] if sensor in s_table else "?"
|
|
}
|
|
#sensor_mapping = {"co2": "s4", "voc": "s9"}
|
|
#sensor = sensor_mapping.get(sensor, sensor)
|
|
|
|
chart_data = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
|
|
window = sensor_legal_values[sensor][2]
|
|
line_part_t = []
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
|
|
|
|
#compressed_readings = [(time.strftime("%H:%M", time.gmtime(lst[0][0])), float(sum(t for _, t in lst)/len(lst)))
|
|
#for _, lst in ((k, list(g))
|
|
#for k, g in itertools.groupby(cleaned_values, key=lambda x: time.gmtime(x[0]).tm_hour))]
|
|
sensor_data[sensor] = compressed_readings
|
|
chart_data.append({'name': location_name,
|
|
'data': compressed_readings})
|
|
result_dictionary['chart_data'] = chart_data
|
|
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
#is_neste, device_details = check_and_parse(devices_list)
|
|
#if not is_nested:
|
|
#device_ids_list = [device_details[1]]
|
|
#well_ids_list = [device_details[0]]
|
|
#else:
|
|
#device_ids_list = list(map(lambda x: x[1], device_details))
|
|
#well_ids_list =list(map(lambda x: x[0], device_details))
|
|
#well_id = well_ids_list[0]
|
|
#all_slices = {}
|
|
#device_id2_mac = {device_details[1]: device_details[4]}
|
|
#for device_id in device_ids_list:
|
|
#device_id2_mac
|
|
#sensor_data = {}
|
|
#for sensor in sensor_list_loc:
|
|
#st = time.time()
|
|
#line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
#window = sensor_legal_values[sensor][2]
|
|
#line_part_t = []
|
|
#line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
#st = time.time()
|
|
#cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
#cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#sensor_data[sensor] = cleaned_values
|
|
#all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
#dataa = {}
|
|
#dataa['Function'] = "single_slicedata"
|
|
#dataa['devices_list'] = devices_list
|
|
#dataa['all_slices'] = all_slices
|
|
#dataa['time_zone_st'] = time_zone_s
|
|
#dataa['well_id'] = well_id
|
|
#resp.media = package_response(dataa)
|
|
#resp.status = falcon.HTTP_200
|
|
result_dictionary2 = {
|
|
"alert_text": "No alert",
|
|
"alert_color": "bg-green-100 text-green-700",
|
|
"last_report_at": "ISO TIMESTAMP",
|
|
"chart_data": [
|
|
{
|
|
"rooms": [
|
|
{ "name": "Bathroom",
|
|
"data": [
|
|
{"title": "12AM","value": 20},
|
|
{"title": "01AM","value": 20},
|
|
{"title": "02AM","value": 26},
|
|
{"title": "03AM","value": 16},
|
|
{"title": "04AM","value": 27},
|
|
{"title": "05AM","value": 23},
|
|
{"title": "06AM","value": 26},
|
|
{"title": "07AM","value": 17},
|
|
{"title": "08AM","value": 18},
|
|
{"title": "09AM","value": 21},
|
|
{"title": "10AM","value": 28},
|
|
{"title": "11AM","value": 24},
|
|
{"title": "12PM","value": 18},
|
|
{"title": "01PM","value": 27},
|
|
{"title": "02PM","value": 27},
|
|
{"title": "03PM","value": 19},
|
|
{"title": "04PM","value": 0},
|
|
{"title": "05PM","value": 0},
|
|
{"title": "06PM","value": 0},
|
|
{"title": "07PM","value": 0},
|
|
{"title": "08PM","value": 0},
|
|
{"title": "09PM","value": 0},
|
|
{"title": "10PM","value": 0},
|
|
{"title": "11PM","value": 0}
|
|
]
|
|
},
|
|
{ "name": "Kitchen",
|
|
"data": [
|
|
{"title": "00AM","value": 19},
|
|
{"title": "01AM","value": 10},
|
|
{"title": "02AM","value": 8},
|
|
{"title": "03AM","value": 14},
|
|
{"title": "04AM","value": 20},
|
|
{"title": "05AM","value": 8},
|
|
{"title": "06AM","value": 7},
|
|
{"title": "07AM","value": 17},
|
|
{"title": "08AM","value": 3},
|
|
{"title": "09AM","value": 19},
|
|
{"title": "10AM","value": 4},
|
|
{"title": "11AM","value": 6},
|
|
{"title": "12PM","value": 4},
|
|
{"title": "01PM","value": 14},
|
|
{"title": "02PM","value": 17},
|
|
{"title": "03PM","value": 20},
|
|
{"title": "04PM","value": 19},
|
|
{"title": "05PM","value": 15},
|
|
{"title": "06PM","value": 5},
|
|
{"title": "07PM","value": 19},
|
|
{"title": "08PM","value": 3},
|
|
{"title": "09PM","value": 30},
|
|
{"title": "10PM","value": 1},
|
|
{"title": "11PM","value": 12 }
|
|
]
|
|
},
|
|
{ "name": "Living Room",
|
|
"data": [
|
|
{"title": "00AM","value": 25},
|
|
{"title": "01AM","value": 24},
|
|
{"title": "02AM","value": 19},
|
|
{"title": "03AM","value": 20},
|
|
{"title": "04AM","value": 22},
|
|
{"title": "05AM","value": 20},
|
|
{"title": "06AM","value": 11},
|
|
{"title": "07AM","value": 5},
|
|
{"title": "08AM","value": 16},
|
|
{"title": "09AM","value": 22},
|
|
{"title": "10AM","value": 23},
|
|
{"title": "11AM","value": 14},
|
|
{"title": "12PM","value": 0},
|
|
{"title": "01PM","value": 7},
|
|
{"title": "02PM","value": 25},
|
|
{"title": "03PM","value": 29},
|
|
{"title": "04PM","value": 23},
|
|
{"title": "05PM","value": 27},
|
|
{"title": "06PM","value": 27},
|
|
{"title": "07PM","value": 20},
|
|
{"title": "08PM","value": 2},
|
|
{"title": "09PM","value": 24},
|
|
{"title": "10PM","value": 21},
|
|
{"title": "11PM","value": 14 }
|
|
]
|
|
}
|
|
]
|
|
}
|
|
]
|
|
}
|
|
payload = result_dictionary
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
#AddToLog(payload)
|
|
#return
|
|
elif function == "request_device_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
epoch_from_utc = form_data.get('epoch_from')
|
|
epoch_to_utc = form_data.get('epoch_to')
|
|
device_id = form_data.get('device_id')
|
|
well_id = form_data.get('well_id')
|
|
MAC = form_data.get('MAC')
|
|
sensor_list_loc = form_data.get('sensors_list')
|
|
sensor_list = sensor_list_loc.split(",")
|
|
device_ids_list = [device_id]
|
|
well_ids_list = [well_id]
|
|
maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
|
|
|
|
data_type = "RL"
|
|
#epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
|
|
#epoch_to = '1730592010' #small sample to test
|
|
radar_part = form_data.get('radar_part')
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
#device_id2_mac = {device_details[1]: device_details[4]}
|
|
for device_id in device_ids_list:
|
|
#device_id2_mac
|
|
sensor_data = {}
|
|
for sensor in sensor_list:
|
|
st = time.time()
|
|
line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
|
|
window = sensor_legal_values[sensor][2]
|
|
#print("@1", time.time() - st)
|
|
#first = 3300
|
|
#last = 3400
|
|
#line_part = line_part[first:last]
|
|
line_part_t = []
|
|
#st = time.time()
|
|
#line_part_t = [tuple(x[:2]) for x in line_part]
|
|
#print(time.time() - st)
|
|
#st = time.time()
|
|
#line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
|
|
#print(time.time() - st)
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
#cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
|
|
#print("@2", time.time() - st)
|
|
|
|
#Lets add point in minute 0 and minute 1439
|
|
|
|
#st = time.time()
|
|
#cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
|
|
#print("@3", time.time() - st)
|
|
cleaned_values = ScaleToCommon(cleaned_values_t, sensor)
|
|
sensor_data[sensor] = cleaned_values
|
|
all_slices[device_id] = sensor_data
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "device_slicedata"
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['proximity'] = positions_list
|
|
dataa['well_id'] = well_id
|
|
dataa['MAC'] = MAC
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
#return
|
|
elif function == "request_single_radar_slice":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
selected_date = form_data.get('date')
|
|
devices_list = form_data.get('devices_list')
|
|
ctrl_key_state = form_data.get('ctrl_key_state')
|
|
alt_key_state = form_data.get('alt_key_state')
|
|
#devices_list = '[267,560,"?",null,"64B70888F6F0"]'
|
|
#devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
|
|
sensor_index_list = [form_data.get('sensor_index_list')]
|
|
is_nested, device_details = check_and_parse(devices_list)
|
|
if not is_nested:
|
|
device_ids_list = [device_details[1]]
|
|
well_ids_list = [device_details[0]]
|
|
else:
|
|
device_ids_list = list(map(lambda x: x[1], device_details))
|
|
well_ids_list = list(map(lambda x: x[0], device_details))
|
|
|
|
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
|
|
|
|
#epoch_to = '1730592010' #small sample to test
|
|
radar_part = form_data.get('radar_part')
|
|
well_id = well_ids_list[0]
|
|
all_slices = {}
|
|
device_id2_mac = {device_details[1]: device_details[4]}
|
|
for device_id in device_ids_list:
|
|
device_id2_mac
|
|
sensor_data = {}
|
|
for sensor_index in sensor_index_list:
|
|
st = time.time()
|
|
sensor = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"][int(sensor_index)]
|
|
|
|
line_part = ReadRadarDetail(device_id, sensor, epoch_from_utc, epoch_to_utc, alt_key_state)
|
|
window = sensor_legal_values["radar"][2]
|
|
|
|
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
|
|
st = time.time()
|
|
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
|
|
cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
|
|
if len(sensor) < 4:
|
|
sensor_data[sensor+"_max"] = cleaned_values
|
|
else:
|
|
sensor_data[sensor] = cleaned_values
|
|
all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "single_slicedata"
|
|
dataa['devices_list'] = devices_list
|
|
dataa['all_slices'] = all_slices
|
|
dataa['time_zone_st'] = time_zone_s
|
|
dataa['well_id'] = well_id
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
elif function == "get_deployment":
|
|
blob_data = read_file("deployment.html")
|
|
deployment_id = form_data.get('deployment_id')
|
|
#lets update "Deployments" select
|
|
users = GetUsersFromDeployments(privileges)
|
|
blob_data = UpdateDeploymentsSelector(blob_data, users, False, deployment_id)
|
|
|
|
resp.content_type = "text/html"
|
|
resp.text = blob_data
|
|
return
|
|
elif function == "get_deployment_j":
|
|
deployment_id = form_data.get('deployment_id')
|
|
time_zone_st = GetTimeZoneOfDeployment(deployment_id)
|
|
date = form_data.get('date')
|
|
if date == None:
|
|
|
|
# Get today's date
|
|
local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
|
|
date = datetime.datetime.now(local_timezone).strftime('%Y-%m-%d')
|
|
|
|
#epoch_from_utc = int(datetime.datetime.strptime(date, "%Y-%m-%d").timestamp())
|
|
#devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "deployment_details"
|
|
if privileges == "-1":
|
|
deployment = DeploymentDetails(deployment_id)
|
|
dataa['deployment_details'] = deployment
|
|
else:
|
|
privileges = privileges.split(",")
|
|
if deployment_id in privileges:
|
|
deployment = DeploymentDetails(deployment_id)
|
|
dataa['deployment_details'] = deployment
|
|
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif function == "set_floor_layout":
|
|
deployment_id = form_data.get('deployment_id')
|
|
layout = form_data.get('layout')
|
|
|
|
if privileges == "-1" or deployment_id in privileges:
|
|
ok = StoreFloorPlan(deployment_id, layout)
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
else:
|
|
payload = {'ok': 0, 'error': "not allowed"}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif function == "get_floor_layout":
|
|
deployment_id = form_data.get('deployment_id')
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "deployment_details"
|
|
if privileges == "-1":
|
|
layout = GetFloorPlan(deployment_id)
|
|
dataa['layout'] = layout
|
|
else:
|
|
privileges = privileges.split(",")
|
|
if deployment_id in privileges:
|
|
layout = GetFloorPlan(deployment_id)
|
|
dataa['layout'] = layout
|
|
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif function == "get_beneficiary":
|
|
user_id = form_data.get('user_id')
|
|
all_beneficiaries = ListBeneficiaries(privileges, user_id)
|
|
beneficiaries_list = []
|
|
for beneficiary_temp in all_beneficiaries:
|
|
beneficiaries_list.append(str(beneficiary_temp[0]))
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "beneficiary_details"
|
|
if user_id in beneficiaries_list:
|
|
beneficiary = UserDetails(user_id)
|
|
#let's remove fields not relevant for the beneficiary
|
|
try:
|
|
del beneficiary['time_edit']
|
|
except:
|
|
pass
|
|
|
|
try:
|
|
del beneficiary['user_edit']
|
|
except:
|
|
pass
|
|
|
|
try:
|
|
del beneficiary['access_to_deployments']
|
|
except:
|
|
pass
|
|
dataa['beneficiary_details'] = beneficiary
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "get_caretaker":
|
|
|
|
user_name = form_data.get('user_name')
|
|
|
|
|
|
all_caretakers = ListCaretakers(privileges, user_name)
|
|
if len(all_caretakers) > 1:
|
|
user_id = form_data.get('user_id')
|
|
elif len(all_caretakers) == 1:
user_id = str(all_caretakers[0][0])
else:
user_id = None  #no caretakers visible; the membership check below will simply not match
|
|
|
|
caretakers_list = []
|
|
for caretakers_temp in all_caretakers:
|
|
caretakers_list.append(str(caretakers_temp[0]))
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "caretaker_details"
|
|
if user_id in caretakers_list:
|
|
caretaker = UserDetails(user_id)
|
|
#let's remove fields not relevant for the caretaker
|
|
try:
|
|
del caretaker['time_edit']
|
|
except:
|
|
pass
|
|
|
|
try:
|
|
del caretaker['user_edit']
|
|
except:
|
|
pass
|
|
|
|
dataa['caretaker_details'] = caretaker
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_device":
|
|
device_id = form_data.get('device_id')
|
|
devices = GetVisibleDevices(privileges)
|
|
dataa = {}
|
|
dataa['Function'] = "device_details"
|
|
dataa['device_details'] = []
|
|
if privileges == "-1":
|
|
#device_det = GetDeviceDetails(device_id)
|
|
device_det = GetDeviceDetailsSingle(device_id)
|
|
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
|
|
device_det['radar_threshold'] = '["s3_max",12]'
|
|
dataa['device_details'] = device_det
|
|
else:
|
|
devices_list = []
|
|
for device_id_temp in devices:
|
|
devices_list.append(str(device_id_temp[0]))
|
|
|
|
if device_id in devices_list:
|
|
device_det = GetDeviceDetailsSingle(device_id)
|
|
if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
|
|
device_det['radar_threshold'] = '["s3_max",12]'
|
|
|
|
|
|
dataa['device_details'] = device_det
|
|
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
|
|
elif function == "request_deployment_map_new":
|
|
st = time.time()
|
|
print(f"$0 ----{time.time() - st}")
|
|
deployment_id = form_data.get('deployment_id')
|
|
map_type = form_data.get('map_type')
|
|
print(f"$1 ----{time.time() - st}")
|
|
maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
|
|
print(f"$2 ----{time.time() - st}")
|
|
datee = form_data.get('date')
|
|
if maps_dates != []:
|
|
|
|
if datee == "2022-4-2": #that is the default in the HTML, so disregard it
|
|
datee = maps_dates[0]
|
|
|
|
locations_desc_map = {}
|
|
for details in positions_list:
|
|
well_id = details[0]
|
|
location = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
location = location +" "+ details[3]
|
|
|
|
if details[6] != None and details[6] != "":
|
|
location = location +" "+ details[6]
|
|
|
|
MAC = details[4]
|
|
locations_desc_map[well_id] = location
|
|
print(f"$3 ----{time.time() - st}")
|
|
|
|
dataa = {}
|
|
dataa['Function'] = "deployments_maps_report"
|
|
dataa['proximity'] = positions_list
|
|
maps_dates.sort(reverse=True)
|
|
dataa['maps_dates'] = maps_dates
|
|
dataa['device_count'] = len(positions_list)
|
|
dataa['time_zone'] = timezone_s
|
|
dataa['map_type'] = map_type
|
|
|
|
#MACs_list = GetMACsListSimple(positions_list)
|
|
#MACs_map = {}
|
|
|
|
#for details in positions_list:
|
|
# id = details[0]
|
|
# MAC = details[3]
|
|
# MACs_map[id] = MAC
|
|
#for i in range(len(MACs_list)):
|
|
# MACs_map[devices_list[i]] = MACs_list[i][0]
|
|
|
|
id = positions_list[0][0]
|
|
#dataa['MACs_map'] = MACs_map
|
|
dataa['locations_desc_map'] = locations_desc_map
|
|
#proximity_list = proximity.split(",")
|
|
print(f"$4 ----{time.time() - st}")
|
|
|
|
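#Build the checkbox fragment consumed by the page: one "<label>><input .../>" entry per
#sensor, then one per device (only the first device is pre-checked). When the first well id
#is 200 or above, the per-channel VOC checkboxes (S0..S9) are emitted instead of the single
#VOC checkbox.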
if id < 200:
|
|
checkmarks_string = 'T><input checked type="checkbox" id="t-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'H><input type="checkbox" id="h-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'P><input type="checkbox" id="p-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'C><input type="checkbox" id="c-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'V><input type="checkbox" id="v-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'L><input type="checkbox" id="l-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'R><input type="checkbox" id="r-check" value="0" onchange="UpdateSelections();"/><br>'
|
|
else: #>200 = ["Temperature", "Humidity", "Pressure", "Light", "Radar", "VOC"]
|
|
|
|
checkmarks_string = 'T><input checked type="checkbox" id="t-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'H><input type="checkbox" id="h-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'P><input type="checkbox" id="p-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'L><input type="checkbox" id="l-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'R><input type="checkbox" id="r-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
|
|
checkmarks_string = checkmarks_string + 'S0><input type="checkbox" id="v0-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S1><input type="checkbox" id="v1-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S2><input type="checkbox" id="v2-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S3><input type="checkbox" id="v3-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S4><input type="checkbox" id="v4-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S5><input type="checkbox" id="v5-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S6><input type="checkbox" id="v6-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S7><input type="checkbox" id="v7-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S8><input type="checkbox" id="v8-check" value="0" onchange="UpdateSelections();"/>\n'
|
|
checkmarks_string = checkmarks_string + 'S9><input type="checkbox" id="v9-check" value="0" onchange="UpdateSelections();"/><br>'
|
|
|
|
checked_or_not = " checked"
|
|
|
|
for index in range(len(positions_list)):
|
|
details = positions_list[index]
|
|
device_id = details[0]
|
|
location = details[2]
|
|
if details[3] != None and details[3] != "":
|
|
location = location + " " + details[3]
|
|
|
|
if details[6] != None and details[6] != "":
|
|
location = location + " " + details[6]
|
|
|
|
checkmarks_string = checkmarks_string + str(device_id) + '><input'+checked_or_not+' type="checkbox" id="device_check'+str(index)+'" value="0" title="'+location+'" onchange="UpdateSelections();" />\n'
|
|
checked_or_not = ''
|
|
|
|
print(f"$5 ----{time.time() - st}")
|
|
|
|
dataa['checkmarks'] = checkmarks_string
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "request_proximity":
|
|
deployment = form_data.get('deployment_id')
|
|
timee = form_data.get('time')
|
|
#timee = StringToEpoch(datee)
|
|
#print(deployment, timee)
|
|
well_ids, device_ids = GetProximityList(deployment, timee)
|
|
#print(proximity)
|
|
dataa = {}
|
|
dataa['Function'] = "proximity_report"
|
|
if len(well_ids) > 0:
|
|
dataa['proximity'] = well_ids
|
|
else:
|
|
dataa['proximity'] = []
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
elif function == "request_devices":
|
|
deployment_id = form_data.get('deployment_id')
|
|
group_id = form_data.get('group_id')
|
|
location = form_data.get('location')
|
|
if location == "0":
|
|
location = "All"
|
|
is_fresh = form_data.get('is_fresh')
|
|
matching_devices = GetMatchingDevices(privileges, group_id, deployment_id, location)
|
|
dataa = {}
|
|
dataa['Function'] = "devices_report"
|
|
if len(matching_devices) > 0:
|
|
dataa['devices'] = matching_devices
|
|
else:
|
|
dataa['devices'] = []
|
|
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "get_deployment_details":
|
|
deployment_id = form_data.get('deployment_id')
|
|
group_id = form_data.get('group_id')
|
|
location = form_data.get('location')
|
|
if location == "0":
|
|
location = "All"
|
|
is_fresh = form_data.get('is_fresh')
|
|
matching_devices = GetMatchingDevicesComplete(privileges, group_id, deployment_id, location)
|
|
deployment = DeploymentDetails(deployment_id)
|
|
dataa = {}
|
|
dataa['Function'] = "devices_report"
|
|
if len(matching_devices) > 0:
|
|
dataa['devices'] = matching_devices
|
|
else:
|
|
dataa['devices'] = []
|
|
|
|
if len(deployment) > 0:
|
|
dataa['details'] = deployment
|
|
else:
|
|
dataa['details'] = {}
|
|
resp.media = package_response(dataa)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "device_form":
|
|
editing_device_id = form_data.get('editing_device_id')
|
|
|
|
ok = StoreDevice2DB(form_data, editing_device_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "device_delete":
|
|
|
|
#check if admin!
|
|
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_raw_data":
|
|
#container = GetReference("/MAC")
|
|
#MAC = req_dict["MAC"][0]
|
|
#sensor = req_dict["sensor"][0]
|
|
#if "part" in req_dict:
|
|
#part = req_dict["part"][0]
|
|
#else:
|
|
#part = ""
|
|
#from_time = req_dict["from_time"][0]
|
|
#to_time = req_dict["to_time"][0]
|
|
#timezone_str = req_dict["tzone"][0]
|
|
#AddToLog("get_raw_data:" + str(MAC) +","+ str(sensor) + "," + str(from_time) + "," + str(to_time) + "," + part+ "," + timezone_str)
|
|
##raw_data = GetRawSensorData(container, MAC, sensor, from_time, to_time, timezone_str)
|
|
#raw_data = []#GetRawSensorDataFromBlobStorage(MAC, sensor, part, from_time, to_time, timezone_str)
|
|
raw_data = []  #retrieval above is commented out, so return an empty list instead of raising a NameError
data_payload = {'raw_data': raw_data}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_presence_data":
|
|
|
|
deployment_id = form_data.get('deployment_id')
|
|
device_id_in_s = form_data.get('device_id')
|
|
device_id_in = None
|
|
refresh = form_data.get('refresh') == "1"
|
|
|
|
if privileges != "-1":
|
|
privileges_lst = privileges.split(",")
|
|
if deployment_id not in privileges_lst:
|
|
data_payload = {}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
filter = int(form_data.get('filter'))
|
|
ddate = form_data.get('date')
|
|
ddate = ddate.replace("_","-")
|
|
to_date = form_data.get('to_date')
|
|
|
|
if to_date == None:
|
|
to_date = ddate
|
|
else:
|
|
to_date = to_date.replace("_","-")
|
|
|
|
ddate, to_date = ensure_date_order(ddate, to_date)
|
|
|
|
|
|
date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
|
|
# Subtract one day
|
|
previous_day = date_obj - timedelta(days=1)
|
|
# Convert back to string
|
|
prev_date = previous_day.strftime("%Y-%m-%d")
|
|
|
|
data_type = form_data.get('data_type') #all, raw, presence, z-graph
|
|
if data_type == None or data_type == "":
|
|
data_type = "presence"
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add 5 seconds so the exact date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
if device_id_in_s != None: #lets remove other devices, since asking for one
|
|
device_id_in = int(device_id_in_s)
|
|
device_ids = [id for id in device_ids if id == device_id_in]
|
|
devices_list = [device for device in devices_list if device[1] == device_id_in]
|
|
|
|
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
|
|
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
|
|
|
|
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
|
|
epoch_time = calendar.timegm(time_from.utctimetuple())
|
|
|
|
presence_map = {}
|
|
presence_map["time_start"] = epoch_time
|
|
presence_map["time_zone"] = time_zone_s
|
|
|
|
# Calculate the difference in days
|
|
days_difference = (time_to - time_from).days
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
|
|
# Convert string to datetime object
|
|
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
# Subtract one day
|
|
previous_day = date_obj - timedelta(days=1)
|
|
|
|
# Format back to string in the same format
|
|
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
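#radar_threshold_group is stored as a JSON pair [signal_name, threshold], e.g. '["s3_max",12]';
#missing or too-short values fall back to the default pair below.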
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",12]
|
|
|
|
print(well_id, radar_threshold_group)
|
|
device_id_2_location[device_id] = location_name
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
|
|
ids_list = []
|
|
well_ids = []
|
|
id2well_id = {}
|
|
radar_fields_of_interest = []
|
|
device_field_indexes = {}
|
|
for details in devices_list:
|
|
|
|
if device_id_in == None or details[1] == device_id_in:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except:
|
|
threshold_lst = ["s3",12]
|
|
#threshold_lst = ["s3_max",12]
|
|
|
|
radar_field = threshold_lst[0]
|
|
#since we are getting 10-second data, the min/max variants are no longer needed
|
|
radar_field = radar_field.split("_")[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
device_field_indexes[radar_field] = len(radar_fields_of_interest)
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
ids_list.append(details[1])
|
|
id2well_id[details[1]] = details[0]
|
|
well_ids.append(details[0])
|
|
presence_map["well_ids"] = well_ids
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
#zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(zsql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = None
|
|
myz_data = None
|
|
|
|
my_data = cur.fetchall()
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
cur.execute(zsql)
|
|
myz_data = cur.fetchall()
|
|
|
|
if my_data != None and len(my_data) > 0: #guard against an empty result before my_data[0][0] is read below
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
|
|
#presence_map['longpresence'] and temporary_map_day_plus are similar; one feeds the Z-graph and the other the multiple-person detection
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
presence_map['presence'] = {}
|
|
presence_map['longpresence'] = {}
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
presence_map['raw'] = {}
|
|
|
|
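#Each presence/raw series holds one slot per 10-second interval ("deca"):
#6 per minute * 1440 minutes = 8640 slots per day, times the number of requested days.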
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['raw'][well_id] = zeros_list
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['presence'][well_id] = zeros_list
|
|
|
|
|
|
#presence_map[][well_id] = zeros_list
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = well_id
|
|
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3",12]
|
|
|
|
device_id_2_location[well_id] = location_name
|
|
device_id_2_threshold[well_id] = radar_threshold_group
|
|
|
|
start_time_ = my_data[0][0]
|
|
parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
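#NOTE: start_time below hard-codes a UTC-7 offset, which assumes a US Pacific deployment
#during DST; other time zones would need this derived from time_zone_s instead.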
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour - 7, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
|
|
)
|
|
|
|
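#optimized_radar_processing fills the deca-indexed presence/raw arrays from the query rows
#using each device's threshold; the commented-out loop below shows the per-row logic it
#supersedes.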
presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
|
|
|
|
#last_device_id = 0
|
|
#for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
|
|
#local_time = radar_read[0]
|
|
#deca = int((local_time - start_time).total_seconds() / 10)
|
|
#device_id = radar_read[1]
|
|
#if device_id != last_device_id:
|
|
#last_device_id = device_id
|
|
#if data_type == "raw" or data_type == "all":
|
|
#days_decas = len(presence_map['raw'][id2well_id[device_id]])
|
|
#else:
|
|
#days_decas = len(presence_map['presence'][id2well_id[device_id]])
|
|
#well_id = id2well_id[device_id]
|
|
#radar_threshold_group_st = device_id_2_threshold[well_id]
|
|
#threshold_sig, threshold = radar_threshold_group_st
|
|
#threshold_sig = threshold_sig.split("_")[0]
|
|
|
|
#radar_val = radar_read[2+device_field_indexes[threshold_sig]]
|
|
#if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
#if radar_val > threshold:
|
|
#if deca < days_decas:
|
|
#presence_map['presence'][id2well_id[device_id]][deca] = 1
|
|
|
|
#if data_type == "raw" or data_type == "all":
|
|
#if deca < days_decas:
|
|
#presence_map['raw'][id2well_id[device_id]][deca] = radar_val
|
|
|
|
|
|
if myz_data != None:
|
|
temporary_map_day_plus = {}
|
|
presence_map['z_graph'] = {}
|
|
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * (days_difference + 1) #+1 is for previous day
|
|
|
|
presence_map['z_graph'][well_id] = [] #just place holder
|
|
temporary_map_day_plus[well_id] = zeros_list
|
|
presence_map['longpresence'][well_id] = zeros_list #just place holder
|
|
|
|
|
|
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour - 7, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
|
|
)
|
|
|
|
|
|
#start_time_ = myz_data[0][0]
|
|
st = time.time()
|
|
device_lookup_cache = {}
|
|
threshold_cache = {}
|
|
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
|
|
|
|
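#Overlap handling: the deployment can store a JSON description of sensors whose coverage
#overlaps; ClearOverlaps presumably removes the double-counted presence before the
#multiple-person detection further down.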
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
|
|
overlaps_str = GetOverlapps(deployment_id)
|
|
overlaps_lst = []
|
|
if overlaps_str != None:
|
|
if ":" in overlaps_str:
|
|
overlaps_lst = json.loads(overlaps_str)
|
|
temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
|
|
for device_id in ids_list:
|
|
device_id_str = str(device_id)
|
|
|
|
if data_type == "presence" or data_type == "all":
|
|
if filter > 1:
|
|
#presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
#cnt = 0
|
|
#device_id_str = 524 Kitchen
|
|
#for item in presence_map["presence"][id2well_id[device_id]]:
|
|
# if item > 0:
|
|
# print(cnt, item)
|
|
# cnt += 1
|
|
|
|
#3302 = 1 should not be filtered
|
|
|
|
inlist = presence_map["presence"][id2well_id[device_id]]
|
|
#presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s, refresh)
|
|
presence_list = filter_short_groups_c_wc(inlist, filter, device_id_str, ddate, to_date, time_zone_s, refresh)
|
|
|
|
#cnt = 0 #here First non 0 is at 12006 That is wrong!
|
|
#for item in presence_list:
|
|
# if item > 0:
|
|
# print(cnt, item)
|
|
# cnt += 1
|
|
#presence_listt = filter_short_groupss(presence_map["presence"][id2well_id[device_id]], filter)
|
|
#if presence_list != presence_listt:
|
|
# print("stop")
|
|
if data_type != "presence":
|
|
#longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
|
|
presence_map["presence"][id2well_id[device_id]] = presence_list
|
|
if data_type != "presence":
|
|
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
|
|
|
|
else: #straight decas
|
|
presence_list = presence_map["presence"][id2well_id[device_id]]
|
|
|
|
if data_type != "presence":
|
|
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
|
|
|
|
|
|
if data_type == "z-graph":
|
|
if filter > 1:
|
|
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s, refresh)
|
|
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
|
|
|
|
else: #straight decas
|
|
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
|
|
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
if filter > 1: #straight decas
|
|
#presence_list1 = filter_short_high_groups_iterative_analog_orig(temporary_map_day_plus[id2well_id[device_id]], filter)
|
|
presence_list1 = filter_short_high_groups_iterative_analog(temporary_map_day_plus[id2well_id[device_id]], filter)
|
|
#if (presence_list1 == presence_list2):
|
|
# print("OK!")
|
|
#else:
|
|
# print("WRONG!")
|
|
else:
|
|
presence_list1 = temporary_map_day_plus[id2well_id[device_id]]
|
|
|
|
temporary_map_day_plus[id2well_id[device_id]] = presence_list1
|
|
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
for device_id in ids_list:
|
|
#print(device_id_2_threshold[id2well_id[device_id]])
|
|
presence_list = CreateZGraph(id2well_id[device_id], presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
|
|
presence_map["z_graph"][id2well_id[device_id]] = presence_list
|
|
|
|
|
|
if data_type == "all" or data_type == "multiple":
|
|
#lets create "multiple" series
|
|
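#DetectMultiple appears to flag decas where more than one room reports presence (seen_at_lst)
#and, per deca, which rooms were involved (seen_where_list_uf). The first day's worth of decas
#is skipped because the z-graph query starts one day early, and the person count is scaled by
#100 for plotting.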
seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
|
|
#here seen_at is straight decas
|
|
#seen_at = [1 if x >= 2 else 0 for x in seen_at]
|
|
pers_in_deka = []
|
|
dekas_in_day = 6 * 1440
|
|
for i in range(dekas_in_day, len(seen_where_list_uf)):
|
|
n_pers = seen_where_list_uf[i]
|
|
pers_in_deka.append(100*len(n_pers))
|
|
|
|
seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
|
|
seen_at_lst = Decompress(seen_at)
|
|
pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
|
|
persons_decompressed = Decompress(pers_in_deka)
|
|
persons = Compress(persons_decompressed)
|
|
|
|
multiple_list = CreateZGraph("multiple", seen_at_lst)
|
|
presence_map["multiple"] = multiple_list
|
|
presence_map["persons"] = persons
|
|
|
|
if data_type == "z-graph":
|
|
if "raw" in presence_map:
|
|
del presence_map["raw"]
|
|
if "presence" in presence_map:
|
|
del presence_map["presence"]
|
|
if "longpresence" in presence_map:
|
|
del presence_map["longpresence"]
|
|
|
|
if data_type == "multiple":
|
|
if "raw" in presence_map:
|
|
del presence_map["raw"]
|
|
if "presence" in presence_map:
|
|
del presence_map["presence"]
|
|
if "longpresence" in presence_map:
|
|
del presence_map["longpresence"]
|
|
if "z_graph" in presence_map:
|
|
del presence_map["z_graph"]
|
|
|
|
if "presence" in presence_map:
|
|
presence_map["presence"] = CompressList(presence_map["presence"])
|
|
|
|
data_payload = presence_map
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_zgraph_data":
|
|
|
|
deployment_id = form_data.get('deployment_id')
|
|
|
|
if privileges != "-1":
|
|
privileges_lst = privileges.split(",")
|
|
if deployment_id not in privileges_lst:
|
|
data_payload = {}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
device_id = int(form_data.get('device_id'))
|
|
|
|
devices = GetVisibleDevices(privileges)
|
|
|
|
if not any(item[0] == device_id for item in devices):
|
|
data_payload = {}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
filter = int(form_data.get('filter'))
|
|
ddate = form_data.get('date')
|
|
ddate = ddate.replace("_","-")
|
|
to_date = form_data.get('to_date')
|
|
|
|
if to_date == None:
|
|
to_date = ddate
|
|
else:
|
|
to_date = to_date.replace("_","-")
|
|
|
|
ddate, to_date = ensure_date_order(ddate, to_date)
|
|
data_type = "z-graph"
|
|
|
|
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
|
|
timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add 5 seconds so the exact date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
|
|
_, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
|
|
|
|
time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
|
|
epoch_time = calendar.timegm(time_from.utctimetuple())
|
|
|
|
presence_map = {}
|
|
presence_map["time_start"] = epoch_time
|
|
presence_map["time_zone"] = time_zone_s
|
|
|
|
# Calculate the difference in days
|
|
days_difference = (time_to - time_from).days
|
|
|
|
# Convert string to datetime object
|
|
date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
|
|
# Subtract one day
|
|
previous_day = date_obj - timedelta(days=1)
|
|
|
|
# Format back to string in the same format
|
|
time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
|
|
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
|
|
for details in devices_list:
|
|
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3_max",12]
|
|
|
|
print(well_id, radar_threshold_group)
|
|
|
|
device_id_2_location[device_id] = location_name
|
|
device_id_2_threshold[device_id] = radar_threshold_group
|
|
|
|
ids_list = []
|
|
well_ids = []
|
|
id2well_id = {}
|
|
radar_fields_of_interest = []
|
|
device_field_indexes = {}
|
|
for details in devices_list:
|
|
threshold_str = details[5]
|
|
try:
|
|
threshold_lst = json.loads(threshold_str)
|
|
except:
|
|
threshold_lst = ["s3",12]
|
|
#threshold_lst = ["s3_max",12]
|
|
|
|
radar_field = threshold_lst[0]
|
|
#since we are getting 10-second data, the min/max variants are no longer needed
|
|
radar_field = radar_field.split("_")[0]
|
|
if radar_field not in radar_fields_of_interest:
|
|
device_field_indexes[radar_field] = len(radar_fields_of_interest)
|
|
radar_fields_of_interest.append(radar_field)
|
|
|
|
ids_list.append(details[1])
|
|
id2well_id[details[1]] = details[0]
|
|
well_ids.append(details[0])
|
|
presence_map["well_ids"] = well_ids
|
|
|
|
|
|
devices_list_str = ','.join(str(device[1]) for device in devices_list)
|
|
#sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(sql)
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
#zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
|
|
print(zsql)
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(sql)
|
|
my_data = None
|
|
myz_data = None
|
|
|
|
my_data = cur.fetchall()
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
cur.execute(zsql)
|
|
myz_data = cur.fetchall()
|
|
|
|
if my_data != None and len(my_data) > 0: #guard against an empty result before my_data[0][0] is read below
|
|
|
|
device_id_2_threshold = {}
|
|
device_id_2_location = {0: "Outside"}
|
|
row_nr_2_device_id = {}
|
|
cnt = 0
|
|
row_nr_2_device_id[0] = 0
|
|
|
|
#presence_map['longpresence'] and temporary_map_day_plus are similar; one feeds the Z-graph and the other the multiple-person detection
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
presence_map['presence'] = {}
|
|
presence_map['longpresence'] = {}
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
presence_map['raw'] = {}
|
|
|
|
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "raw" or data_type == "all":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['raw'][well_id] = zeros_list
|
|
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * days_difference
|
|
presence_map['presence'][well_id] = zeros_list
|
|
|
|
|
|
#presence_map[][well_id] = zeros_list
|
|
cnt += 1
|
|
row_nr_2_device_id[cnt] = well_id
|
|
|
|
if radar_threshold_group_st == None:
|
|
radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
|
|
|
|
if len(radar_threshold_group_st) > 8:
|
|
radar_threshold_group = json.loads(radar_threshold_group_st)
|
|
else:
|
|
radar_threshold_group = ["s3",12]
|
|
|
|
device_id_2_location[well_id] = location_name
|
|
device_id_2_threshold[well_id] = radar_threshold_group
|
|
|
|
start_time_ = my_data[0][0]
|
|
parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour - 7, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
|
|
)
|
|
|
|
presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
|
|
|
|
#last_device_id = 0
|
|
#for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
|
|
#local_time = radar_read[0]
|
|
#deca = int((local_time - start_time).total_seconds() / 10)
|
|
#device_id = radar_read[1]
|
|
#if device_id != last_device_id:
|
|
#last_device_id = device_id
|
|
#if data_type == "raw" or data_type == "all":
|
|
#days_decas = len(presence_map['raw'][id2well_id[device_id]])
|
|
#else:
|
|
#days_decas = len(presence_map['presence'][id2well_id[device_id]])
|
|
#well_id = id2well_id[device_id]
|
|
#radar_threshold_group_st = device_id_2_threshold[well_id]
|
|
#threshold_sig, threshold = radar_threshold_group_st
|
|
#threshold_sig = threshold_sig.split("_")[0]
|
|
|
|
#radar_val = radar_read[2+device_field_indexes[threshold_sig]]
|
|
#if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
#if radar_val > threshold:
|
|
#if deca < days_decas:
|
|
#presence_map['presence'][id2well_id[device_id]][deca] = 1
|
|
|
|
#if data_type == "raw" or data_type == "all":
|
|
#if deca < days_decas:
|
|
#presence_map['raw'][id2well_id[device_id]][deca] = radar_val
|
|
|
|
|
|
if myz_data != None:
|
|
temporary_map_day_plus = {}
|
|
presence_map['z_graph'] = {}
|
|
for details in devices_list:
|
|
#(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
zeros_list = [0] * 6 * 1440 * (days_difference + 1) #+1 is for previous day
|
|
|
|
presence_map['z_graph'][well_id] = [] #just place holder
|
|
temporary_map_day_plus[well_id] = zeros_list
|
|
presence_map['longpresence'][well_id] = zeros_list #just place holder
|
|
|
|
|
|
parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
|
|
|
|
start_time = datetime.datetime(
|
|
parsed_time.year,
|
|
parsed_time.month,
|
|
parsed_time.day,
|
|
parsed_time.hour - 7, # Adjust for UTC-7
|
|
parsed_time.minute,
|
|
parsed_time.second,
|
|
tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
|
|
)
|
|
|
|
|
|
#start_time_ = myz_data[0][0]
|
|
st = time.time()
|
|
device_lookup_cache = {}
|
|
threshold_cache = {}
|
|
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
|
|
overlaps_str = GetOverlapps(deployment_id)
|
|
overlaps_lst = []
|
|
if overlaps_str != None:
|
|
if ":" in overlaps_str:
|
|
overlaps_lst = json.loads(overlaps_str)
|
|
temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
|
|
|
|
if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
|
|
for device_id in ids_list:
|
|
device_id_str = str(device_id)
|
|
if data_type == "presence" or data_type == "all" or data_type == "z-graph":
|
|
if filter > 1:
|
|
#presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
|
|
#presence_listt = filter_short_groupss(presence_map["presence"][id2well_id[device_id]], filter)
|
|
#if presence_list != presence_listt:
|
|
# print("stop")
|
|
if data_type != "presence":
|
|
#longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
|
|
longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
|
|
#longpresence_listt = filter_short_groupss(presence_map["longpresence"][id2well_id[device_id]], filter)
|
|
#if longpresence_list != longpresence_listt:
|
|
# print("stop")
|
|
# store_to_file(presence_map["longpresence"][id2well_id[device_id]], "test_list")
|
|
presence_map["presence"][id2well_id[device_id]] = presence_list
|
|
if data_type != "presence":
|
|
presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
|
|
|
|
else: #straight decas
|
|
presence_list = presence_map["presence"][id2well_id[device_id]]
|
|
|
|
if data_type != "presence":
|
|
longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
|
|
|
|
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
if filter > 1: #straight decas
|
|
presence_list1 = filter_short_high_groups_iterative_analog(temporary_map_day_plus[id2well_id[device_id]], filter)
|
|
else:
|
|
presence_list1 = temporary_map_day_plus[id2well_id[device_id]]
|
|
|
|
temporary_map_day_plus[id2well_id[device_id]] = presence_list1
|
|
|
|
|
|
if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
|
|
for device_id in ids_list:
|
|
#print(device_id_2_threshold[id2well_id[device_id]])
|
|
presence_list = CreateZGraph(id2well_id[device_id], presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
|
|
presence_map["z_graph"][id2well_id[device_id]] = presence_list
|
|
|
|
|
|
if data_type == "all" or data_type == "multiple":
|
|
#lets create "multiple" series
|
|
seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
|
|
#here seen_at is straight decas
|
|
#seen_at = [1 if x >= 2 else 0 for x in seen_at]
|
|
pers_in_deka = []
|
|
dekas_in_day = 6 * 1440
|
|
for i in range(dekas_in_day, len(seen_where_list_uf)):
|
|
n_pers = seen_where_list_uf[i]
|
|
pers_in_deka.append(100*len(n_pers))
|
|
|
|
seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
|
|
seen_at_lst = Decompress(seen_at)
|
|
pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
|
|
persons_decompressed = Decompress(pers_in_deka)
|
|
persons = Compress(persons_decompressed)
|
|
|
|
multiple_list = CreateZGraph("multiple", seen_at_lst)
|
|
presence_map["multiple"] = multiple_list
|
|
presence_map["persons"] = persons
|
|
|
|
if data_type == "z-graph":
|
|
if "raw" in presence_map:
|
|
del presence_map["raw"]
|
|
if "presence" in presence_map:
|
|
del presence_map["presence"]
|
|
if "longpresence" in presence_map:
|
|
del presence_map["longpresence"]
|
|
|
|
if data_type == "multiple":
|
|
if "raw" in presence_map:
|
|
del presence_map["raw"]
|
|
if "presence" in presence_map:
|
|
del presence_map["presence"]
|
|
if "longpresence" in presence_map:
|
|
del presence_map["longpresence"]
|
|
if "z_graph" in presence_map:
|
|
del presence_map["z_graph"]
|
|
|
|
if "presence" in presence_map:
|
|
presence_map["presence"] = CompressList(presence_map["presence"])
|
|
|
|
data_payload = presence_map
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "get_candle_data":
|
|
container = GetReference("/MAC")
|
|
MAC = req_dict["MAC"][0]
|
|
sensor = req_dict["sensor"][0]
|
|
from_time = req_dict["from_time"][0]
|
|
to_time = req_dict["to_time"][0]
|
|
part = req_dict["part"][0]
|
|
tzone = req_dict["tzone"][0]
|
|
AddToLog(str(req_dict))
|
|
candle_data = GetCandleSensorData(container, MAC, sensor, from_time, to_time, part, tzone)
|
|
data_payload = {'candle_data': candle_data}
|
|
resp.media = package_response(data_payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "deployment_form":
|
|
editing_deployment_id = form_data.get('editing_deployment_id')
|
|
|
|
ok = StoreDeployment2DB(form_data, editing_deployment_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "deployment_delete":
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "deployments_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
user_id = form_data.get('user_id')
|
|
first = 0
|
|
last = 1000000
|
|
|
|
try:
|
|
if first_s != None:
|
|
first = int(first_s)
|
|
except ValueError:
|
|
pass
|
|
|
|
try:
|
|
if last_s != None:
|
|
last = int(last_s)
|
|
except ValueError:
|
|
pass
|
|
|
|
#user_id = form_data.get('user_id')
|
|
if user_id == "" or user_id == None:
|
|
#user_id = GetUserId(user_name)
|
|
privileges, user_id = GetPriviledgesAndUserId(user_name)
|
|
else:
|
|
privileges = GetPriviledgesOnly(user_name)
|
|
|
|
all_deployments = ListDeployments(privileges, user_id)
|
|
|
|
cnt = 0
|
|
|
|
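#Simple windowed pagination: walk the visible deployments and emit summary rows whose running
#index falls between 'first' and 'last'; names and e-mails come from the user_id_2_user lookup.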
for deployment in all_deployments:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": user_id_2_user[deployment['beneficiary_id']][3], "first_name": user_id_2_user[deployment['beneficiary_id']][5], "last_name": user_id_2_user[deployment['beneficiary_id']][6]}
|
|
result_list.append(caretaker_min_object)
|
|
if cnt > last:
|
|
break
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
elif function == "device_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
last = 1000000
|
|
|
|
#user_id = form_data.get('user_id')
|
|
|
|
devices = GetVisibleDevices(privileges)
|
|
|
|
payload = {'result_list': devices}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "device_list_by_deployment":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
deployment_id = form_data.get('deployment_id')
|
|
try:
|
|
first = int(first_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
last = 1000000
|
|
|
|
|
|
devices = []  #default when the caller has no access to this deployment
if privileges == "-1":
|
|
devices = GetVisibleDevices(deployment_id)
|
|
else:
|
|
privileges = privileges.split(",")
|
|
if deployment_id in privileges:
|
|
devices = GetVisibleDevices(deployment_id)
|
|
|
|
payload = {'result_list': devices}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "device_list_4_gui":
|
|
result_list = []
|
|
deploymentData = []
|
|
deviceData = []
|
|
macs_list = []
|
|
user_id = GetUserId(user_name)
|
|
all_deployments = ListDeployments(privileges, user_id)
|
|
#{'deployment_id': 21, 'beneficiary_id': 25, 'caretaker_id': 1, 'owner_id': 1, 'installer_id': 1, 'address_street': '661 Encore Way', 'address_city': 'San Jose', 'address_zip': '95134', 'address_state': 'CA', 'address_country': 'USA', 'devices': '["64B70888FAB0","64B70888F860","64B70888F6F0","64B708896BDC","64B708897428","64B70888FA84","64B70889062C"]', 'wifis': '', 'persons': 1, 'gender': 1, 'race': 1, 'born': 1940, 'pets': 0, 'time_zone': 'America/Los_Angeles'}
|
|
MAC2Deployment = {}
|
|
|
|
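#Build a MAC -> deployment map from every visible deployment, then resolve those MACs to well
#ids and room names in a single query so the GUI receives both a deployment list and a device
#list in one payload.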
for deployment in all_deployments:
|
|
beneficiary_id = deployment['beneficiary_id']
|
|
user = GetNameFromUserId(beneficiary_id)
|
|
name = f"{user[1]} {user[2]}"
|
|
deploymentData.append({'deployment_id': str(deployment['deployment_id']), 'name': name})
|
|
devices = deployment['devices']
|
|
if devices != None:
|
|
|
|
devices_list = ToList(devices)
|
|
for device in devices_list:
|
|
macs_list.append(device)
|
|
MAC2Deployment[device] = deployment['deployment_id']
|
|
#deviceData.append({'well_id': device[0], 'mac': device[1]})
|
|
|
|
deployment_id_list = []
|
|
deviceData = []
|
|
|
|
#row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
|
|
|
|
with get_db_connection() as conn:
|
|
with conn.cursor() as cur:
|
|
device_ids, device_list = MACsToWellIds(cur, macs_list)
|
|
|
|
|
|
for device in device_list:
|
|
if MAC2Deployment[device[4]] != "":
|
|
deviceData.append({'well_id': device[0], 'mac': device[4], 'room_name': device[2], 'deployment_id': MAC2Deployment[device[4]]})
|
|
|
|
#deploymentData = [{'deployment_id': '21', 'name': 'Robert Zmrzli House'}, {'deployment_id': '36', 'name': 'Fred Zmrzli Apartment'}]
|
|
#deviceData = [{ 'well_id': '300', 'mac': '64B70888F6F0', 'room_name': 'Living Room', 'deployment_id': '21' }, { 'well_id': '301', 'mac': '64B70888F6F1', 'room_name': 'Bathroom Main', 'deployment_id': '36' }]
|
|
|
|
payload = {
|
|
'status': "success", 'deploymentData': deploymentData, 'deviceData': deviceData
|
|
}
|
|
|
|
logger.debug(f"device_list_4_gui------ {payload} ------------------------------------------")
|
|
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "caretaker_form":
|
|
editing_user_id = form_data.get('editing_user_id')
|
|
email = form_data.get('email')
|
|
user_id = form_data.get('user_id')
|
|
if "@" not in email:
|
|
resp.media = package_response("Missing or illegal 'email' parameter", HTTP_400)
|
|
return
|
|
|
|
print(privileges)
|
|
if privileges == "-1":
|
|
ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif "-1" in privileges:
|
|
payload = {'ok': 0, 'error': "Not allowed!"}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
|
|
elif function == "caretaker_delete":
|
|
if privileges == "-1":
|
|
ok = DeleteRecordFromDB(form_data)
|
|
else:
|
|
ok = 0
|
|
AddToLog(ok)
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "caretakers_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
last = 1000000
|
|
|
|
if privileges == "-1":
|
|
all_caretakers = ListCaretakers(privileges, user_name)
|
|
|
|
cnt = 0
|
|
|
|
for caretaker in all_caretakers:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
|
|
result_list.append(caretaker_min_object)
|
|
if cnt > last:
|
|
break
|
|
elif "-1" in privileges:
|
|
all_caretakers = ListCaretakers(privileges, user_name)
|
|
|
|
cnt = 0
|
|
|
|
for caretaker in all_caretakers:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
|
|
result_list.append(caretaker_min_object)
|
|
if cnt > last:
|
|
break
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiary_form":
|
|
editing_user_id = form_data.get('editing_user_id')
|
|
email = form_data.get('email')
|
|
user_id = GetUserId(user_name)
|
|
if "@" in email:
|
|
ok, error_string = StoreBeneficiary2DB(form_data, editing_user_id, user_id)
|
|
if ok == 1:
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': ok, 'error': error_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
else:
|
|
payload = {'ok': 0, 'error': "Missing or illegal 'email' parameter"}  #'ok' was undefined on this path
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiary_delete":
|
|
|
|
|
|
ok = DeleteRecordFromDB(form_data)
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "beneficiaries_list":
|
|
result_list = []
|
|
first_s = form_data.get('first')
|
|
last_s = form_data.get('last')
|
|
|
|
try:
|
|
first = int(first_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
first = 0
|
|
|
|
try:
|
|
last = int(last_s)
|
|
except (TypeError, ValueError): #the parameter may be missing (None) or malformed
|
|
last = 1000000
|
|
|
|
user_id = form_data.get('user_id')
|
|
all_beneficiaries = ListBeneficiaries(privileges, user_id)
|
|
|
|
cnt = 0
|
|
|
|
for beneficiary in all_beneficiaries:
|
|
cnt += 1
|
|
if cnt >= first:
|
|
beneficiary_min_object = {"user_id": beneficiary[0], "email": beneficiary[3], "first_name": beneficiary[5], "last_name": beneficiary[6]}
|
|
result_list.append(beneficiary_min_object)
|
|
if cnt > last:
|
|
break
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
elif function == "activities_report_details":
|
|
deployment_id = form_data.get('deployment_id')
|
|
|
|
timezone_str = GetTimeZoneOfDeployment(deployment_id)
|
|
filterr = form_data.get('filter')
|
|
if filterr == None:
|
|
filterr = 6
|
|
else:
|
|
filterr = int(filterr)
|
|
|
|
refresh = form_data.get('refresh') == "1"
|
|
ddate = current_date_at_tz(timezone_str)
|
|
timee = LocalDateToUTCEpoch(ddate, timezone_str)+5 #add 5 seconds so the exact date boundary is avoided
|
|
devices_list, device_ids = GetProximityList(deployment_id, timee)
|
|
|
|
#Here we need to add per day: (all based on Z-graph data!)
|
|
#Bathroom visits number
|
|
#Bathroom time spent
|
|
#Sleep wakes number (breaks in the Z-graph during the 10PM-to-9AM period)
|
|
#Sleep length (for now, sum all the time seen in the bedroom)
|
|
#Kitchen visits number
|
|
#Kitchen time spent
|
|
#Most frequented room visits number
|
|
#Most frequented room time spent
|
|
|
|
#Lets find device_id of bathroom sensor
|
|
|
|
|
|
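#Pick one representative device per report row: bathroom, bedroom, kitchen, and the most
#frequented remaining room (an empty role list makes FindDeviceByRole fall back to the device
#flagged as most present).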
bathroom_device_id, location_ba, bathroom_well_id = FindDeviceByRole(deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
|
|
bedroom_device_id, location_be, bedroom_well_id = FindDeviceByRole(deployment_id, ["Bedroom Master", "Bedroom", "Bedroom Guest"])
|
|
kitchen_device_id, location_ke, kitchen_well_id = FindDeviceByRole(deployment_id, ["Kitchen"])
|
|
most_present_device_id, location_ot, most_present_well_id = FindDeviceByRole(deployment_id, []) #this will find the most-present room (as defined in a separate field of the device record)
|
|
|
|
if isinstance(location_ot, int):
|
|
other_location = location_names[location_ot]
|
|
else:
|
|
other_location = location_ot
|
|
|
|
#weekly
|
|
week_dates = get_week_days_and_dates(7, timezone_str)
|
|
month_dates = get_week_days_and_dates(30, timezone_str)
|
|
six_months_dates = get_week_days_and_dates(180, timezone_str)
|
|
|
|
other_color = Loc2Color[other_location][0]
|
|
rgb_string = f"rgb({other_color[0]}, {other_color[1]}, {other_color[2]})"
|
|
|
|
rooms_reports = [("Bathroom", "blue", bathroom_device_id, bathroom_well_id), ("Bedroom", "green", bedroom_device_id, bedroom_well_id), ("Kitchen", "red", kitchen_device_id, kitchen_well_id), (other_location, rgb_string, most_present_device_id, most_present_well_id)]
|
|
|
|
six_months_report = []
|
|
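#For each reporting window (7, 30 and 180 days) walk the per-room date list, pull daily visit
#counts and hours from GetActivities, and (for the longer windows) log any day above 18 hours
#as suspicious.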
for room_details in rooms_reports:
|
|
device_id = room_details[2]
|
|
if device_id > 0:
|
|
|
|
well_id = room_details[3]
|
|
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
|
|
room = {"name": room_details[0],"color": room_details[1]}
|
|
data = []
|
|
|
|
for day_activity in six_months_dates:
|
|
datee = day_activity[0]
|
|
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
|
|
|
|
if hours > 18:
|
|
print("Too long 6m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
|
|
|
|
data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
|
|
data.append(data_record)
|
|
|
|
room["data"] = data
|
|
six_months_report.append(room)
|
|
|
|
weekly_report = []
|
|
for room_details in rooms_reports:
|
|
device_id = room_details[2]
|
|
if device_id > 0:
|
|
well_id = room_details[3]
|
|
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
|
|
room = {"name": room_details[0],"color": room_details[1]}
|
|
data = []
|
|
|
|
for day_activity in week_dates:
|
|
datee = day_activity[0]
|
|
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
|
|
data_record = { "title": day_activity[1], "events": events_count, "hours": hours}
|
|
data.append(data_record)
|
|
|
|
room["data"] = data
|
|
weekly_report.append(room)
|
|
|
|
monthly_report = []
|
|
for room_details in rooms_reports:
|
|
device_id = room_details[2]
|
|
if device_id > 0:
|
|
well_id = room_details[3]
|
|
radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
|
|
room = {"name": room_details[0],"color": room_details[1]}
|
|
data = []
|
|
|
|
for day_activity in month_dates:
|
|
datee = day_activity[0]
|
|
hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
|
|
#if datee == "2025-05-20" and device_id == 572:
|
|
# print(hours)
|
|
if hours > 18:
|
|
print("Too long m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
|
|
|
|
data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
|
|
data.append(data_record)
|
|
|
|
room["data"] = data
|
|
monthly_report.append(room)
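# The weekly, monthly and 6-month loops above differ only in the date list and the label
# field; a sketch of one shared builder (GetActivities is passed in; the rooms_reports and
# date-tuple shapes are the ones used above, and hours > 18 keeps the same sanity check):
def build_room_report(rooms_reports, devices_list, dates, label_index,
                      filterr, refresh, timezone_str, get_activities):
    # dates rows look like (date, label1, label2); label_index picks which label becomes the title
    thresholds = {row[1]: row[5] for row in devices_list}
    report = []
    for name, color, device_id, well_id in rooms_reports:
        if device_id <= 0:
            continue
        data = []
        for day in dates:
            hours, events = get_activities(device_id, well_id, day[0], filterr,
                                           refresh, timezone_str, thresholds[device_id])
            if hours > 18:
                print("Too long!!!", device_id, well_id, day[0])
            data.append({"title": str(day[label_index]), "events": events, "hours": hours})
        report.append({"name": name, "color": color, "data": data})
    return report
# e.g. weekly_report = build_room_report(rooms_reports, devices_list, week_dates, 1,
#                                        filterr, refresh, timezone_str, GetActivities)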
|
|
|
|
|
|
|
|
|
|
result_dictionary = {
|
|
"alert_text": "No alert",
|
|
"alert_color": "bg-green-100 text-green-700",
|
|
"chart_data": [
|
|
{
|
|
"name": "Weekly",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "blue",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80.56 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "green",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "red",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "yellow",
|
|
"data": [
|
|
{ "title": "Monday", "events": 186, "hours": 80 },
|
|
{ "title": "Tuesday", "events": 305, "hours": 200 },
|
|
{ "title": "Wednesday", "events": 237, "hours": 120 },
|
|
{ "title": "Thursday", "events": 73, "hours": 190 },
|
|
{ "title": "Friday", "events": 209, "hours": 130 },
|
|
{ "title": "Saturday", "events": 214, "hours": 140 },
|
|
{ "title": "Sunday", "events": 150, "hours": 100 }
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Monthly",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "purple",
|
|
"data": [
|
|
{ "title": "01", "events": 67, "hours": 45 },
|
|
{ "title": "02", "events": 97, "hours": 67 },
|
|
{ "title": "03", "events": 87, "hours": 23 },
|
|
{ "title": "04", "events": 42, "hours": 12 },
|
|
{ "title": "05", "events": 64, "hours": 48 },
|
|
{ "title": "06", "events": 53, "hours": 34 },
|
|
{ "title": "07", "events": 75, "hours": 23 },
|
|
{ "title": "08", "events": 45, "hours": 56 },
|
|
{ "title": "09", "events": 85, "hours": 47 },
|
|
{ "title": "10", "events": 34, "hours": 29 },
|
|
{ "title": "11", "events": 49, "hours": 30 },
|
|
{ "title": "12", "events": 62, "hours": 33 },
|
|
{ "title": "13", "events": 75, "hours": 44 },
|
|
{ "title": "14", "events": 88, "hours": 57 },
|
|
{ "title": "15", "events": 94, "hours": 65 },
|
|
{ "title": "16", "events": 45, "hours": 21 },
|
|
{ "title": "17", "events": 76, "hours": 54 },
|
|
{ "title": "18", "events": 85, "hours": 62 },
|
|
{ "title": "19", "events": 43, "hours": 28 },
|
|
{ "title": "20", "events": 59, "hours": 34 },
|
|
{ "title": "21", "events": 78, "hours": 56 },
|
|
{ "title": "22", "events": 64, "hours": 39 },
|
|
{ "title": "23", "events": 93, "hours": 72 },
|
|
{ "title": "24", "events": 52, "hours": 28 },
|
|
{ "title": "25", "events": 71, "hours": 48 },
|
|
{ "title": "26", "events": 85, "hours": 63 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "#3b82f6",
|
|
"data": [
|
|
{ "title": "01", "events": 61, "hours": 42 },
|
|
{ "title": "02", "events": 72, "hours": 36 },
|
|
{ "title": "03", "events": 94, "hours": 49 },
|
|
{ "title": "04", "events": 67, "hours": 59 },
|
|
{ "title": "05", "events": 54, "hours": 20 },
|
|
{ "title": "06", "events": 77, "hours": 64 },
|
|
{ "title": "07", "events": 81, "hours": 70 },
|
|
{ "title": "08", "events": 53, "hours": 25 },
|
|
{ "title": "09", "events": 79, "hours": 42 },
|
|
{ "title": "10", "events": 84, "hours": 65 },
|
|
{ "title": "11", "events": 62, "hours": 54 },
|
|
{ "title": "12", "events": 45, "hours": 23 },
|
|
{ "title": "13", "events": 88, "hours": 71 },
|
|
{ "title": "14", "events": 74, "hours": 44 },
|
|
{ "title": "15", "events": 91, "hours": 59 },
|
|
{ "title": "16", "events": 46, "hours": 31 },
|
|
{ "title": "17", "events": 73, "hours": 40 },
|
|
{ "title": "18", "events": 85, "hours": 63 },
|
|
{ "title": "19", "events": 78, "hours": 66 },
|
|
{ "title": "20", "events": 66, "hours": 42 },
|
|
{ "title": "21", "events": 95, "hours": 78 },
|
|
{ "title": "22", "events": 57, "hours": 39 },
|
|
{ "title": "23", "events": 72, "hours": 48 },
|
|
{ "title": "24", "events": 48, "hours": 21 },
|
|
{ "title": "25", "events": 89, "hours": 61 },
|
|
{ "title": "26", "events": 77, "hours": 44 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "orange",
|
|
"data": [
|
|
{ "title": "01", "events": 94, "hours": 59 },
|
|
{ "title": "02", "events": 62, "hours": 48 },
|
|
{ "title": "03", "events": 76, "hours": 38 },
|
|
{ "title": "04", "events": 81, "hours": 62 },
|
|
{ "title": "05", "events": 64, "hours": 27 },
|
|
{ "title": "06", "events": 53, "hours": 31 },
|
|
{ "title": "07", "events": 92, "hours": 65 },
|
|
{ "title": "08", "events": 85, "hours": 42 },
|
|
{ "title": "09", "events": 74, "hours": 35 },
|
|
{ "title": "10", "events": 67, "hours": 55 },
|
|
{ "title": "11", "events": 49, "hours": 23 },
|
|
{ "title": "12", "events": 88, "hours": 75 },
|
|
{ "title": "13", "events": 93, "hours": 66 },
|
|
{ "title": "14", "events": 76, "hours": 34 },
|
|
{ "title": "15", "events": 59, "hours": 39 },
|
|
{ "title": "16", "events": 72, "hours": 51 },
|
|
{ "title": "17", "events": 83, "hours": 44 },
|
|
{ "title": "18", "events": 74, "hours": 33 },
|
|
{ "title": "19", "events": 69, "hours": 28 },
|
|
{ "title": "20", "events": 85, "hours": 56 },
|
|
{ "title": "21", "events": 53, "hours": 22 },
|
|
{ "title": "22", "events": 92, "hours": 70 },
|
|
{ "title": "23", "events": 71, "hours": 41 },
|
|
{ "title": "24", "events": 67, "hours": 25 },
|
|
{ "title": "25", "events": 86, "hours": 74 },
|
|
{ "title": "26", "events": 94, "hours": 68 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "hotpink",
|
|
"data": [
|
|
{ "title": "01", "events": 57, "hours": 27 },
|
|
{ "title": "02", "events": 74, "hours": 33 },
|
|
{ "title": "03", "events": 84, "hours": 53 },
|
|
{ "title": "04", "events": 95, "hours": 68 },
|
|
{ "title": "05", "events": 71, "hours": 48 },
|
|
{ "title": "06", "events": 92, "hours": 76 },
|
|
{ "title": "07", "events": 85, "hours": 62 },
|
|
{ "title": "08", "events": 49, "hours": 25 },
|
|
{ "title": "09", "events": 66, "hours": 38 },
|
|
{ "title": "10", "events": 63, "hours": 31 },
|
|
{ "title": "11", "events": 75, "hours": 47 },
|
|
{ "title": "12", "events": 94, "hours": 72 },
|
|
{ "title": "13", "events": 79, "hours": 49 },
|
|
{ "title": "14", "events": 72, "hours": 45 },
|
|
{ "title": "15", "events": 88, "hours": 61 },
|
|
{ "title": "16", "events": 83, "hours": 52 },
|
|
{ "title": "17", "events": 92, "hours": 76 },
|
|
{ "title": "18", "events": 73, "hours": 40 },
|
|
{ "title": "19", "events": 65, "hours": 28 },
|
|
{ "title": "20", "events": 76, "hours": 63 },
|
|
{ "title": "21", "events": 58, "hours": 30 },
|
|
{ "title": "22", "events": 84, "hours": 67 },
|
|
{ "title": "23", "events": 72, "hours": 41 },
|
|
{ "title": "24", "events": 79, "hours": 46 },
|
|
{ "title": "25", "events": 63, "hours": 29 },
|
|
{ "title": "26", "events": 68, "hours": 39 }
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "6 Months",
|
|
"rooms": [
|
|
{
|
|
"name": "Bathroom",
|
|
"color": "purple",
|
|
"data": [
|
|
{ "title": "October", "events": 62, "hours": 23 },
|
|
{ "title": "November", "events": 76, "hours": 42 },
|
|
{ "title": "December", "events": 85, "hours": 54 },
|
|
{ "title": "January", "events": 94, "hours": 67 },
|
|
{ "title": "February", "events": 63, "hours": 35 },
|
|
{ "title": "March", "events": 81, "hours": 46 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedroom",
|
|
"color": "#3b82f6",
|
|
"data": [
|
|
{ "title": "October", "events": 64, "hours": 35 },
|
|
{ "title": "November", "events": 88, "hours": 71 },
|
|
{ "title": "December", "events": 79, "hours": 54 },
|
|
{ "title": "January", "events": 72, "hours": 49 },
|
|
{ "title": "February", "events": 53, "hours": 32 },
|
|
{ "title": "March", "events": 93, "hours": 67 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Kitchen",
|
|
"color": "orange",
|
|
"data": [
|
|
{ "title": "October", "events": 92, "hours": 65 },
|
|
{ "title": "November", "events": 85, "hours": 62 },
|
|
{ "title": "December", "events": 74, "hours": 49 },
|
|
{ "title": "January", "events": 63, "hours": 33 },
|
|
{ "title": "February", "events": 78, "hours": 56 },
|
|
{ "title": "March", "events": 69, "hours": 41 }
|
|
]
|
|
},
|
|
{
|
|
"name": "Other",
|
|
"color": "hotpink",
|
|
"data": [
|
|
{ "title": "October", "events": 88, "hours": 54 },
|
|
{ "title": "November", "events": 72, "hours": 39 },
|
|
{ "title": "December", "events": 84, "hours": 63 },
|
|
{ "title": "January", "events": 76, "hours": 46 },
|
|
{ "title": "February", "events": 93, "hours": 72 },
|
|
{ "title": "March", "events": 68, "hours": 29 }
|
|
]
|
|
}
|
|
]
|
|
}
|
|
]
|
|
}
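# Every 'rooms' list in the placeholder dictionary above is overwritten a few lines below,
# so only the skeleton is actually used; a leaner sketch of building just that skeleton:
def report_skeleton():
    return {
        "alert_text": "No alert",
        "alert_color": "bg-green-100 text-green-700",
        "chart_data": [
            {"name": "Weekly", "rooms": []},
            {"name": "Monthly", "rooms": []},
            {"name": "6 Months", "rooms": []},
        ],
    }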
|
|
|
|
result_dictionary["chart_data"][0]["rooms"] = weekly_report
|
|
result_dictionary["chart_data"][1]["rooms"] = monthly_report
|
|
result_dictionary["chart_data"][2]["rooms"] = six_months_report
|
|
|
|
|
|
|
|
payload = result_dictionary #{'result_dictionary': result_dictionary}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
AddToLog(payload)
|
|
return
|
|
|
|
|
|
elif function == "dashboard_list":
|
|
# works in UTC only
|
|
|
|
logger.error(f"------------------------------- dashboard_list ------------------------------------------")
|
|
|
|
caretaker = user_name
|
|
#date_s = form_data.get('date')
|
|
time_s = form_data.get('time')
|
|
date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
|
|
filterr = form_data.get('filter')
|
|
if filterr is None:
|
|
filterr = 5
|
|
|
|
privileges = GetPriviledgesOnly(caretaker)
|
|
|
|
deployments_list = GetUsersFromDeployments(privileges)
|
|
|
|
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
|
|
#AddToLog(all_beneficiaries)
|
|
|
|
result_list = []
|
|
|
|
for deployment_id, first_name, last_name in deployments_list:
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
|
|
if details != {}:
|
|
|
|
details["units"] = "°C"
|
|
if "America" in details["time_zone"]:
|
|
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
|
|
details["units"] = "°F"
|
|
devices_list, device_ids = GetProximityList(deployment_id, date_s)
|
|
# convert dates back to UTC
|
|
#details['bathroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['kitchen_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['bedroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bedroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
#details['last_detected_time'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['last_detected_time'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
|
|
location_list = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
if description is None or description == "":
|
|
location_list.append(location_name)
|
|
else:
|
|
location_list.append(location_name + " " + description)
|
|
details["deployment_id"] = deployment_id
|
|
details["location_list"] = location_list
|
|
result_list.append(details)
|
|
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
AddToLog(payload)
|
|
return
|
|
|
|
|
|
elif function == "dashboard_single":
|
|
caretaker = user_name
|
|
#date_s = form_data.get('date')
|
|
date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
|
|
deployment_id = form_data.get('deployment_id')
|
|
filterr = form_data.get('filter')
|
|
if filterr is None:
|
|
filterr = 5
|
|
|
|
|
|
#all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
|
|
#AddToLog(all_beneficiaries)
|
|
|
|
result_list = []
|
|
|
|
details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr, False)
|
|
details["units"] = "°C"
|
|
if "America" in details["time_zone"]:
|
|
details["temperature"] = CelsiusToFahrenheit(details["temperature"])
|
|
details["units"] = "°F"
|
|
devices_list, device_ids = GetProximityList(deployment_id, date_s)
|
|
location_list = []
|
|
for room_details in devices_list:
|
|
well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
|
|
if description is None or description == "":
|
|
location_list.append(location_name)
|
|
else:
|
|
location_list.append(location_name + " " + description)
|
|
details["deployment_id"] = deployment_id
|
|
details["location_list"] = location_list
|
|
settings = {"wellness_score": False, "last_seen": False, "sleep_report": True, "activity_report": True, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
|
|
details["settings"] = settings
|
|
result_list.append(details)
|
|
payload = {'result_list': result_list}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
AddToLog(payload)
|
|
return
|
|
|
|
elif function == "request_node_red":
|
|
logger.error(f"------------------------------- {function} ------------------------------------------")
|
|
#this will:
# 1.prepare folder and settings.js
# 2.start an instance of Node-RED and return its port
# (this handler only queues the request in the 'node_red_requests' hash; a separate manager process is expected to do the actual work)
|
|
#caretaker = user_name
|
|
#date_s = form_data.get('date')
|
|
time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
|
|
#deployment_id = form_data.get('deployment_id')
|
|
#redis_conn.set('node_red_requests', str([radar_threshold_signal, radar_threshold_value]))
|
|
# Hashes (dictionaries)
|
|
logger.error(f"Storing to node_red_requests {user_name}")
|
|
redis_conn.hset('node_red_requests', mapping={
|
|
'user_name': user_name,
|
|
'token': token,
|
|
'time': time_s,
|
|
'requests': 1
|
|
})
|
|
|
|
payload = {'ok': 1}
|
|
logger.error(f"Responding {payload}")
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
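# The request above is only queued; a separate Node-RED manager is assumed to consume the
# 'node_red_requests' hash, create the folder/settings.js, launch the per-user instance and
# publish its port under node_red_status_<user>. A minimal sketch of that consumer
# (start_instance and the 1-second poll are assumptions, not an existing worker):
import time

def node_red_manager_loop(r, start_instance):
    # r: a redis.Redis connection; start_instance(user_name) -> port is assumed to do the
    # actual folder preparation and process launch
    while True:
        request = r.hgetall('node_red_requests')
        if request:
            user = request[b'user_name'].decode()
            port = start_instance(user)
            r.hset(f'node_red_status_{user}', mapping={'port': port,
                                                       'last_activity': request[b'time'].decode()})
            r.delete('node_red_requests')
        time.sleep(1)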
|
|
|
|
elif function == "get_node_red_port":
|
|
#this will:
# 1.look up the port of the user's Node-RED instance in Redis and return it (0 if none is running yet)
|
|
hash_data = GetRedisMap(f'node_red_status_{user_name}')
|
|
port = 0
|
|
if hash_data != {}:
|
|
port = hash_data['port']
|
|
#date_s = form_data.get('date')
|
|
#date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
|
|
#deployment_id = form_data.get('deployment_id')
|
|
payload = {'port': port}
|
|
logger.debug(f"get_node_red_port: {payload}")
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
elif function == "activity_detected":
|
|
#this will:
|
|
# 1.store the time of the last activity to Redis
|
|
time_s = form_data.get('time')
|
|
|
|
hash_data = GetRedisMap(f'node_red_status_{user_name}')
|
|
port = 0
|
|
if hash_data != {}:
|
|
port = hash_data['port']
|
|
|
|
redis_conn.hset(f'node_red_status_{user_name}', mapping={
|
|
'port': port,
|
|
'last_activity': time_s
|
|
})
|
|
|
|
payload = {'ok': 1}
|
|
logger.debug(f"activity_detected: {payload}")
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
elif function == "store_flow":
|
|
#this will:
|
|
# 1.store flow into DB
|
|
time_s = form_data.get('time')
|
|
flow_json = form_data.get('flow')
|
|
logger.debug(f"store_flow: {flow_json}")
|
|
StoreFlow2DB(user_name, time_s, flow_json)
|
|
payload = {'ok': 1}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
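# StoreFlow2DB above handles the actual persistence; purely as an illustration, a
# parameterized insert of the flow JSON could look like this (the node_red_flows table and
# its columns are assumptions, not the actual schema):
def store_flow_example(conn, user_name, time_s, flow_json):
    # conn: an open psycopg2 connection; flow_json: the JSON string received from the editor
    with conn.cursor() as cur:
        cur.execute(
            "INSERT INTO node_red_flows (user_name, saved_at, flow) VALUES (%s, %s, %s)",
            (user_name, time_s, flow_json),
        )
    conn.commit()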
|
|
elif function == "store_alarms":
|
|
#this will:
# 1.store the deployment and device alarm settings into the DB, cache them in Redis and queue a 'new_alarms' record
|
|
deployment_id = form_data.get('deployment_id')
|
|
device_id = form_data.get('device_id')
|
|
deployment_alarms_json = form_data.get('deployment_alarms')
|
|
device_alarms_json = form_data.get('device_alarms')
|
|
|
|
logger.debug(f"store_alarms: {deployment_alarms_json}")
|
|
|
|
|
|
if privileges == "-1" or deployment_id in privileges:
|
|
ok = StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json)
|
|
|
|
redis_conn.set('alarm_device_settings_'+device_id, device_alarms_json)
|
|
redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
|
|
|
|
# Create record dictionary
|
|
record = {
|
|
'user_name': user_name,
|
|
'deployment_id': deployment_id,
|
|
'device_id': device_id
|
|
}
|
|
|
|
# Convert dictionary to JSON string for storage in Redis list
|
|
record_json = json.dumps(record)
|
|
|
|
# Add to queue (list) - lpush adds to the left/front of the list
|
|
redis_conn.lpush('new_alarms', record_json)
|
|
|
|
|
|
payload = {'ok': ok}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
else:
|
|
payload = {'ok': 0, 'error': "not allowed"}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
return
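# The 'new_alarms' list above is filled with lpush; pairing it with a blocking brpop on the
# consumer side gives FIFO ordering. A minimal sketch of such a worker (the handler callback
# is an assumption):
import json

def alarm_queue_worker(r, handle_record):
    # r: a redis.Redis connection; blocks until a record is available (lpush + brpop == FIFO)
    while True:
        _key, raw = r.brpop('new_alarms')
        record = json.loads(raw)
        handle_record(record)  # e.g. recompute alarm state for record['deployment_id']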
|
|
|
|
elif function == "send_walarm":
|
|
# Extract data from form
|
|
deployment_id = form_data.get('deployment_id')
|
|
device_id = form_data.get('device_id')
|
|
location = form_data.get('location')
|
|
method = form_data.get('method')
|
|
feature = form_data.get('feature')
|
|
currentAlertTableMode = form_data.get('currentAlertTableMode')
|
|
time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
|
|
content = form_data.get('content')
|
|
enabledCellContent = form_data.get('enabledCellContent')
|
|
currentUnits = form_data.get('currentUnits')
|
|
test_only = form_data.get('test_only')
|
|
action = form_data.get('action')
|
|
|
|
logger.debug(f"send_requests: {user_name}")
|
|
|
|
# Create record dictionary
|
|
record = {
|
|
'user_name': user_name,
|
|
'deployment_id': deployment_id,
|
|
'location': location,
|
|
'method': method,
|
|
'feature': feature,
|
|
'currentAlertTableMode': currentAlertTableMode,
|
|
'time': time_s,
|
|
'content': content,
|
|
'currentUnits': currentUnits,
|
|
'test_only': test_only,
|
|
'action': action,
|
|
'enabledCellContent': enabledCellContent
|
|
}
|
|
|
|
# Convert dictionary to JSON string for storage in Redis list
|
|
record_json = json.dumps(record)
|
|
|
|
# Add to queue (list) - lpush adds to the left/front of the list
|
|
redis_conn.lpush('send_requests', record_json)
|
|
|
|
payload = {'ok': 1}
|
|
logger.error(f"Responding {payload}")
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
elif function == "node-red_deployed":
|
|
#this will:
# 1.record that the user's Node-RED flow was deployed (stored in the 'node-red_deployed' Redis hash)
|
|
time_s = form_data.get('time')
|
|
logger.debug(f"node-red_deployed: {user_name}")
|
|
redis_conn.hset('node-red_deployed', mapping={
|
|
'user_name': user_name,
|
|
'token': token,
|
|
'time': time_s,
|
|
'requests': 1
|
|
})
|
|
|
|
payload = {'ok': 1}
|
|
logger.error(f"Responding {payload}")
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
|
|
else:
|
|
AddToLog("Error: function not recognized!")
|
|
payload = {'ok': 0, 'error': debug_string}
|
|
resp.media = package_response(payload)
|
|
resp.status = falcon.HTTP_200
|
|
return
|
|
|
|
except Exception as e:
|
|
print(traceback.format_exc())
|
|
resp.media = package_response(f"Error: {str(e)} {traceback.format_exc()}", HTTP_500)
|
|
|
|
|
|
def on_put(self, req, resp, path=""):
|
|
"""Handle PUT requests"""
|
|
if path == "users":
|
|
logger.info("PUT request to users endpoint")
|
|
try:
|
|
# Parse the request body
|
|
request_data = json.loads(req.stream.read().decode('utf-8'))
|
|
|
|
# TODO: Implement user update logic
|
|
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"id": request_data.get("id"), "message": "User updated"})
|
|
except json.JSONDecodeError:
|
|
resp.status = HTTP_400
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"error": "Invalid JSON"})
|
|
else:
|
|
resp.media = package_response(f"PUT to /{path} not implemented", HTTP_400)
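# The user update in on_put above is still a TODO; purely as an illustration, one possible
# shape using a parameterized UPDATE (the users table, its columns and the whitelist are
# assumptions, not the actual schema):
def update_user_example(conn, user_id, fields, allowed_columns=("first_name", "last_name", "email")):
    # fields: dict of column -> new value; only whitelisted column names are interpolated
    cols = [c for c in fields if c in allowed_columns]
    if not cols:
        return False
    assignments = ", ".join(f"{c} = %s" for c in cols)
    with conn.cursor() as cur:
        cur.execute(f"UPDATE users SET {assignments} WHERE user_id = %s",
                    tuple(fields[c] for c in cols) + (user_id,))
    conn.commit()
    return True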
|
|
|
|
def on_delete(self, req, resp, path=""):
|
|
"""Handle DELETE requests"""
|
|
if path == "users":
|
|
logger.info("DELETE request to users endpoint")
|
|
resp.status = HTTP_200
|
|
resp.content_type = falcon.MEDIA_JSON
|
|
resp.text = json.dumps({"message": "User deleted"})
|
|
else:
|
|
resp.media = package_response(f"DELETE to /{path} not implemented", HTTP_400)
|
|
|
|
# Initialize data files
|
|
|
|
|
|
logger.error(f"------------------------------- STARTED ------------------------------------------")
|
|
try:
|
|
searches_text = read_file("searches.json")
|
|
searches_dict = json.loads(searches_text) if searches_text else {}
|
|
|
|
dialogs_data = read_file("dialog.json")
|
|
dialog_dict = json.loads(dialogs_data) if dialogs_data else {"utterances": {}, "intents": {}}
|
|
|
|
intent_map = dialog_dict.get("utterances", {})
|
|
utterances = {}
|
|
for key in intent_map:
|
|
logger.debug(key)
|
|
list_of_utterances = intent_map[key]
|
|
for utterance in list_of_utterances:
|
|
utterances[utterance] = key
|
|
|
|
intents = dialog_dict.get("intents", {})
|
|
except Exception as e:
|
|
logger.error(f"Error initializing data files: {str(e)}")
|
|
searches_dict = {}
|
|
utterances = {}
|
|
intents = {}
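# For reference, the loop above expects dialog.json to map each intent to a list of
# utterances and inverts that mapping; a minimal illustration of the expected shape
# (the intent names and phrases here are made up):
EXAMPLE_DIALOG_JSON = {
    "utterances": {
        "get_temperature": ["what is the temperature", "how warm is it"],
        "get_last_seen": ["when was she last seen"]
    },
    "intents": {
        "get_temperature": {},   # intent definitions; structure not shown here
        "get_last_seen": {}
    }
}
# after the inversion above: utterances["what is the temperature"] == "get_temperature"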
|
|
|
|
# Create Falcon application with middleware
|
|
middlewares = [CORSMiddleware(), RequestParser(), StripPathMiddleware()]
|
|
try:
|
|
# For newer Falcon versions
|
|
app = falcon.App(middleware=middlewares)
|
|
except AttributeError:
|
|
# For older Falcon versions
|
|
app = falcon.API(middleware=middlewares)
|
|
|
|
#logger.error(f"@1")
|
|
# Add routes for well-api
|
|
well_api_instance = WellApi()
|
|
|
|
# New routes for well_api with multiple access paths
|
|
app.add_route('/function/well-api', well_api_instance)
|
|
app.add_route('/function/well-api/{path}', well_api_instance)
|
|
app.add_route('/api/well_api', well_api_instance)
|
|
app.add_route('/api/well_api/{path}', well_api_instance)
|
|
app.add_route('/healthz', well_api_instance, suffix='healthz')
|
|
|
|
# Add routes for the standard API paths
|
|
app.add_route('/health', well_api_instance)
|
|
app.add_route('/users', well_api_instance)
|
|
app.add_route('/items', well_api_instance)
|
|
|
|
# Keep the original routes for backward compatibility
|
|
app.add_route('/', well_api_instance)
|
|
app.add_route('/{path}', well_api_instance)
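# Quick smoke test for the routes above, assuming the local debug server further below is
# running on port 8000 and that the healthz responder accepts GET:
#   curl -s http://localhost:8000/healthz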
|
|
|
|
MQTTSERVERL = "eluxnetworks.net"
|
|
MQTT_PortL = 443
|
|
|
|
MyName = "well-api"
|
|
|
|
clientL = mqtt.Client(client_id=MyName+str(time.time()), transport="websockets")
|
|
clientL.tls_set(cert_reqs=ssl.CERT_NONE) # For self-signed certs, use proper CA in production
|
|
clientL.ws_set_options(path="/mqtt") # Important! Same path as in your JS code
|
|
clientL.username_pw_set("well_user","We3l1_best!")
|
|
|
|
clientL.on_connect = on_connectL
|
|
clientL.on_message = on_messageL
|
|
|
|
#clientL.connect(MQTTSERVERL, MQTT_PortL, 60)
|
|
#clientL.loop_start()
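# The MQTT credentials above are hardcoded in source; a sketch of reading them from the
# environment instead (MQTT_USER / MQTT_PASS are assumed variable names):
import os

mqtt_user = os.environ.get("MQTT_USER", "well_user")
mqtt_pass = os.environ.get("MQTT_PASS", "")
# clientL.username_pw_set(mqtt_user, mqtt_pass)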
|
|
#logger.error(f"@2")
|
|
|
|
# This code runs when executed directly (for development/debugging)
|
|
if __name__ == "__main__":
|
|
from wsgiref.simple_server import make_server
|
|
redis_conn = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
|
|
# Use port 8000 for local debugging
|
|
port = int(os.environ.get('PORT', 8000))
|
|
#port = int(os.environ.get('PORT', 1998))
|
|
|
|
# Create a WSGI server
|
|
with make_server('', port, app) as httpd:
|
|
print(f'Serving on port {port}...')
|
|
|
|
# Serve until process is killed
|
|
httpd.serve_forever()
|
|
else:
|
|
redis_conn = redis.Redis(host=redis_host, port=6379, db=0)
|