diff --git a/.env b/.env
index 33b8bea..55e47f2 100644
--- a/.env
+++ b/.env
@@ -9,11 +9,11 @@ MINIO_HOST=192.168.68.70
MINIO_PORT=9000
DAILY_MAPS_BUCKET_NAME=daily-maps
JWT_SECRET=Well_202502110501
-OPENAI_API_KEY=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
+OPENAI_API_KEY=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
OPENAI_API_MODEL_ENGINE=gpt-3.5-turbo
-
REDIS_PORT="6379"
-
+MQTT_USER=well_user
+MQTT_PASS=We3l1_best!
TELNYX_VOICE_URL=https://api.telnyx.com/v2/calls
#TELNYX_WEBHOOK_URL_VOICE=https://eluxnetworks.net/telnyx-webhook
TELNYX_WEBHOOK_URL_VOICE=http://eluxnetworks.net:1998/function/well-api/api
diff --git a/stack.yml b/stack.yml
index 0cd71ac..53a21e8 100644
--- a/stack.yml
+++ b/stack.yml
@@ -47,11 +47,14 @@ functions:
JWT_SECRET: "Well_202502110501"
MASTER_ADMIN: "robster"
MASTER_PS: "rob2"
- OPENAI_API_KEY: "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"
+ OPENAI_API_KEY: "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"
+
OPENAI_API_MODEL_ENGINE: "gpt-3.5-turbo"
REDIS_PORT: "6379"
TELNYX_API_KEY: "KEY0196087A75998434A30FA637CE4FDAFF_ZljGj9KBSAQL0zXx4Sb5eW"
TELNYX_API_BASE_URL: "https://api.telnyx.com/v2"
+ MQTT_USER: "well_user"
+ MQTT_PASS: "We3l1_best!"
deploy:
resources:
diff --git a/well-api.py b/well-api.py
index 3aa940f..9be7f48 100644
--- a/well-api.py
+++ b/well-api.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-#Vesion 2.0.1
+#Version 2.0.2 9/25/2025
import os
import sys
import ast
@@ -27,7 +27,6 @@ import ssl
import hashlib
import itertools
from collections import defaultdict, deque
-import warnings
from io import BytesIO
import zipfile
from minio import Minio
@@ -44,6 +43,7 @@ import requests
import uuid
import csv
import random
+import urllib.parse
base_url = "http://192.168.68.70:5050"
@@ -57,7 +57,6 @@ except ImportError as e:
device_lookup_cache = {}
-threshold_cache = {}
humidity_offset = 34
temperature_offset = -10
st = 0
@@ -75,7 +74,6 @@ if EnablePlot:
import matplotlib
matplotlib.use('Agg') # Set the backend before importing pyplot
import matplotlib.pyplot as plt
- from matplotlib.colors import LinearSegmentedColormap
import matplotlib.dates as mdates
# Configure logging
@@ -114,6 +112,10 @@ AveragePercentPerLocation = {"Bedroom":[29, 37.5], "Bathroom":[2, 4], "Office":[
"Room":[5, 10],"Kitchen":[5, 12.5], "Living Room":[5, 10],
"Dining Room":[5, 10], "Basement":[0, 0.2], "Attic":[0, 0.2]}
+races = ["Asian","Black or African American","White","Native American or Alaskan Native","Native Hawaiian or other Pacific Islander","Hispanic or Latino","Middle Eastern","Other"]
+sexes = ["Male","Female"]
+
+
location_indexes = {}
for i in location_names:
@@ -142,29 +144,31 @@ MINIO_HOST = os.getenv('MINIO_HOST')
MINIO_PORT = os.getenv('MINIO_PORT')
DAILY_MAPS_BUCKET_NAME = os.getenv('DAILY_MAPS_BUCKET_NAME')
JWT_SECRET = os.getenv('JWT_SECRET')
-MASTER_ADMIN = os.getenv('MASTER_ADMIN')
-MASTER_PS = os.getenv('MASTER_PS')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
+
+#logger.debug(f"OPENAI_API_KEY: {OPENAI_API_KEY}")
+
model_engine = os.getenv('OPENAI_API_MODEL_ENGINE')
# Redis Configuration
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
+
REDIS_PORT = int(os.getenv('REDIS_PORT'))
REDIS_DB = int(os.getenv('REDIS_DB', 0))
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', None)
-ENABLE_AUDIO_PLAYBACK = True
-OPT_IN_KEYWORD = "WELLNUOJOIN"
DEFAULT_TTS_VOICE = "female"
DEFAULT_TTS_LANGUAGE = "en-US"
-CLIENT_STATE_PREFIX = "app_state"
TELNYX_API_KEY = os.getenv('TELNYX_API_KEY')
TELNYX_API_BASE_URL = os.getenv("TELNYX_API_BASE_URL")
-logger.debug(f"REDIS_PORT: {REDIS_PORT}")
-logger.debug(f"TELNYX_API_KEY: {TELNYX_API_KEY}")
-logger.debug(f"TELNYX_API_BASE_URL: {TELNYX_API_BASE_URL}")
+MQTT_USER = os.getenv('MQTT_USER')
+MQTT_PASS = os.getenv('MQTT_PASS')
+
+#logger.debug(f"REDIS_PORT: {REDIS_PORT}")
+#logger.debug(f"TELNYX_API_KEY: {TELNYX_API_KEY}")
+#logger.debug(f"TELNYX_API_BASE_URL: {TELNYX_API_BASE_URL}")
redis_host = os.getenv('REDIS_HOST', '192.168.68.70')
redis_host = '192.168.68.70'
@@ -301,143 +305,6 @@ def format_special_cases(word):
return word.capitalize()
-def recreate_address(parsed_address):
- """
- Recreate a properly formatted address string from parsed components
-
- Args:
- parsed_address (dict): Dictionary containing parsed address components
- OR list of tuples: [('value', 'type'), ...]
-
- Expected keys/types:
- - street_number/house_number
- - street_name/road
- - apt/unit/suite (optional)
- - city
- - state
- - zip_code/postcode
- - country
-
- Returns:
- str: Properly formatted address string
- """
-
- # Handle both dict and list of tuples formats
- if isinstance(parsed_address, list):
- # Convert list of tuples to dict
- addr_dict = {}
- for value, addr_type in parsed_address:
- # Map the parser's field names to our expected names
- if addr_type == 'house_number':
- # Only use house_number if street_number doesn't already exist
- if 'street_number' not in addr_dict:
- addr_dict['street_number'] = value
- elif addr_type == 'street_number':
- # street_number takes priority over house_number
- addr_dict['street_number'] = value
- elif addr_type == 'road':
- addr_dict['street_name'] = value
- elif addr_type == 'postcode':
- addr_dict['zip_code'] = value
- else:
- addr_dict[addr_type] = value
- parsed_address = addr_dict
-
- # Format each component
- street_number = format_address_component(
- parsed_address.get('street_number', ''), 'street_number'
- )
-
- street_name = format_address_component(
- parsed_address.get('street_name', ''), 'street_name'
- )
-
- # Handle apartment/unit/suite
- apt_unit = ''
- for key in ['apt', 'apartment', 'unit', 'suite', 'ste']:
- if parsed_address.get(key):
- apt_value = format_address_component(parsed_address[key], 'apt')
- if key.lower() in ['apt', 'apartment']:
- apt_unit = f"Apt {apt_value}"
- elif key.lower() == 'unit':
- apt_unit = f"Unit {apt_value}"
- elif key.lower() in ['suite', 'ste']:
- apt_unit = f"Suite {apt_value}"
- break
-
- city = format_address_component(
- parsed_address.get('city', ''), 'city'
- )
-
- state = format_address_component(
- parsed_address.get('state', ''), 'state'
- )
-
- zip_code = format_address_component(
- parsed_address.get('zip_code', ''), 'zip_code'
- )
-
- country = format_address_component(
- parsed_address.get('country', ''), 'country'
- )
-
- # Build the address string - detect format based on available components
- if country and not state:
- # International format (like Croatian): Street Number, PostalCode, City, Country
- address_parts = []
-
- # Street address line
- street_parts = [street_number, street_name]
- street_line = ' '.join(filter(None, street_parts))
-
- if apt_unit:
- street_line += f", {apt_unit}"
-
- if street_line:
- address_parts.append(street_line)
-
- # Add postal code, city, country as separate parts
- if zip_code:
- address_parts.append(zip_code)
- if city:
- address_parts.append(city)
- if country:
- address_parts.append(country)
-
- return ', '.join(address_parts)
-
- else:
- # US format: Street\nCity, State ZIP
- address_parts = []
-
- # Street address line
- street_parts = [street_number, street_name]
- street_line = ' '.join(filter(None, street_parts))
-
- if apt_unit:
- street_line += f", {apt_unit}"
-
- if street_line:
- address_parts.append(street_line)
-
- # City, State ZIP line
- city_state_zip = []
- if city:
- city_state_zip.append(city)
-
- if state and zip_code:
- city_state_zip.append(f"{state} {zip_code}")
- elif state:
- city_state_zip.append(state)
- elif zip_code:
- city_state_zip.append(zip_code)
-
- if city_state_zip:
- address_parts.append(', '.join(city_state_zip))
-
- return '\n'.join(address_parts)
-
-
def GetRedisInt(key_name):
try:
result = int(redis_conn.get(key_name).decode('utf-8'))
@@ -1270,74 +1137,75 @@ def StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE):
return 0
-def StoreBeneficiary2DB(parameters, editing_user_id, user_id):
+#def StoreBeneficiary2DB(parameters, editing_user_id, user_id):
- #print('\nCreating create_caretaker\n')
- # Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
- # This can be saved as JSON as is without converting into rows/columns.
- conn = get_db_connection()
- cur = conn.cursor()
- error_string = ""
- if editing_user_id == None or editing_user_id == "":
- editing_user_id = "0"
+ ##print('\nCreating create_caretaker\n')
+ ## Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
+ ## This can be saved as JSON as is without converting into rows/columns.
- try:
+ #conn = get_db_connection()
+ #cur = conn.cursor()
+ #error_string = ""
+ #if editing_user_id == None or editing_user_id == "":
+ #editing_user_id = "0"
- current_utc_time = datetime.datetime.now(timezone.utc)
+ #try:
- # Convert to epoch time
- current_epoch_time = current_utc_time.timestamp()
+ #current_utc_time = datetime.datetime.now(timezone.utc)
- if editing_user_id != "0":
- sql = f"""
- UPDATE public.person_details
- SET
- email = '{CleanObject(parameters.get('email'))}',
- user_name = '{CleanObject(parameters.get('new_user_name'))}',
- first_name = '{CleanObject(parameters.get('first_name'))}',
- last_name = '{CleanObject(parameters.get('last_name'))}',
- address_street = '{CleanObject(parameters.get('address_street'))}',
- address_city = '{CleanObject(parameters.get('address_city'))}',
- address_zip = '{CleanObject(parameters.get('address_zip'))}',
- address_state = '{CleanObject(parameters.get('address_state'))}',
- address_country = '{CleanObject(parameters.get('address_country'))}',
- time_edit = {current_epoch_time},
- user_edit = {user_id},
- role_ids = '{CleanObject(parameters.get('role_ids'))}',
- phone_number = '{CleanObject(parameters.get('phone_number'))}',
- picture = '{CleanObject(parameters.get('picture'))}',
- key = '{CleanObject(parameters.get('key'))}'
- WHERE user_id = {editing_user_id}; -- replace 34 with the actual person_id you want to update
- """
+ ## Convert to epoch time
+ #current_epoch_time = current_utc_time.timestamp()
- else:
- sql = f"""
- INSERT INTO public.person_details
- (role_ids, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
- VALUES
- ('{CleanObject(parameters.get('role_ids'))}', '{CleanObject(parameters.get('email'))}', '{CleanObject(parameters.get('new_user_name'))}',
- '{CleanObject(parameters.get('first_name'))}', '{CleanObject(parameters.get('last_name'))}', '{CleanObject(parameters.get('address_street'))}',
- '{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
- '{CleanObject(parameters.get('address_country'))}', {current_epoch_time}, {user_id}, '{CleanObject(parameters.get('phone_number'))}',
- '{CleanObject(parameters.get('picture'))}', '{CleanObject(parameters.get('key'))}');
- """
- logger.debug(f"sql= {sql}")
- # Execute update query
- cur.execute(sql)
+ #if editing_user_id != "0":
+ #sql = f"""
+ #UPDATE public.person_details
+ #SET
+ #email = '{CleanObject(parameters.get('email'))}',
+ #user_name = '{CleanObject(parameters.get('new_user_name'))}',
+ #first_name = '{CleanObject(parameters.get('first_name'))}',
+ #last_name = '{CleanObject(parameters.get('last_name'))}',
+ #address_street = '{CleanObject(parameters.get('address_street'))}',
+ #address_city = '{CleanObject(parameters.get('address_city'))}',
+ #address_zip = '{CleanObject(parameters.get('address_zip'))}',
+ #address_state = '{CleanObject(parameters.get('address_state'))}',
+ #address_country = '{CleanObject(parameters.get('address_country'))}',
+ #time_edit = {current_epoch_time},
+ #user_edit = {user_id},
+ #role_ids = '{CleanObject(parameters.get('role_ids'))}',
+ #phone_number = '{CleanObject(parameters.get('phone_number'))}',
+ #picture = '{CleanObject(parameters.get('picture'))}',
+ #key = '{CleanObject(parameters.get('key'))}'
+ #WHERE user_id = {editing_user_id}; -- replace 34 with the actual person_id you want to update
+ #"""
- # Commit the changes to the database
- conn.commit()
+ #else:
+ #sql = f"""
+ #INSERT INTO public.person_details
+ #(role_ids, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
+ #VALUES
+ #('{CleanObject(parameters.get('role_ids'))}', '{CleanObject(parameters.get('email'))}', '{CleanObject(parameters.get('new_user_name'))}',
+ #'{CleanObject(parameters.get('first_name'))}', '{CleanObject(parameters.get('last_name'))}', '{CleanObject(parameters.get('address_street'))}',
+ #'{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
+ #'{CleanObject(parameters.get('address_country'))}', {current_epoch_time}, {user_id}, '{CleanObject(parameters.get('phone_number'))}',
+ #'{CleanObject(parameters.get('picture'))}', '{CleanObject(parameters.get('key'))}');
+ #"""
+ #logger.debug(f"sql= {sql}")
+ ## Execute update query
+ #cur.execute(sql)
- # Close the cursor and connection
- cur.close()
- conn.close()
+ ## Commit the changes to the database
+ #conn.commit()
- AddToLog("Written/updated!")
- return 1, error_string
- except Exception as err:
- error_string = traceback.format_exc()
- AddToLog(error_string)
- return 0, error_string
+ ## Close the cursor and connection
+ #cur.close()
+ #conn.close()
+
+ #AddToLog("Written/updated!")
+ #return 1, error_string
+ #except Exception as err:
+ #error_string = traceback.format_exc()
+ #AddToLog(error_string)
+ #return 0, error_string
def is_valid_email_strict(email):
"""
@@ -1435,6 +1303,22 @@ def AccountExists(user_name):
return False
return False
+def AccountByEmailExists(email):
+ conn = get_db_connection()
+
+ sql = "SELECT user_name, user_id, key, role_ids, access_to_deployments FROM public.person_details WHERE email = '" + email + "'"
+
+ with conn.cursor() as cur:
+ cur.execute(sql)
+ result = cur.fetchone() #cur.fetchall()
+ if result != None:
+ if len(result) > 0:
+ return result[0], result[1], result[2], result[3], result[4]
+ else:
+ return None, None, None, None, None
+ return None, None, None, None, None
+
+
def DeleteRecordFromDB(form_data):
caretaker = form_data['user_name']
@@ -1641,10 +1525,162 @@ def DeleteRecordFromDB(form_data):
return 0
def StoreCaretaker2DB(parameters, editing_user_id, user_id):
+ """
+ Store or update caretaker information in the database.
+ Only updates non-empty fields for existing records.
+ """
+ conn = get_db_connection()
+ cur = conn.cursor()
- #print('\nCreating create_caretaker\n')
- # Create a caretaker object. This object has nested properties and various types including numbers, DateTimes and strings.
- # This can be saved as JSON as is without converting into rows/columns.
+ # Normalize editing_user_id
+ if editing_user_id in (None, "", 0, "0"):
+ editing_user_id = None
+
+ try:
+ current_utc_time = datetime.datetime.now(timezone.utc)
+ current_epoch_time = current_utc_time.timestamp()
+
+ if editing_user_id: # UPDATE existing record
+ return _update_person_details(cur, conn, parameters, current_epoch_time, user_id, editing_user_id)
+ else: # INSERT new record
+ return _insert_person_details(cur, conn, parameters, current_epoch_time, user_id)
+
+ except Exception as err:
+ AddToLog(traceback.format_exc())
+ if conn:
+ conn.rollback()
+ return 0
+ finally:
+ if cur:
+ cur.close()
+ if conn:
+ conn.close()
+
+
+def _update_person_details(cur, conn, parameters, current_epoch_time, user_id, editing_user_id):
+ """Update existing person details, only overwriting non-empty fields."""
+
+ # Define field mappings (database_column: parameter_key)
+ field_mappings = {
+ 'role_ids': 'role_ids',
+ 'access_to_deployments': 'access_to_deployments',
+ 'user_name': 'new_user_name',
+ 'first_name': 'first_name',
+ 'last_name': 'last_name',
+ 'address_street': 'address_street',
+ 'address_city': 'address_city',
+ 'address_zip': 'address_zip',
+ 'address_state': 'address_state',
+ 'address_country': 'address_country',
+ 'phone_number': 'phone_number',
+ 'picture': 'picture',
+ 'key': 'key'
+ }
+
+ # Build SET clause dynamically for non-empty fields
+ updates = []
+ values = []
+
+ for db_field, param_key in field_mappings.items():
+ value = parameters.get(param_key)
+ if _is_not_empty(value):
+ updates.append(f"{db_field} = %s")
+ values.append(value)
+
+ # Always update time_edit and user_edit
+ updates.extend(["time_edit = %s", "user_edit = %s"])
+ values.extend([current_epoch_time, user_id])
+
+ if len(updates) > 2: # More than just time_edit and user_edit
+ sql = f"""
+ UPDATE public.person_details
+ SET {', '.join(updates)}
+ WHERE user_id = %s
+ """
+ values.append(editing_user_id)
+
+ logger.debug(f"UPDATE sql= {sql}")
+ logger.debug(f"UPDATE values= {values}")
+
+ cur.execute(sql, values)
+ conn.commit()
+ AddToLog("Updated!")
+ return 1
+ else:
+ # Only time_edit and user_edit would be updated
+ sql = "UPDATE public.person_details SET time_edit = %s, user_edit = %s WHERE user_id = %s"
+ values = [current_epoch_time, user_id, editing_user_id]
+
+ cur.execute(sql, values)
+ conn.commit()
+ AddToLog("Updated timestamp only!")
+ return 1
+
+
+def _insert_person_details(cur, conn, parameters, current_epoch_time, user_id):
+ """Insert new person details record."""
+
+ # Define all fields for INSERT (including email which is required for new records)
+ field_mappings = {
+ 'role_ids': 'role_ids',
+ 'access_to_deployments': 'access_to_deployments',
+ 'email': 'email', # Required for INSERT
+ 'user_name': 'new_user_name',
+ 'first_name': 'first_name',
+ 'last_name': 'last_name',
+ 'address_street': 'address_street',
+ 'address_city': 'address_city',
+ 'address_zip': 'address_zip',
+ 'address_state': 'address_state',
+ 'address_country': 'address_country',
+ 'phone_number': 'phone_number',
+ 'picture': 'picture',
+ 'key': 'key'
+ }
+
+ # Build column names and values
+ columns = list(field_mappings.keys()) + ['time_edit', 'user_edit']
+ values = []
+
+ # Get values for all fields (None for missing ones)
+ for param_key in field_mappings.values():
+ value = parameters.get(param_key)
+ values.append(value if value is not None else None)
+
+ # Add time_edit and user_edit
+ values.extend([current_epoch_time, user_id])
+
+ # Create placeholders
+ placeholders = ', '.join(['%s'] * len(values))
+ column_names = ', '.join(columns)
+
+ sql = f"""
+ INSERT INTO public.person_details ({column_names})
+ VALUES ({placeholders})
+ """
+
+ logger.debug(f"INSERT sql= {sql}")
+ logger.debug(f"INSERT values= {values}")
+
+ cur.execute(sql, values)
+ conn.commit()
+ AddToLog("Written!")
+ return 1
+
+
+def _is_not_empty(value):
+ """Check if a value is not empty/None."""
+ if value is None:
+ return False
+ if isinstance(value, str):
+ return value.strip() != ''
+ if isinstance(value, (list, dict)):
+ return len(value) > 0
+ return True
+
+def StoreBeneficiary2DB(parameters, editing_user_id, user_id):
+    #editing_user_id is the user that will be updated
+    #user_id is the user performing the update
conn = get_db_connection()
cur = conn.cursor()
@@ -1652,61 +1688,119 @@ def StoreCaretaker2DB(parameters, editing_user_id, user_id):
editing_user_id = "0"
try:
-
current_utc_time = datetime.datetime.now(timezone.utc)
-
- # Convert to epoch time
current_epoch_time = current_utc_time.timestamp()
+ # Filter out empty string values
+ filtered_params = {k: v for k, v in parameters.items() if v != "" and v is not None}
+
if editing_user_id != "0":
+ # UPDATE operation - only update non-empty fields
+ if not filtered_params:
+ logger.warning("No non-empty fields to update")
+ return 0, 0
+
+ # Build dynamic SET clause for non-empty fields only
+ set_clauses = []
+ values = []
+
+ field_mapping = {
+ 'role_ids': 'role_ids',
+ 'access_to_deployments': 'access_to_deployments',
+ 'email': 'email',
+ 'new_user_name': 'user_name',
+ 'first_name': 'first_name',
+ 'last_name': 'last_name',
+ 'address_street': 'address_street',
+ 'address_city': 'address_city',
+ 'address_zip': 'address_zip',
+ 'address_state': 'address_state',
+ 'address_country': 'address_country',
+ 'phone_number': 'phone_number',
+ 'picture': 'picture',
+ 'key': 'key'
+ }
+
+ for param_key, db_field in field_mapping.items():
+ if param_key in filtered_params:
+ set_clauses.append(f"{db_field} = %s")
+ values.append(filtered_params[param_key])
+
+ # Always update time_edit and user_edit
+ set_clauses.extend(["time_edit = %s", "user_edit = %s"])
+ values.extend([current_epoch_time, user_id])
+
sql = f"""
UPDATE public.person_details
- SET
- role_ids = '{parameters.get('role_ids')}',
- access_to_deployments = '{parameters.get('access_to_deployments')}',
- email = '{parameters.get('email')}',
- user_name = '{parameters.get('new_user_name')}',
- first_name = '{parameters.get('first_name')}',
- last_name = '{parameters.get('last_name')}',
- address_street = '{parameters.get('address_street')}',
- address_city = '{parameters.get('address_city')}',
- address_zip = '{parameters.get('address_zip')}',
- address_state = '{parameters.get('address_state')}',
- address_country = '{parameters.get('address_country')}',
- time_edit = {current_epoch_time},
- user_edit = {user_id},
- phone_number = '{parameters.get('phone_number')}',
- picture = '{parameters.get('picture')}',
- key = '{parameters.get('key')}'
- WHERE user_id = {editing_user_id}; -- replace 34 with the actual person_id you want to update
+ SET {', '.join(set_clauses)}
+ WHERE user_id = %s
"""
+ values.append(editing_user_id)
else:
- sql = f"""
+ # INSERT operation - use None/NULL for empty fields
+ field_mapping = {
+ 'role_ids': 'role_ids',
+ 'access_to_deployments': 'access_to_deployments',
+ 'email': 'email',
+ 'new_user_name': 'user_name',
+ 'first_name': 'first_name',
+ 'last_name': 'last_name',
+ 'address_street': 'address_street',
+ 'address_city': 'address_city',
+ 'address_zip': 'address_zip',
+ 'address_state': 'address_state',
+ 'address_country': 'address_country',
+ 'phone_number': 'phone_number',
+ 'picture': 'picture',
+ 'key': 'key'
+ }
+
+ # Prepare values, using None for empty strings
+ values = []
+ for param_key in field_mapping.keys():
+ value = parameters.get(param_key)
+ values.append(value if value != "" and value is not None else None)
+
+ values.extend([current_epoch_time, user_id])
+
+ sql = """
INSERT INTO public.person_details
- (role_ids, access_to_deployments, email, user_name, first_name, last_name, address_street, address_city, address_zip, address_state, address_country, time_edit, user_edit, phone_number, picture, key)
- VALUES
- ('{parameters.get('role_ids')}', '{parameters.get('access_to_deployments')}', '{parameters.get('email')}', '{parameters.get('new_user_name')}', '{parameters.get('first_name')}',
- '{parameters.get('last_name')}', '{parameters.get('address_street')}', '{parameters.get('address_city')}', '{parameters.get('address_zip')}', '{parameters.get('address_state')}',
- '{parameters.get('address_country')}', {current_epoch_time}, {user_id}, '{parameters.get('phone_number')}', '{parameters.get('picture')}', '{parameters.get('key')}');
+ (role_ids, access_to_deployments, email, user_name, first_name, last_name,
+ address_street, address_city, address_zip, address_state, address_country,
+ time_edit, user_edit, phone_number, picture, key)
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+ RETURNING user_id
"""
+
logger.debug(f"sql= {sql}")
- # Execute update query
- cur.execute(sql)
+ logger.debug(f"values= {values}")
- # Commit the changes to the database
- conn.commit()
+ # Execute with parameterized query (prevents SQL injection)
+ cur.execute(sql, values)
- # Close the cursor and connection
- cur.close()
- conn.close()
+ # Get the new user_id if this was an INSERT
+ if editing_user_id == "0":
+ new_user_id = cur.fetchone()[0]
+ conn.commit()
+ cur.close()
+ conn.close()
+ AddToLog(f"New user created with ID: {new_user_id}")
+ return new_user_id, 1
+ else:
+ conn.commit()
+ cur.close()
+ conn.close()
+ AddToLog("User updated successfully!")
+ return editing_user_id, 0
- AddToLog("Written/updated!")
- return 1
except Exception as err:
AddToLog(traceback.format_exc())
- return 0
- return ok
+ if conn:
+ conn.rollback()
+ cur.close()
+ conn.close()
+ return 0, 0
def StoreFlow2DB(user_name, time_s, flow_json):
@@ -1739,6 +1833,43 @@ def StoreFlow2DB(user_name, time_s, flow_json):
conn.close()
logger.debug(f"Closing!")
+def PersonInDB(f_l_name):
+ conn = get_db_connection()
+ cur = conn.cursor()
+
+ fname,lname = f_l_name.split(" ")
+ user_id = 0
+ try:
+ sql = f"""
+ SELECT user_id FROM public.person_details
+ WHERE (LOWER(first_name) = LOWER('{fname}') AND LOWER(last_name) = LOWER('{lname}'))
+ OR (LOWER(first_name) = LOWER('{lname}') AND LOWER(last_name) = LOWER('{fname}'));
+ """
+ logger.debug(f"sql= {sql}")
+ cur.execute(sql)
+ user_id = cur.fetchone()[0]
+ except Exception as err:
+ print(str(err))
+ return user_id
+
+def GetDepoymentId(beneficiary_id):
+ conn = get_db_connection()
+ cur = conn.cursor()
+ result = []
+ try:
+ sql = f"""
+ SELECT deployment_id FROM public.deployment_details
+ WHERE beneficiary_id = {beneficiary_id};
+ """
+ logger.debug(f"sql= {sql}")
+ cur.execute(sql)
+ deployments_id = cur.fetchall()
+ result = [item[0] for item in deployments_id]
+ except Exception as err:
+ print(str(err))
+ return result
+
+
def StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json):
conn = get_db_connection()
@@ -1946,11 +2077,12 @@ def StoreDeployment2DB(parameters, editing_deployment_id):
else:
sql = f"""
INSERT INTO public.deployment_details
- (deployment_id, beneficiary_id, caretaker_id, owner_id, installer_id, address_street, address_city, address_zip, address_state, address_country)
+ (deployment_id, beneficiary_id, caretaker_id, owner_id, installer_id, address_street, address_city, address_zip, address_state, address_country, wifis, devices, lat, lng, gps_age)
VALUES
({new_deployment_id}, {CleanObject(parameters.get('beneficiary_id'), "n")}, {CleanObject(parameters.get('caretaker_id'), "n")}, {CleanObject(parameters.get('owner_id'), "n")}, {CleanObject(parameters.get('installer_id'), "n")},
'{CleanObject(parameters.get('address_street'))}', '{CleanObject(parameters.get('address_city'))}', '{CleanObject(parameters.get('address_zip'))}', '{CleanObject(parameters.get('address_state'))}',
- '{CleanObject(parameters.get('address_country'))}');
+ '{CleanObject(parameters.get('address_country'))}','{CleanObject(parameters.get('wifis'))}','{CleanObject(parameters.get('devices'))}','{CleanObject(parameters.get('lat'))}',
+ '{CleanObject(parameters.get('lng'))}','{CleanObject(parameters.get('gps_age'))}');
"""
logger.debug(f"sql= {sql}")
# Execute update query
@@ -1959,23 +2091,21 @@ def StoreDeployment2DB(parameters, editing_deployment_id):
# Commit the changes to the database
conn.commit()
-
-
-
# Close the cursor and connection
cur.close()
conn.close()
AddToLog("Written/updated!")
- return 1
+ if editing_deployment_id != "0":
+ return editing_deployment_id, 0
+ else:
+ return new_deployment_id, 1
except Exception as err:
AddToLog(traceback.format_exc())
- return 0
- return ok
+ return 0, 0
def StoreDevice2DB(parameters, editing_device_id):
- import uuid
- import time
+
call_id = str(uuid.uuid4())[:8]
logger.debug(f"[{call_id}] StoreDevice2DB ENTRY - editing_device_id: {editing_device_id}")
@@ -2775,7 +2905,7 @@ def GetVisibleDevices(deployments):
devices_groups = cur.fetchall()#cur.fetchone()
deployment_ids = []
for deployment_id, dev_group in devices_groups:
- if dev_group != None:
+ if dev_group != None and dev_group != "":
if len(dev_group) > 10:
if "[" not in dev_group:
if "," not in dev_group:
@@ -2791,7 +2921,11 @@ def GetVisibleDevices(deployments):
deployment_ids.append((deployment_id, mac))
else:
print(f"Deployment {deployment_id} has dev_group empty")
- devices_details = GetDeviceDetails(cur, deployment_ids, -1)
+
+ devices_details = []
+ if deployment_ids != []:
+ devices_details = GetDeviceDetails(cur, deployment_ids, -1)
+
#devices_details.append(devices_detail)
return devices_details
@@ -3280,7 +3414,7 @@ def filter_short_groups_c_wc(presence_list, filter_size, device_id_str, from_dat
def GetLastDurationMinutes(deployment_id, selected_devices, filter, ddate):
- global threshold_cache, device_lookup_cache
+ global device_lookup_cache
max_sleep = 0
max_device_id = 0
@@ -3499,7 +3633,7 @@ def GetLastDurationMinutes(deployment_id, selected_devices, filter, ddate):
#start_time_ = myz_data[0][0]
st = time.time()
device_lookup_cache = {}
- threshold_cache = {}
+
temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
presence_map['longpresence'][well_id] = temporary_map_day_plus[well_id]
@@ -4424,18 +4558,26 @@ def getLastEditedBeneficiary(beneficiary):
return "",""
def GetDeploymentNameFromId(Id):
+ deployment_name = ""
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ sqlr = f"SELECT name FROM deployments WHERE id = {Id}"
+ deployment_name = ReadCleanStringDB(cur, sqlr)
+ return deployment_name
- con = sqlite3.connect(main_db)
- con.text_factory = str
- cur = con.cursor()
- results=[]
- SQL = "SELECT name FROM deployments WHERE id =" + Id
- df = cur.execute(SQL)
- results = cur.fetchall()
- if len(results) > 0:
- return results[0][0]
- else:
- return ""
+def GetDeploymentDetailsFromBeneficiary(beneficiary_id, editing_deployment_id):
+ results = []
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ if editing_deployment_id == "0":
+ sqlr = f"SELECT * FROM deployment_details WHERE beneficiary_id = {beneficiary_id}"
+ else:
+ sqlr = f"SELECT * FROM deployment_details WHERE deployment_id = {editing_deployment_id}"
+ cur.execute(sqlr)
+ results = cur.fetchall()
+ if len(results) > 0:
+ return results
+ return []
def GetTimeZoneOfDeployment(deployment_id):
time_zone_st = 'America/Los_Angeles'
@@ -4445,6 +4587,18 @@ def GetTimeZoneOfDeployment(deployment_id):
time_zone_st = ReadCleanStringDB(cur, sqlr)
return time_zone_st
+def GetDeploymentHistoryLast(deployment_id):
+ results = []
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ sqlr = f"SELECT * FROM deployment_history WHERE deployment_id = {deployment_id} ORDER BY time DESC LIMIT 1"
+ cur.execute(sqlr)
+ results = cur.fetchall()
+ if len(results) > 0:
+ return results[0]
+ return []
+
+
def StringToEpoch(date_string, time_zone_s):
"""
Convert a date string to epoch timestamp for start of day (midnight) in specified timezone
@@ -6474,9 +6628,6 @@ def create_light_optimized_heatmap(my_data, bw, fields, wave_m, device_to_index,
if len(my_data) < 1:
return wave_m
- import numpy as np
- import pytz
-
# Get the local timezone
local_tz = pytz.timezone(timezone_st)
@@ -6559,9 +6710,6 @@ def create_temperature_optimized_heatmap(my_data, bw, fields, wave_m, device_to_
if len(my_data) < 1:
return wave_m
- import numpy as np
- import pytz
-
# Get the local timezone
local_tz = pytz.timezone(timezone_st)
@@ -10552,6 +10700,38 @@ def GetBlob(file_name, bucket_name="daily-maps"):
logger.error(f"Error: {traceback.format_exc()}")
return None, None
+def GetJPG(file_name, bucket_name="user-pictures"):
+ """
+ Retrieve image from blob storage
+
+ Args:
+        file_name (str): Name of the file to retrieve from blob storage
+        bucket_name (str): Bucket to read from (default "user-pictures")
+ Returns:
+ tuple: (image_bytes, content_type)
+ Returns None, None if image not found or error occurs
+ """
+ logger.debug(f"GetJPG({file_name})")
+ try:
+ # Get the object from blob storage
+ data = miniIO_blob_client.get_object(
+ bucket_name,
+ file_name
+ )
+
+ # Read the data into bytes
+ data_bytes = data.read()
+ #logger.debug(f"len(data_bytes)={len(data_bytes)}")
+
+ if bucket_name == "user-pictures":
+ return data_bytes, 'image/jpg'
+ else:
+ return data_bytes, 'application/zip'
+
+ except Exception as e:
+ logger.error(f"Error: {traceback.format_exc()}")
+ return None, None
+
def MapFileToDate(map_file):
#'/Volumes/XTRM-Q/wellnuo/daily_maps/1/1_2023-11-07_dayly_image.png'
@@ -10700,8 +10880,6 @@ def ReadSensorDeltas(device_id, sensor, time_from_epoch, time_to_epoch, data_typ
return result
def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
- import datetime
- from datetime import timezone
# Convert epoch to datetime and format as ISO 8601 strings with timezone
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
@@ -13294,6 +13472,14 @@ class RequestParser:
logger.error(f"RequestParser error: {str(e)}")
logger.error(traceback.format_exc())
+ def _read_chunked_safely(self, req):
+ """Safely read chunked data"""
+ try:
+ # Try reading with a reasonable size limit
+ return req.bounded_stream.read(50 * 1024 * 1024) # 50MB
+        except Exception:
+ return b''
+
def _process_debug(self, req):
"""Process request in debug mode - optimized for local development"""
logger.debug("RequestParser: Using DEBUG mode processing")
@@ -13312,7 +13498,7 @@ class RequestParser:
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
# Parse the form data
- import urllib.parse
+
form_data = dict(urllib.parse.parse_qsl(body_text))
# Store in context
@@ -13320,7 +13506,6 @@ class RequestParser:
logger.debug(f"RequestParser: Parsed form data: {form_data}")
# Reset the stream with the original content
- import io
req.stream = io.BytesIO(raw_body)
else:
logger.debug("RequestParser: No body data read")
@@ -13331,26 +13516,30 @@ class RequestParser:
"""Process request in production mode - optimized for OpenFaaS/faasd deployment"""
logger.debug("RequestParser: Using PRODUCTION mode processing")
- # Simple direct read approach for production (OpenFaaS/faasd)
- # We'll limit the read to 1MB for safety
- MAX_SIZE = 1024 * 1024 # 1MB
+ # Try Content-Length first
+ content_length = req.get_header('content-length')
+ if content_length:
+ content_length = int(content_length)
+ logger.debug(f"RequestParser: Reading {content_length} bytes using Content-Length")
+ raw_body = req.stream.read(content_length)
+ else:
+ logger.debug("RequestParser (production): No Content-Length header - reading available data")
+
+ # Read all available data (faasd buffers the complete request)
+ raw_body = req.stream.read() # Read everything available
- # Just read directly from the stream without checking
- raw_body = req.stream.read(MAX_SIZE)
if raw_body:
body_text = raw_body.decode('utf-8')
logger.debug(f"RequestParser: Successfully read {len(body_text)} chars")
# Parse the form data
- import urllib.parse
form_data = dict(urllib.parse.parse_qsl(body_text))
# Store in context
req.context.form_data = form_data
- logger.debug(f"RequestParser: Parsed form data: {form_data}")
+ logger.debug(f"RequestParser: Parsed form data keys: {list(form_data.keys())}")
# Reset the stream with the original content
- import io
req.stream = io.BytesIO(raw_body)
else:
logger.debug("RequestParser: No body data read")
@@ -13367,7 +13556,7 @@ def FindDeviceByRole(deployment_id, location_list):
# "Bedroom Master": 106, "Bedroom Guest": 107, "Conference Room": 108, "Basement": 109, "Attic": 110, "Other": 200}
- ttime = datetime.datetime.utcnow().timestamp()
+ ttime = datetime.datetime.now(datetime.timezone.utc).timestamp()
devices_list, device_ids = GetProximityList(deployment_id, ttime)
@@ -14912,6 +15101,25 @@ def ParseAddress(address_string):
print(f"Error: {e}")
return {}
+def GetTZFromGPS(latitude, longitude):
+ try:
+ payload = {"latitude": latitude,"longitude": longitude }
+
+ response = requests.post(
+ f"{base_url}/gps_to_timezone",
+ data=json.dumps(payload),
+ headers={"Content-Type": "application/json"}
+ )
+ result_map = response.json()
+ if result_map["success"]:
+ return response.json()["timezone"]
+ else:
+ return ""
+
+ except Exception as e:
+ print(f"Error: {e}")
+ return ""
+
def JoinAddress(address_map):
try:
payload = address_map
@@ -14982,7 +15190,10 @@ def StoreToDB(data):
print ("Error in StoreToDB:", e)
def handle_telnyx_webhook(webhook_data, remote_addr, request_id):
- """Process Telnyx webhook events"""
+ """
+ Process Telnyx webhook events with DTMF controls for repeating and hanging up.
+ This version works with both well-alerts.py and tstMP3Call.sh.
+ """
logger.info(f"Processing Telnyx webhook from {remote_addr}, Request-ID: {request_id}")
try:
@@ -14997,104 +15208,126 @@ def handle_telnyx_webhook(webhook_data, remote_addr, request_id):
logger.error("Missing event_type or record_type in webhook data")
return False
- call_control_id = payload.get('call_control_id')
- call_session_id = payload.get('call_session_id')
-
- # Voice Event Handling
- if record_type == 'event':
- logger.info(f"Processing voice event: {event_type}")
-
- StoreToDB(data)
-
- if event_type == 'call.initiated':
- logger.info(f"Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
- elif event_type == 'call.answered':
- logger.info(f"Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
-
- # Get custom headers and log them
- custom_headers = payload.get('custom_headers', [])
- logger.debug(f"Custom headers: {json.dumps(custom_headers)}")
-
- # Check for audio URL
- audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
- tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
-
- logger.info(f"Audio URL: {audio_url}, TTS Payload: {tts_payload}")
-
- # Play audio if URL is provided
- if ENABLE_AUDIO_PLAYBACK and audio_url:
- logger.info(f"Playing audio: {audio_url}")
- client_state = create_client_state("answered", call_control_id, CLIENT_STATE_PREFIX)
-
- play_params = {
- "call_control_id": call_control_id,
- "client_state": client_state,
- "audio_url": audio_url
- }
-
- result = send_telnyx_command("actions/playback_start", play_params, TELNYX_API_KEY)
- logger.info(f"Play command result: {result}")
- return True
-
- elif tts_payload:
- logger.info(f"Speaking text: {tts_payload}")
- client_state = create_client_state("answered", call_control_id, CLIENT_STATE_PREFIX)
-
- speak_params = {
- "payload": tts_payload,
- "voice": DEFAULT_TTS_VOICE,
- "language": DEFAULT_TTS_LANGUAGE,
- "call_control_id": call_control_id,
- "client_state": client_state
- }
-
- result = send_telnyx_command("actions/speak", speak_params, TELNYX_API_KEY)
- logger.info(f"Speak command result: {result}")
- return True
-
- else:
- logger.warning("No audio URL or TTS payload found in call. Hanging up.")
- hangup_params = {
- "call_control_id": call_control_id,
- "client_state": create_client_state("nohdr_hup", call_control_id, CLIENT_STATE_PREFIX)
- }
- send_telnyx_command("actions/hangup", hangup_params, TELNYX_API_KEY)
- return True
-
- # Handle other voice events
- elif event_type in ['call.speak.ended', 'call.playback.ended']:
- status = payload.get('status')
- ended_event_type = event_type.split('.')[-2]
- logger.info(f"Call {ended_event_type} ended: Status={status}")
-
- # Hang up after media finished playing
- hangup_params = {
- "call_control_id": call_control_id,
- "client_state": create_client_state(f"{ended_event_type}_hup", call_control_id, CLIENT_STATE_PREFIX)
- }
- send_telnyx_command("actions/hangup", hangup_params, TELNYX_API_KEY)
- return True
-
- elif event_type == 'call.hangup':
- logger.info(f"Call hung up: Cause={payload.get('cause')}")
- return True
-
- else:
- logger.info(f"Other voice event: {event_type}")
- return True
-
- # SMS Event Handling
- elif record_type == 'message':
- logger.info(f"Processing SMS event: {event_type}")
- # SMS handling code...
+ if record_type == 'message':
+ logger.info("Processing SMS event...")
+ # Existing SMS handling code can go here.
return True
- else:
+ if record_type != 'event':
logger.warning(f"Unknown record type: {record_type}")
return False
+ # --- Voice Event Handling ---
+ call_control_id = payload.get('call_control_id')
+ b64_client_state = payload.get("client_state")
+ state_parts = decode_state(b64_client_state)
+ state_name = state_parts[0] if state_parts else None
+
+ logger.info(f"Processing voice event: {event_type}, State: {state_name}")
+ StoreToDB(data) # Store all events as before
+
+ # --- State Machine Logic ---
+
+ if event_type == 'call.initiated':
+ logger.info(f"Call initiated: From: {payload.get('from')}, To: {payload.get('to')}")
+
+ elif event_type == 'call.answered':
+ logger.info(f"Call answered: From: {payload.get('from')}, To: {payload.get('to')}")
+
+ # Determine media to play
+ custom_headers = payload.get('custom_headers', [])
+ audio_url = find_custom_header(custom_headers, 'X-Audio-Url')
+ tts_payload = find_custom_header(custom_headers, 'X-TTS-Payload')
+
+ media_type = "audio" if audio_url else "tts" if tts_payload else "none"
+ media_value = audio_url or tts_payload
+
+ if media_value:
+ logger.info(f"Playing main message via {media_type}.")
+ # Create the initial state and encode it
+ next_state = encode_state(['MAIN_MEDIA_PLAYED', media_type, media_value])
+
+ if media_type == "audio":
+ play_params = {
+ "call_control_id": call_control_id,
+ "client_state": next_state,
+ "audio_url": media_value
+ }
+ send_telnyx_command("actions/playback_start", play_params, TELNYX_API_KEY)
+ else: # tts
+ speak_params = {
+ "payload": media_value,
+ "voice": DEFAULT_TTS_VOICE,
+ "language": DEFAULT_TTS_LANGUAGE,
+ "call_control_id": call_control_id,
+ "client_state": next_state
+ }
+ send_telnyx_command("actions/speak", speak_params, TELNYX_API_KEY)
+ else:
+ logger.warning("No audio URL or TTS payload found. Hanging up.")
+ send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
+
+ elif event_type in ['call.speak.ended', 'call.playback.ended']:
+ logger.info(f"Media ended with status: {payload.get('status')}")
+
+ if state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']:
+ logger.info("Main message finished. Playing options menu.")
+ _, media_type, media_value = state_parts # Unpack state
+
+ # Create new state for waiting for DTMF input
+ next_state = encode_state(['WAITING_DTMF', media_type, media_value])
+ options_prompt = "press 0 to repeat the message, or press the pound key to hang up."
+
+ gather_params = {
+ "payload": options_prompt,
+ "voice": DEFAULT_TTS_VOICE,
+ "language": DEFAULT_TTS_LANGUAGE,
+ "valid_digits": "0#",
+ "max_digits": 1,
+ "timeout_millis": 10000, # 10 seconds
+ "call_control_id": call_control_id,
+ "client_state": next_state
+ }
+ send_telnyx_command("actions/gather_using_speak", gather_params, TELNYX_API_KEY)
+ else:
+ logger.warning(f"Media ended with unhandled state '{state_name}'. Hanging up.")
+ send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
+
+ elif event_type == 'call.dtmf.received':
+ digit = payload.get('digit')
+ logger.info(f"DTMF Received: Digit='{digit}'")
+ if digit == '#':
+ logger.info("'#' received. Terminating call immediately.")
+ send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
+
+ elif event_type == 'call.gather.ended':
+ logger.info(f"Gather ended. Digits: '{payload.get('digits')}', Status: '{payload.get('status')}'")
+
+ if state_name == 'WAITING_DTMF':
+ digits = payload.get('digits')
+ _, media_type, media_value = state_parts # Unpack state
+
+ if digits == "0":
+ logger.info("'0' pressed. Replaying main message.")
+ next_state = encode_state(['REPLAYING_MEDIA', media_type, media_value])
+ if media_type == "audio":
+ send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": call_control_id, "client_state": next_state}, TELNYX_API_KEY)
+ else: # tts
+ send_telnyx_command("actions/speak", {"payload": media_value, "voice": DEFAULT_TTS_VOICE, "language": DEFAULT_TTS_LANGUAGE, "call_control_id": call_control_id, "client_state": next_state}, TELNYX_API_KEY)
+ else:
+ logger.info("Gather ended without a repeat command (timeout or hangup). Ending call.")
+ send_telnyx_command("actions/hangup", {"call_control_id": call_control_id}, TELNYX_API_KEY)
+
+ elif event_type == 'call.hangup':
+ logger.info(f"Call hung up: Cause='{payload.get('cause')}'")
+
+ else:
+ logger.info(f"Unhandled voice event: {event_type}")
+
+ return True # Acknowledge the webhook
+
except Exception as e:
- logger.exception(f"Error in handle_telnyx_webhook: {e}")
+        logger.exception(f"Critical error in handle_telnyx_webhook: {e}")
return False
# Assume these are defined globally or accessible (e.g., from app_args or .env)
@@ -16119,11 +16352,13 @@ class WellApi:
except ValueError:
last = 1000000
blob_data = read_file("my_devices.html")
+ devices = []
+ if len(privileges) > 0:
- devices = GetVisibleDevices(privileges)
- users = GetUsersFromDeployments(privileges)
- blob_data = UpdateDevicesTable(blob_data, devices, users)
- blob_data = UpdateDeploymentsSelector(blob_data, users)
+ devices = GetVisibleDevices(privileges)
+ users = GetUsersFromDeployments(privileges)
+ blob_data = UpdateDevicesTable(blob_data, devices, users)
+ blob_data = UpdateDeploymentsSelector(blob_data, users)
resp.content_type = "text/html"
resp.text = blob_data
#print(blob_data)
@@ -16270,6 +16505,28 @@ class WellApi:
resp.data = image_bytes
resp.status = falcon.HTTP_200
return
+ elif get_function_name == "get_photo":
+ #image represents day in local time
+
+ imageName = req.params.get('imageName')
+ filename = f"{imageName}"
+
+ #lets read and send image from blob
+ image_bytes, content_type = GetJPG(filename)
+ if debug:
+ resp.media = package_response(f'Log: {debug_string}', HTTP_200)
+ else:
+ if image_bytes is None:
+ raise falcon.HTTPNotFound(
+ title='Image not found',
+                        description=f'Image {imageName} could not be found or retrieved'
+ )
+ sys.stdout.flush()
+ # Set response content type and body
+ resp.content_type = content_type
+ resp.data = image_bytes
+ resp.status = falcon.HTTP_200
+ return
elif get_function_name == "get_sensors_map":
# Get filtering parameters
@@ -16679,7 +16936,7 @@ class WellApi:
def on_post(self, req, resp, path=""):
#ToDo make sure that any read/write data functions are authorized for this user_name
- global threshold_cache, device_lookup_cache
+ global device_lookup_cache
"""Handle POST requests"""
logger.debug(f"on_post called with path: {path}")
@@ -16739,1355 +16996,54 @@ class WellApi:
return
# If we get here, it's not a Telnyx webhook, so process as normal
+ logger.debug(f"on_post called with path: {path}")
+ logger.debug(f"Request content type: {req.content_type}")
+
try:
- # For non-webhook requests, get form data
+ # Use your existing get_form_data function for URL-encoded
form_data = get_form_data(req)
logger.debug(f"Form data: {form_data}")
- except Exception as e:
- logger.exception(f"Error in on_post: {e}")
- resp.status = falcon.HTTP_500
- resp.content_type = falcon.MEDIA_JSON
- resp.text = json.dumps({"error": "Internal Server Error"})
+ # Add the Base64 photo fix
+ if 'beneficiary_photo' in form_data:
+ photo_fixed = quick_fix_base64_photo(form_data)
+ if photo_fixed:
+ logger.debug("Base64 photo processed successfully")
- # Get form data using our helper function - but don't read stream again
- #form_data = get_form_data(req)
- logger.debug(f"Form data: {form_data}")
+ # Your existing processing logic continues here with form_data...
- try:
+ # Get form data using our helper function - but don't read stream again
+ #form_data = get_form_data(req)
+ logger.debug(f"Form data: {form_data}")
- # Get basic parameters
- function = form_data.get('function')
- user_name = form_data.get('user_name')
- logger.debug(f"Function: {function}, User: {user_name}")
- if function != "credentials" and function != "new_user_form" and function != "set_deployment":
- token = form_data.get('token')
- ps = form_data.get('ps')
-
- if ps != "" and ps != None:
- #was token sent in ps field? This allows for token and ps be populated by token or ps
- user_info = verify_token(ps)
- if user_info != None:
- if user_info["username"] == user_name:
- token = ps
- else:
- #is this valid password?
- privileges, user_id = ValidUser(user_name, ps)
- if privileges == "0":
- resp.media = package_response("Log-Out", HTTP_401)
- return
- else:
- token = generate_token(user_name)
-
- user_info = verify_token(token)
-
- if user_info == None or user_info["username"] != user_name:
- resp.media = package_response("Log-Out", HTTP_401)
- return
-
-
- #with get_db_connection() as db_conn:
- privileges = GetPriviledgesOnly(user_name)
-
- # Handle credentials function - most common case
- if function == "credentials":
-
- clientId = form_data.get('clientId')
- nonce = form_data.get('nonce')
- ps = form_data.get('ps')
-
- if not user_name:
- resp.media = package_response("Required field 'user_name' is missing", HTTP_400)
- return
-
- if not clientId:
- resp.media = package_response("Required field 'clientId' is missing", HTTP_400)
- return
-
- if not nonce:
- resp.media = package_response("Required field 'nonce' is missing", HTTP_400)
- return
-
- if not ps:
- resp.media = package_response("Required field 'ps' is missing", HTTP_400)
- return
-
-
-
- if False:
- pass
- else:
- #lets check for real
- privileges, user_id = ValidUser(user_name, ps)
- if privileges == "0":
- access_token = 0
- privileges = 0
- else:
- access_token = generate_token(user_name)
-
- if privileges == "-1":
- max_role = -1
- else:
- max_role = GetMaxRole(user_name)
- if "2" in max_role:
- max_role = 2
- else:
- max_role = 1
-
- token_payload = {'access_token': access_token, 'privileges': privileges, 'user_id': user_id, 'max_role': max_role}
- resp.media = package_response(token_payload)
- resp.status = falcon.HTTP_200
- return
-
- # Handle token-protected functions
- elif function == "messages_age":
-
- macs = form_data.get('macs')
-
- with get_db_connection() as conn:
-
- #print (sqlr)
- with conn.cursor() as cur:
-
- devices = MACsStrToDevIds(cur, macs)
-
- devices_string = ",".join(f"{device_id}" for mac, device_id in devices)
-
-
- sqlr = f"""
- SELECT
- device_id,
- GREATEST(
- radar_last_time,
- sensor_last_time
- ) AS latest_time
- FROM
- (SELECT unnest(ARRAY[{devices_string}]) AS device_id) d
- LEFT JOIN LATERAL (
- SELECT time AS radar_last_time
- FROM radar_readings
- WHERE device_id = d.device_id
- ORDER BY time DESC
- LIMIT 1
- ) r ON true
- LEFT JOIN LATERAL (
- SELECT time AS sensor_last_time
- FROM sensor_readings
- WHERE device_id = d.device_id
- ORDER BY time DESC
- LIMIT 1
- ) s ON true;"""
- logger.debug(f"sqlr= {sqlr}")
- cur.execute(sqlr)
- times_list = cur.fetchall()
- result = {}
- for i in range(len(times_list)):
- if times_list[i][1] is not None:
- result[devices[i][0]] = times_list[i][1].timestamp()
- else:
- result[devices[i][0]] = 0
-
- dataa = {}
- dataa['Command'] = "REPORT"
- dataa['body'] = result
- dataa['time'] = time.time()
- #json_data = json.dumps(dataa)
- payload = {'ok': True, 'response': dataa}
- resp.media = package_response(payload)
- logger.warning(f"Responded: {str(payload)}")
- resp.status = falcon.HTTP_200
- return
-
- elif function == "voice_ask":
-
- question = form_data.get('question')
- deployment_id = form_data.get('deployment_id')
-
- if ('language_from' in form_data):
- language_from = form_data.get('language_from').strip()
- else:
- language_from = "English"
-
- if ('language_to' in form_data):
- language_to = form_data.get('language_to').strip()
- else:
- language_to = "English"
-
-
- result, language = AskGPT(question, language_from, language_to)
-
- if result[0] == "#":
- result = RunCommand(result, {}, deployment_id)
-
- dataa = {}
- dataa['Command'] = "REPORT"
- dataa['body'] = result
- dataa['name'] = ""
- dataa['reflected'] = ""
- dataa['language'] = language
- dataa['time'] = time.time()
- #json_data = json.dumps(dataa)
- payload = {'ok': True, 'response': dataa}
- resp.media = package_response(payload)
- logger.warning(f"Responded: {str(payload)}")
- resp.status = falcon.HTTP_200
- return
-
-
- elif function == "calibrate_thresholds":
- #this will use current date to calibrate radar presence thresholds.
- #make sure that data is well defined (has clear absence/presence signature) for all rooms for chosen day
- #Format of radar_threshold field = [gates_to_use_Presence_list, p_threshold]
- #We need to automate this functionality!!!
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- ddate = form_data.get("date")
- ddate = ddate.replace("_","-")
- selected_date = ddate
-
-
-
- stdev_range = int(form_data.get("stdev_range"))
- timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
- devices_list, device_ids = GetProximityList(deployment_id, timee)
-
- selected_date = FindCalibrationDate(device_ids, ddate)
-
- devices_c = len(devices_list[0])
-
- time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s, stdev_range)
- fields = ["radar_s_min", "radar_s_max", "radar_m_max", "radar_stdev"]
- cnt = 0
- ids_list = []
- for details in devices_list:
- ids_list.append(details[1])
- devices_list_str = ",".join(map(str, ids_list))
- device_to_index = {device: idx for idx, device in enumerate(ids_list)}
-
- minutes = 1440
-
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- for device_index in range(devices_c):
- well_id = devices_list[device_index][0]
- device_id = devices_list[device_index][1]
- location = devices_list[device_index][2]
-
- sql = get_device_radar_s28_only_query(time_from_str, time_to_str, device_id)
- print(sql)
-
- #sql1 = get_deployment_radar_only_colapsed_query(str(device_id), time_from_str, time_to_str, [device_id])
- #print(sql1)
- st = time.time()
- cur.execute(sql)
- my_data = cur.fetchall()
-
- timestamps, stationary, motion = process_raw_data(my_data)
- print(type(stationary))
- # Find threshold above which 20% of points lie
- AveragePercentSpendsThere = AveragePercentPerLocation[Consolidataed_locations[location]]
- threshold_high, threshold_low = FindThreshold(stationary, AveragePercentSpendsThere)
- file_save = f"threshold_graph_{location}.png"
- title = f"{well_id}_{location}"
-
- threshold2, x_percent, y_percent = ShowThresholdGraph(stationary, file_save, threshold_low, threshold_high, title, AveragePercentSpendsThere, location)
-
- print(f"Maximum curvature point found at:")
- print(f"Threshold value: {threshold2:.3f}")
- print(f"X: {x_percent:.1f}% of range")
- print(f"Y: {y_percent:.1f}% of points above")
-
- ShowArray(stationary, threshold2, filename=f"stationary_{devices_list[device_index][0]}.png", title=f"stationary_{devices_list[device_index][0]}_{devices_list[device_index][2]}", style='line')
-
-
- ##threshold
- ##presence_mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
-
- ### Save visualization to file
- ##visualize_detection(timestamps, stationary, motion, presence_mask,
- ## baseline, threshold)
-
- #cur.execute(sql1)
- #my_data1 = cur.fetchall()#cur.fetchone()
- #print(time.time() - st)
- #if my_data == None or my_data1 == None:
- #logger.warning(f"No data found for device_id {device_id}")
- #else:
- #print(type(my_data))
- ##minute,
- ##device_id,
- ##s_min as radar_s_min,
- ##s_max as radar_s_max,
- ##m_max as radar_m_max
-
- #values = [tup[1] for tup in my_data] #10 sec (RAW) data
-
- #hist, bins = np.histogram(values, bins=1000, range=(0, 100))
- #TR, BR = FindZeroIntersection(hist, bins, f'raw_{device_id}_histogram.png', device_id)
- #if True:#device_id == 560:
- #plot(values, filename=f"radar_{device_id}_s28.png", title=f"Radar s28 {device_id}", style='line')
- #plot(hist, filename=f"radar_{device_id}_s28_hist.png", title=f"Radar s28 {device_id} histogram", style='line')
-
- ##life = [tup[3] - tup[2] + tup[4] for tup in my_data1]
- #life, average = calculate_life_and_average(my_data1, stdev_range) #5 min data
- #lhist, lbins = np.histogram(life, bins=1000)
- #TLIFE, BLIFE = FindZeroIntersection(lhist, lbins, f'life_{device_id}_histogram.png', device_id)
-
- #StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE)
- ##for now not needed...
- ##ahist, abins = np.histogram(average, bins=1000)
- ##dummy1, dummy = FindZeroIntersection(ahist, abins)
- #if True:#device_id == 560:
- #plot(average, filename=f"average_{device_id}.png", title=f"Average {device_id}", style='line')
- #plot(life, filename=f"life_{device_id}.png", title=f"Life {device_id}", style='line')
- #plot(lhist, filename=f"life_{device_id}_hist.png", title=f"life {device_id} histogram", style='line')
- ##plot(ahist, filename=f"average_{device_id}_hist.png", title=f"average {device_id} histogram", style='line')
-
-
- sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list)
- print(sql)
- my_data = []
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- cur.execute(sql)
- my_data = cur.fetchall()#cur.fetchone()
- #print(result)
- if my_data == None:
- return False
-
- fields_n = len(fields)
- stripes = devices_c * fields_n #radar_min and radar_max
- print(my_data)
- base_minute = ConvertToBase(time_from_str, time_zone_s)
- #base_minute = my_data[0][0]# min(record[0] for record in my_data)
- #remember: base_minute is offset (smaller) by numbr of minutes in stdev_range
- st = time.time()
- wave_m = np.zeros((stripes, 1440+2*stdev_range, 1), dtype=np.float32)
-
- for record in my_data:
- #(minute,device_id,s28_min,s28_max) = record
- minute, device_id = record[0:2]
- values = record[2:] # All the min/max values
- x = int((minute - base_minute).total_seconds()/60)
-
- device_idx = device_to_index[device_id]
- #value[0] are mins, value[1] are maxes
- #when trying to illustrate presence, use s28_max, when absence (night leaving bed) use s28s_min
- for field_idx, value in enumerate(values):
- # Calculate y position
- y = device_idx * fields_n + field_idx
- wave_m[y, x] = value
-
- print(time.time()-st)
-
- #we need to reliably determine presence and LIFE (motion) in every 5 minutes of data...
- #presence is determined by average value being significntly different from last known base
- #last known base is determined by average value during extended periods ( >= H hours) of low stdev (<) while it is determined that:
- #person is moving elsewhere, and only 1 person is determined to be in monitored area.
-
- #lets calculate stdevs
- for device_index in range(devices_c):
- y = device_index * fields_n
- row = wave_m[y]
- stdevs = np.zeros((1440+2*stdev_range, 1), dtype=np.float32)
- stdevs, amplitude = CalcStdevs(row, stdev_range, stdevs)
- wave_m[y+3] = stdevs
- plot(stdevs, filename=f"radar{device_index}_stdevs.png", title=f"Radar Stedevs {device_index}", style='line')
-
- minutes = 1440
-
-
- device_index = 0
- y = 0
- for device in devices_list:
- wave = wave_m[y][stdev_range: stdev_range + minutes]
- plot(wave,
- filename="radar_wave_min.png",
- title="Radar Signal Min",
- style='line')
- # Create histogram with 1000 bins
- hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
-
- #bin_centers = (bins[:-1] + bins[1:]) / 2
- hist_line = hist # These are your y values
-
- # Plot with proper axis labels
- plot(hist_line,
- filename="radar_histogram_min.png",
- title="Radar Signal Histogram Min (1000 bins)",
- style='line')
-
- wave = wave_m[y+1]
- plot(wave,
- filename="radar_wave_max.png",
- title="Radar Signal",
- style='line')
- # Create histogram with 1000 bins
- hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
-
- #bin_centers = (bins[:-1] + bins[1:]) / 2
- hist_line = hist # These are your y values
-
- # Plot with proper axis labels
- plot(hist_line,
- filename="radar_histogram_max.png",
- title="Radar Signal Histogram Max(1000 bins)",
- style='line')
-
- print(wave)
- device_to_index += 1
-
- #lets see this map
- stretch_by = 5
- arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
- st = time.time()
- for yy in range(stripes):
- rgb_row = []
- row = wave_m[yy]
- for x in range(minutes):
- value = 1280 * row[x] / 100
- rgb_row.append(BestColor(value))
- for stretch_index in range(stretch_by):
- y = yy * stretch_by + stretch_index
- arr_stretched[y, :] = rgb_row
-
- print(time.time()-st)
- filename = f"{deployment_id}/{deployment_id}_{ddate}_min_max_radar.png"
- SaveImageInBlob(filename, arr_stretched, [])
-
-
- return
- elif function == "get_time_deltas":
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- sensor = form_data.get('sensor')
- selected_date = form_data.get('date')
- date_to = form_data.get('to_date')
- radar_part = ""
- sensor_data = {}
- if date_to == None:
- date_to = selected_date
-
- start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
- end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
-
- # Determine direction and swap dates if necessary
- if start_date > end_date:
- selected_date, date_to = date_to, selected_date
-
- device_id = form_data.get('device_id')
-
- data_type = form_data.get('data_type')
- epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
- _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
-
-
- all_slices = {}
-
- cleaned_values = {}
- line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
- st = time.time()
- cleaned_values = [
- (line_part[i][0], (line_part[i][0] - line_part[i-1][0]).total_seconds() * 1000)
- for i in range(1, len(line_part))
- ]
- print(time.time()-st)
-
- if True:
- # Create CSV content as a string
- csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
-
- for i in range(len(line_part)):
- timestamp, value = line_part[i]
-
- if i == 0:
- # First record has no previous record to compare
- time_diff_seconds = 0
- time_diff_ms = 0
- else:
- # Calculate time difference from previous record
- prev_timestamp = line_part[i-1][0]
- time_diff = timestamp - prev_timestamp
- time_diff_seconds = time_diff.total_seconds()
- time_diff_ms = time_diff_seconds * 1000
-
- # Format the row
- row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
- csv_content += row
-
- # Write to file
- with open(f'time_differences_{sensor}_{device_id}.csv', 'w', encoding='utf-8') as f:
- f.write(csv_content)
-
- print(f"CSV file 'time_differences_{sensor}_{device_id}.csv' created successfully!")
-
- line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
-
- sensor_data[sensor] = line_part_t
- dataa = {}
- all_slices = {}
- all_slices[device_id] = sensor_data
- dataa['Function'] = "time_deltas"
- dataa['all_slices'] = all_slices
- dataa['time_zone_st'] = time_zone_s
- dataa['device_id'] = device_id
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_sensor_deltas":
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- sensor = form_data.get('sensor')
- selected_date = form_data.get('date')
- date_to = form_data.get('to_date')
- radar_part = ""
- sensor_data = {}
- if date_to == None:
- date_to = selected_date
-
- start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
- end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
-
- # Determine direction and swap dates if necessary
- if start_date > end_date:
- selected_date, date_to = date_to, selected_date
-
- device_id = form_data.get('device_id')
-
- data_type = form_data.get('data_type')
- epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
- _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
-
-
- all_slices = {}
-
- cleaned_values = {}
- line_part = ReadSensorDeltas(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
- st = time.time()
- cleaned_values =line_part
- #[
- #(line_part[i][0], (line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds())
- #for i in range(1, len(line_part))
- #if (line_part[i][0] - line_part[i-1][0]).total_seconds() > 0
- #and abs((line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds()) <= 100
- #]
- #print(time.time()-st)
-
-
-
- line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
-
- sensor_data[sensor] = line_part_t
- dataa = {}
- all_slices = {}
- all_slices[device_id] = sensor_data
- dataa['Function'] = "time_deltas"
- dataa['all_slices'] = all_slices
- dataa['time_zone_st'] = time_zone_s
- dataa['device_id'] = device_id
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "request_single_slice":
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- selected_date = form_data.get('date')
- date_to = form_data.get('to_date')
- if date_to == None:
- date_to = selected_date
-
- start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
- end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
-
- # Determine direction and swap dates if necessary
- if start_date > end_date:
- selected_date, date_to = date_to, selected_date
-
- devices_list = form_data.get('devices_list')
- radar_details = {}
- #devices_list = '[267,560,"?",null,"64B70888F6F0"]'
- #devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
- sensor_list_loc = [form_data.get('sensor_list')]
- is_nested, device_details = check_and_parse(devices_list)
- if not is_nested:
- device_ids_list = [device_details[1]]
- well_ids_list = [device_details[0]]
- else:
- device_ids_list = list(map(lambda x: x[1], device_details))
- well_ids_list =list(map(lambda x: x[0], device_details))
-
- data_type = form_data.get('data_type')
- epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
- _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
-
- #we need to
- buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
-
- days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
-
- well_id = well_ids_list[0]
- all_slices = {}
- radar_part = ""
- if len(device_details) > 4:
- device_id2_mac = {device_details[1]: device_details[4]}
- #device_id2_mac = {device_details[1]: device_details[3]}
- #epoch_to = '1730592010' #smal sample to test
- #radar_part = form_data.get('radar_part') we need to find what radar part is configured in device settings
- if len(device_details) > 5:
- radar_part_all = device_details[5]
-
- if type(radar_part_all) == str:
- radar_part_all = ["s3_max",int(radar_part_all)]
- elif type(radar_part_all) == int:
- radar_part_all = ["s3_max",radar_part_all]
- elif type(radar_part_all) == list:
- pass
-
-
-
- else:
- radar_part_all = ["s3_max",12]
-
-
- if len(radar_part_all) > 1:
- radar_part = radar_part_all[0]
- #we need only column name and not min or max here
- if "_" in radar_part:
- radar_parts = radar_part.split("_")
- radar_part = radar_parts[0]
- radar_details[device_details[1]] = radar_part_all
-
- #devices = GetVisibleDevices(deployment_id)
- temp_calib, humid_calib = GetCalibMaps(device_ids_list)
-
- for device_id in device_ids_list:
-
- sensor_data = {}
- for sensor in sensor_list_loc:
- st = time.time()
- if days < 3:
- line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
- elif days < 14:
- bucket_size = "1m"
- line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
- else:
- bucket_size = "10m"
- line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
-
- #Lets apply calibration:
- if sensor == "temperature":
- temperature_calib = temperature_offset #float(temp_calib[device_id].split(",")[2])
- line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
-
- if sensor == "humidity":
- line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
-
-
- window = sensor_legal_values[sensor][2]
-
- if False:
- # Create CSV content as a string
- csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
-
- for i in range(len(line_part)):
- timestamp, value = line_part[i]
-
- if i == 0:
- # First record has no previous record to compare
- time_diff_seconds = 0
- time_diff_ms = 0
- else:
- # Calculate time difference from previous record
- prev_timestamp = line_part[i-1][0]
- time_diff = timestamp - prev_timestamp
- time_diff_seconds = time_diff.total_seconds()
- time_diff_ms = time_diff_seconds * 1000
-
- # Format the row
- row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
- csv_content += row
-
- # Write to file
- with open('time_differences.csv', 'w', encoding='utf-8') as f:
- f.write(csv_content)
-
- print("CSV file 'time_differences.csv' created successfully!")
-
- #print("@1", time.time() - st)
- #first = 3300
- #last = 3400
- #line_part = line_part[first:last]
- line_part_t = []
- #st = time.time()
- #line_part_t = [tuple(x[:2]) for x in line_part]
- #print(time.time() - st)
- #st = time.time()
- #line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
- #print(time.time() - st)
-
- line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- st = time.time()
- cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
- #print("@2", time.time() - st)
-
- #Lets add point in minute 0 and minute 1439
-
- #st = time.time()
- #cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
- #print("@3", time.time() - st)
-
- sensor_data[sensor] = cleaned_values
-
-
- if len(device_details) > 4:
- all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
- else:
- all_slices[device_id] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
-
- dataa = {}
- dataa['Function'] = "single_slicedata"
- dataa['devices_list'] = devices_list
- dataa['all_slices'] = all_slices
- dataa['radar_details'] = radar_details
- dataa['time_zone_st'] = time_zone_s
- dataa['well_id'] = well_id
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- return
- elif function == "get_sensor_bucketed_data_by_room_sensor":
- # Inputs:
- # user_name and token
- # deployment_id - from which report gets deployment set (all rooms and devices) to get timezone
- # date - one day in a format YYYY-MM-DD
- # sensor - temperature/radar/etc.. see full list
- # (tells what sensor data to be retrieved)
- # "voc" for all smell use s4 (lower reading is higher smell, max=0 find min for 100%)
- # "radar" returns s28
- # radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
- # bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
- # location - room name (has to be unique)
- # data_type - ML
- # Output: son structure with the following info
- # chart_data with rooms : [list]
- deployment_id = form_data.get('deployment_id')
- selected_date = form_data.get('date')
- sensor = form_data.get('sensor') # one sensor
- radar_part = form_data.get('radar_part')
- buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
- bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
- #bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
- location = form_data.get('location')
- data_type = form_data.get('data_type')
-
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s)
-
- # obtain devices_list for deployment_id
- selected_date = selected_date.replace("_","-")
- devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
- sensor_data = {}
- units = "°C"
- if "America" in time_zone_s:
- units = "°F"
- # see https://www.w3schools.com/cssref/css_colors.php
- sensor_props = {"temperature": ["red", units],
- "humidity": ["blue", "%"],
- "voc": ["orange", "PPM"],
- "co2": ["orange", "PPM"],
- "pressure": ["magenta", "Bar"],
- "radar": ["cyan", "%"],
- "light": ["yellow", "Lux"]}
-
- current_time_la = datetime.datetime.now(pytz.timezone(time_zone_s))
- formatted_time = current_time_la.strftime('%Y-%m-%dT%H:%M:%S') #"2025-02-06T20:09:00"
-
- result_dictionary = {
- "last_report_at": formatted_time,
- "color": sensor_props[sensor][0] if sensor in s_table else "grey",
- "units": sensor_props[sensor][1] if sensor in s_table else "?"
- }
- #sensor_mapping = {"co2": "s4", "voc": "s9"}
- #sensor = sensor_mapping.get(sensor, sensor)
- temp_calib, humid_calib = GetCalibMaps(device_ids)
- #print(device_ids)
- #print(temp_calib)
- #print(humid_calib)
- #print("++++++++++++++++++")
- chart_data = []
- # example data in each element of devices_list is (266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
- for well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to in devices_list:
- loc_and_desc = location_name
- if description != None and description != "":
- loc_and_desc = loc_and_desc + " " + description
-
- if loc_and_desc == location:
- line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
-
- if sensor == "temperature":
- if "," in temp_calib[device_id]:
- temperature_calib = float(temp_calib[device_id].split(",")[2])
- else:
- temperature_calib = -10
-
- line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
-
-
- if sensor == "humidity":
- line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
-
-
- window = sensor_legal_values[sensor][2]
- line_part_t = []
- line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- st = time.time()
- cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
- compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
- if sensor == "temperature":
- if units == "°F":#"America" in time_zone_s:
- compressed_readings = CelsiusToFahrenheitList(compressed_readings)
-
- sensor_data[sensor] = compressed_readings
- chart_data.append({'name': location_name, 'data': compressed_readings})
- result_dictionary['chart_data'] = chart_data
- payload = result_dictionary
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- elif function == "get_sensor_data_by_deployment_id":
- # Inputs:
- # user_name and token
- # deployment_id - from which report gets deployment set (all rooms and devices)
- # date - one day in a format YYYY-MM-DD
- # sensor - temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
- # radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
- # bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
- # data_type - ML
- # Output: son structure with the following info
- # chart_data with rooms : [list]
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- selected_date = form_data.get('date')
- sensor = form_data.get('sensor') # one sensor
- radar_part = form_data.get('radar_part')
- buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
- bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
- #bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
- data_type = form_data.get('data_type')
-
- epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
- # obtain devices_list for deployment_id
- selected_date = selected_date.replace("_","-")
- #timee = LocalDateToUTCEpoch(selected_date, time_zone_s)+5
- devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
- sensor_data = {}
- # see https://www.w3schools.com/cssref/css_colors.php
- sensor_props = {"temperature": ["red", "°C"],
- "humidity": ["blue", "%"],
- "voc": ["orange", "PPM"],
- "co2": ["orange", "PPM"],
- "pressure": ["magenta", "Bar"],
- "radar": ["cyan", "%"],
- "light": ["yellow", "Lux"]}
- result_dictionary = {
- "last_report_at": "2025-02-06T20:09:00",
- "color": sensor_props[sensor][0] if sensor in s_table else "grey",
- "units": sensor_props[sensor][1] if sensor in s_table else "?"
- }
- #sensor_mapping = {"co2": "s4", "voc": "s9"}
- #sensor = sensor_mapping.get(sensor, sensor)
-
- chart_data = []
- for room_details in devices_list:
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
- line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
-
- if sensor == "temperature":
- temperature_calib = float(temp_calib[device_id].split(",")[2])
- line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
-
-
- window = sensor_legal_values[sensor][2]
- line_part_t = []
- line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- st = time.time()
- cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
- compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
-
- #compressed_readings = [(time.strftime("%H:%M", time.gmtime(lst[0][0])), float(sum(t for _, t in lst)/len(lst)))
- #for _, lst in ((k, list(g))
- #for k, g in itertools.groupby(cleaned_values, key=lambda x: time.gmtime(x[0]).tm_hour))]
- sensor_data[sensor] = compressed_readings
- chart_data.append({'name': location_name,
- 'data': compressed_readings})
- result_dictionary['chart_data'] = chart_data
- #all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
- #is_neste, device_details = check_and_parse(devices_list)
- #if not is_nested:
- #device_ids_list = [device_details[1]]
- #well_ids_list = [device_details[0]]
- #else:
- #device_ids_list = list(map(lambda x: x[1], device_details))
- #well_ids_list =list(map(lambda x: x[0], device_details))
- #well_id = well_ids_list[0]
- #all_slices = {}
- #device_id2_mac = {device_details[1]: device_details[4]}
- #for device_id in device_ids_list:
- #device_id2_mac
- #sensor_data = {}
- #for sensor in sensor_list_loc:
- #st = time.time()
- #line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
- #window = sensor_legal_values[sensor][2]
- #line_part_t = []
- #line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- #st = time.time()
- #cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- #cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
- #sensor_data[sensor] = cleaned_values
- #all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
-
- #dataa = {}
- #dataa['Function'] = "single_slicedata"
- #dataa['devices_list'] = devices_list
- #dataa['all_slices'] = all_slices
- #dataa['time_zone_st'] = time_zone_s
- #dataa['well_id'] = well_id
- #resp.media = package_response(dataa)
- #resp.status = falcon.HTTP_200
- result_dictionary2 = {
- "alert_text": "No alert",
- "alert_color": "bg-green-100 text-green-700",
- "last_report_at": "ISO TIMESTAMP",
- "chart_data": [
- {
- "rooms": [
- { "name": "Bathroom",
- "data": [
- {"title": "12AM","value": 20},
- {"title": "01AM","value": 20},
- {"title": "02AM","value": 26},
- {"title": "03AM","value": 16},
- {"title": "04AM","value": 27},
- {"title": "05AM","value": 23},
- {"title": "06AM","value": 26},
- {"title": "07AM","value": 17},
- {"title": "08AM","value": 18},
- {"title": "09AM","value": 21},
- {"title": "10AM","value": 28},
- {"title": "11AM","value": 24},
- {"title": "12PM","value": 18},
- {"title": "01PM","value": 27},
- {"title": "02PM","value": 27},
- {"title": "03PM","value": 19},
- {"title": "04PM","value": 0},
- {"title": "05PM","value": 0},
- {"title": "06PM","value": 0},
- {"title": "07PM","value": 0},
- {"title": "08PM","value": 0},
- {"title": "09PM","value": 0},
- {"title": "10PM","value": 0},
- {"title": "11PM","value": 0}
- ]
- },
- { "name": "Kitchen",
- "data": [
- {"title": "00AM","value": 19},
- {"title": "01AM","value": 10},
- {"title": "02AM","value": 8},
- {"title": "03AM","value": 14},
- {"title": "04AM","value": 20},
- {"title": "05AM","value": 8},
- {"title": "06AM","value": 7},
- {"title": "07AM","value": 17},
- {"title": "08AM","value": 3},
- {"title": "09AM","value": 19},
- {"title": "10AM","value": 4},
- {"title": "11AM","value": 6},
- {"title": "12PM","value": 4},
- {"title": "01PM","value": 14},
- {"title": "02PM","value": 17},
- {"title": "03PM","value": 20},
- {"title": "04PM","value": 19},
- {"title": "05PM","value": 15},
- {"title": "06PM","value": 5},
- {"title": "07PM","value": 19},
- {"title": "08PM","value": 3},
- {"title": "09PM","value": 30},
- {"title": "10PM","value": 1},
- {"title": "11PM","value": 12 }
- ]
- },
- { "name": "Living Room",
- "data": [
- {"title": "00AM","value": 25},
- {"title": "01AM","value": 24},
- {"title": "02AM","value": 19},
- {"title": "03AM","value": 20},
- {"title": "04AM","value": 22},
- {"title": "05AM","value": 20},
- {"title": "06AM","value": 11},
- {"title": "07AM","value": 5},
- {"title": "08AM","value": 16},
- {"title": "09AM","value": 22},
- {"title": "10AM","value": 23},
- {"title": "11AM","value": 14},
- {"title": "12PM","value": 0},
- {"title": "01PM","value": 7},
- {"title": "02PM","value": 25},
- {"title": "03PM","value": 29},
- {"title": "04PM","value": 23},
- {"title": "05PM","value": 27},
- {"title": "06PM","value": 27},
- {"title": "07PM","value": 20},
- {"title": "08PM","value": 2},
- {"title": "09PM","value": 24},
- {"title": "10PM","value": 21},
- {"title": "11PM","value": 14 }
- ]
- }
- ]
- }
- ]
- }
- payload = result_dictionary
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- #AddToLog(payload)
- #return
- elif function == "request_device_slice":
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- epoch_from_utc = form_data.get('epoch_from')
- epoch_to_utc = form_data.get('epoch_to')
- device_id = form_data.get('device_id')
- well_id = form_data.get('well_id')
- MAC = form_data.get('MAC')
- sensor_list_loc = form_data.get('sensors_list')
- sensor_list = sensor_list_loc.split(",")
- device_ids_list = [device_id]
- well_ids_list = [well_id]
- maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
-
- data_type = "RL"
- #epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
-
- #epoch_to = '1730592010' #smal sample to test
- radar_part = form_data.get('radar_part')
- well_id = well_ids_list[0]
- all_slices = {}
- #device_id2_mac = {device_details[1]: device_details[4]}
- for device_id in device_ids_list:
- #device_id2_mac
- sensor_data = {}
- for sensor in sensor_list:
- st = time.time()
- line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
- window = sensor_legal_values[sensor][2]
- #print("@1", time.time() - st)
- #first = 3300
- #last = 3400
- #line_part = line_part[first:last]
- line_part_t = []
- #st = time.time()
- #line_part_t = [tuple(x[:2]) for x in line_part]
- #print(time.time() - st)
- #st = time.time()
- #line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
- #print(time.time() - st)
-
- line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- st = time.time()
- cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- #cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
- #print("@2", time.time() - st)
-
- #Lets add point in minute 0 and minute 1439
-
- #st = time.time()
- #cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
- #print("@3", time.time() - st)
- cleaned_values = ScaleToCommon(cleaned_values_t, sensor)
- sensor_data[sensor] = cleaned_values
- all_slices[device_id] = sensor_data
-
- dataa = {}
- dataa['Function'] = "device_slicedata"
- dataa['all_slices'] = all_slices
- dataa['time_zone_st'] = time_zone_s
- dataa['proximity'] = positions_list
- dataa['well_id'] = well_id
- dataa['MAC'] = MAC
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- #return
- elif function == "request_single_radar_slice":
- deployment_id = form_data.get('deployment_id')
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- selected_date = form_data.get('date')
- devices_list = form_data.get('devices_list')
- ctrl_key_state = form_data.get('ctrl_key_state')
- alt_key_state = form_data.get('alt_key_state')
- #devices_list = '[267,560,"?",null,"64B70888F6F0"]'
- #devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
- sensor_index_list = [form_data.get('sensor_index_list')]
- is_nested, device_details = check_and_parse(devices_list)
- if not is_nested:
- device_ids_list = [device_details[1]]
- well_ids_list = [device_details[0]]
- else:
- device_ids_list = list(map(lambda x: x[1], device_details))
- well_ids_list =list(map(lambda x: x[0], device_details))
-
- epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
-
- #epoch_to = '1730592010' #smal sample to test
- radar_part = form_data.get('radar_part')
- well_id = well_ids_list[0]
- all_slices = {}
- device_id2_mac = {device_details[1]: device_details[4]}
- for device_id in device_ids_list:
- device_id2_mac
- sensor_data = {}
- for sensor_index in sensor_index_list:
- st = time.time()
- sensor = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"][int(sensor_index)]
-
- line_part = ReadRadarDetail(device_id, sensor, epoch_from_utc, epoch_to_utc, alt_key_state)
- window = sensor_legal_values["radar"][2]
-
- line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
- st = time.time()
- cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
- cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
- if len(sensor) < 4:
- sensor_data[sensor+"_max"] = cleaned_values
- else:
- sensor_data[sensor] = cleaned_values
- all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
-
- dataa = {}
- dataa['Function'] = "single_slicedata"
- dataa['devices_list'] = devices_list
- dataa['all_slices'] = all_slices
- dataa['time_zone_st'] = time_zone_s
- dataa['well_id'] = well_id
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- elif function == "get_deployment":
- blob_data = read_file("deployment.html")
- deployment_id = form_data.get('deployment_id')
- #lets update "Deployments" select
- users = GetUsersFromDeployments(privileges)
- blob_data = UpdateDeploymentsSelector(blob_data, users, False, deployment_id)
-
- resp.content_type = "text/html"
- resp.text = blob_data
- return
- elif function == "get_deployment_j":
- deployment_id = form_data.get('deployment_id')
- time_zone_st = GetTimeZoneOfDeployment(deployment_id)
- date = form_data.get('date')
- if date == None:
-
- # Get today's date
- local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
- date = datetime.datetime.now(local_timezone).strftime('%Y-%m-%d')
-
- #epoch_from_utc = int(datetime.datetime.strptime(date, "%Y-%m-%d").timestamp())
- #devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
-
- dataa = {}
- dataa['Function'] = "deployment_details"
- if privileges == "-1":
- deployment = DeploymentDetails(deployment_id)
- dataa['deployment_details'] = deployment
- else:
- privileges = privileges.split(",")
- if deployment_id in privileges:
- deployment = DeploymentDetails(deployment_id)
- dataa['deployment_details'] = deployment
-
-
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
-
- return
- elif function == "set_floor_layout":
- deployment_id = form_data.get('deployment_id')
- layout = form_data.get('layout')
-
- if privileges == "-1" or deployment_id in privileges:
- ok = StoreFloorPlan(deployment_id, layout)
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- else:
- payload = {'ok': 0, 'error': "not allowed"}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- return
- elif function == "get_floor_layout":
- deployment_id = form_data.get('deployment_id')
-
- dataa = {}
- dataa['Function'] = "deployment_details"
- if privileges == "-1":
- layout = GetFloorPlan(deployment_id)
- dataa['layout'] = layout
- else:
- privileges = privileges.split(",")
- if deployment_id in privileges:
- layout = GetFloorPlan(deployment_id)
- dataa['layout'] = layout
-
-
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
-
- return
- elif function == "get_beneficiary":
- user_id = form_data.get('user_id')
- all_beneficiaries = ListBeneficiaries(privileges, user_id)
- beneficiaries_list = []
- for beneficiary_temp in all_beneficiaries:
- beneficiaries_list.append(str(beneficiary_temp[0]))
-
- dataa = {}
- dataa['Function'] = "beneficiary_details"
- if user_id in beneficiaries_list:
- beneficiary = UserDetails(user_id)
- #lets remove fields not relevant for beneficiary
- try:
- del beneficiary['time_edit']
- except:
- pass
-
- try:
- del beneficiary['user_edit']
- except:
- pass
-
- try:
- del beneficiary['access_to_deployments']
- except:
- pass
- dataa['beneficiary_details'] = beneficiary
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- return
- elif function == "get_caretaker":
+ try:
+ # Get basic parameters
+ function = form_data.get('function')
user_name = form_data.get('user_name')
+ logger.debug(f"Function: {function}, User: {user_name}")
+ if function != "credentials" and function != "new_user_form":# and function != "set_deployment":
+ token = form_data.get('token')
+ ps = form_data.get('ps')
- all_caretakers = ListCaretakers(privileges, user_name)
- if len(all_caretakers) > 1:
- user_id = form_data.get('user_id')
- else:
- user_id = str(all_caretakers[0][0])
+ if ps != "" and ps != None:
+ #was token sent in ps field? This allows for token and ps be populated by token or ps
+ user_info = verify_token(ps)
+ if user_info != None:
+ if user_info["username"] == user_name:
+ token = ps
+ else:
+ #is this valid password?
+ privileges, user_id = ValidUser(user_name, ps)
+ if privileges == "0":
+ resp.media = package_response("Log-Out", HTTP_401)
+ return
+ else:
+ token = generate_token(user_name)
- caretakers_list = []
- for caretakers_temp in all_caretakers:
- caretakers_list.append(str(caretakers_temp[0]))
-
- dataa = {}
- dataa['Function'] = "caretaker_details"
- if user_id in caretakers_list:
- caretaker = UserDetails(user_id)
- #lets remove fields not relevant for beneficiary
- try:
- del caretaker['time_edit']
- except:
- pass
-
- try:
- del caretaker['user_edit']
- except:
- pass
-
- dataa['caretaker_details'] = caretaker
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_device":
- device_id = form_data.get('device_id')
- device_mac = form_data.get('mac')
- min_well_id = form_data.get('min_well_id')
- if min_well_id != None:
- min_well_id = int(min_well_id)
-
- if device_mac != None:
- device_det = GetDeviceDetailsSingleFromMac(device_mac)
- print(device_det)
- dataa = {}
- dataa['Function'] = "device_details"
- dataa['device_details'] = device_det
- if device_det == {}:
- dataa['next_well_id'] = GetNextWellId(min_well_id)
- else:
- devices = GetVisibleDevices(privileges)
- dataa = {}
- dataa['Function'] = "device_details"
- dataa['device_details'] = {}
- if privileges == "-1":
- #device_det = GetDeviceDetails(device_id)
- device_det = GetDeviceDetailsSingle(device_id)
- if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
- device_det['radar_threshold'] = '["s3_max",12]'
- dataa['device_details'] = device_det
- else:
- devices_list = []
- for device_id_temp in devices:
- devices_list.append(str(device_id_temp[0]))
-
- if device_id in devices_list:
- device_det = GetDeviceDetailsSingle(device_id)
- if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
- device_det['radar_threshold'] = '["s3_max",12]'
-
-
- dataa['device_details'] = device_det
-
-
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
-
- return
-
-
- elif function == "set_deployment":
-
- user_name = form_data.get('user_name')
- token = form_data.get('token')
- password = form_data.get('password')
-
-
- if user_name == "" or user_name == None: #new user
-
- user_name
- resp.media = package_response("Log-Out", HTTP_401)
- return
-
- if password != "" and password != None:
- privileges, user_id = ValidUser(user_name, password)
- if privileges == "0":
- resp.media = package_response("Log-Out", HTTP_401)
- return
- else:
- token = generate_token(user_name)
-
-
-
- if token != "" and token != None:
user_info = verify_token(token)
if user_info == None or user_info["username"] != user_name:
@@ -18095,620 +17051,3969 @@ class WellApi:
return
- deployment = form_data.get('deployment')
- beneficiary_name = form_data.get('beneficiary_name')
- beneficiary_email = form_data.get('beneficiary_email')
- beneficiary_address = form_data.get('beneficiary_address')
- caretaker_username = form_data.get('caretaker_username')
- caretaker_email = form_data.get('caretaker_email')
- num_people = int(form_data.get('num_people'))
- pets = int(form_data.get('pets'))
- year_born = int(form_data.get('year_born'))
- gender = form_data.get('gender')
- race = form_data.get('race')
- gps_lat = form_data.get('gps_lat')
- gps_lng = form_data.get('gps_lng')
- devices = form_data.get('devices')
- devices_list = json.loads(devices)
- wifis = form_data.get('wifis')
- wifis_list = json.loads(wifis)
+ #with get_db_connection() as db_conn:
+ privileges = GetPriviledgesOnly(user_name)
- print(deployment)
- print(beneficiary_name)
- print(beneficiary_email)
- print(beneficiary_address)
- print(caretaker_username)
- print(caretaker_email)
- print(num_people)
- print(pets)
- print(year_born)
- print(gender)
- print(race)
- print(gps_lat)
- print(gps_lng)
- print(devices_list)
- print(wifis_list)
+ # Handle credentials function - most common case
+ if function == "credentials":
- #lets check if devices listed are not part of existing deployment
- success, result = DevicesNotUsed(devices, user_name)
+ clientId = form_data.get('clientId')
+ nonce = form_data.get('nonce')
+ ps = form_data.get('ps')
- if success:
- if result["deployed"]:
- error_string = f"These devices are already deployed: {result['deployed']}"
- print(error_string)
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
+ if not user_name:
+ resp.media = package_response("Required field 'user_name' is missing", HTTP_400)
return
- if result["not_found"]:
- error_string = f"These devices are not available: {result['not_found']}"
- print(error_string)
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
+ if not clientId:
+ resp.media = package_response("Required field 'clientId' is missing", HTTP_400)
return
- if not result["deployed"] and not result["not_found"]:
- print("All devices are available for deployment")
- ok, error_string = StoreDisclaimer2DB(form_data)
+ if not nonce:
+ resp.media = package_response("Required field 'nonce' is missing", HTTP_400)
+ return
- if ok == 1:
- #Lets check if this user already has account or not
- if not AccountExists(user_name):
- #lets create new account for this caretaker
- if password == None or password == "":
- password = CreatePassword(8)
+ if not ps:
+ resp.media = package_response("Required field 'ps' is missing", HTTP_400)
+ return
- #lets suplement form_data with parts needed for existing StoreCaretaker2DB function
- editing_user_id = "0" #specify if editing existing user, otherwise "0"
- user_id = "0" #user that is adding this record. New user so "0"
- form_data['role_ids'] = "1,2"
- form_data['access_to_deployments'] = "45"
- #form_data['email'] = "" #this one matches
- form_data['new_user_name'] = form_data['user_name']
- form_data['first_name'] = form_data['firstName']
- form_data['last_name'] = form_data['lastName']
- form_data['address_street'] = ""
- form_data['address_city'] = ""
- form_data['address_zip'] = ""
- form_data['address_state'] = ""
- form_data['address_country'] = ""
- form_data['phone_number'] = form_data['phone']
- form_data['picture'] = "/"
- form_data['key'] = password
- ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
- if ok == 1:
+ if False:
+ pass
+ else:
+ #lets check for real
+ privileges, user_id = ValidUser(user_name, ps)
+ if privileges == "0":
+ access_token = 0
+ privileges = 0
+ else:
+ access_token = generate_token(user_name)
- #we need to call cd ~/mqtt-auth-service/acl_manager.py
+ if privileges == "-1":
+ max_role = -1
+ else:
+ max_role = GetMaxRole(user_name)
+ if "2" in max_role:
+ max_role = 2
+ else:
+ max_role = 1
- SendWelcomeEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'])
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
+ token_payload = {'access_token': access_token, 'privileges': privileges, 'user_id': user_id, 'max_role': max_role}
+ resp.media = package_response(token_payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ # Handle token-protected functions
+ elif function == "messages_age":
+
+ macs = form_data.get('macs')
+
+ with get_db_connection() as conn:
+
+ #print (sqlr)
+ with conn.cursor() as cur:
+
+ devices = MACsStrToDevIds(cur, macs)
+
+ devices_string = ",".join(f"{device_id}" for mac, device_id in devices)
+
+
+ sqlr = f"""
+ SELECT
+ device_id,
+ GREATEST(
+ radar_last_time,
+ sensor_last_time
+ ) AS latest_time
+ FROM
+ (SELECT unnest(ARRAY[{devices_string}]) AS device_id) d
+ LEFT JOIN LATERAL (
+ SELECT time AS radar_last_time
+ FROM radar_readings
+ WHERE device_id = d.device_id
+ ORDER BY time DESC
+ LIMIT 1
+ ) r ON true
+ LEFT JOIN LATERAL (
+ SELECT time AS sensor_last_time
+ FROM sensor_readings
+ WHERE device_id = d.device_id
+ ORDER BY time DESC
+ LIMIT 1
+ ) s ON true;"""
+ logger.debug(f"sqlr= {sqlr}")
+ cur.execute(sqlr)
+ times_list = cur.fetchall()
+ result = {}
+ for i in range(len(times_list)):
+ if times_list[i][1] is not None:
+ result[devices[i][0]] = times_list[i][1].timestamp()
+ else:
+ result[devices[i][0]] = 0
+
+ dataa = {}
+ dataa['Command'] = "REPORT"
+ dataa['body'] = result
+ dataa['time'] = time.time()
+ #json_data = json.dumps(dataa)
+ payload = {'ok': True, 'response': dataa}
+ resp.media = package_response(payload)
+ logger.warning(f"Responded: {str(payload)}")
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "voice_ask":
+
+ question = form_data.get('question')
+ deployment_id = form_data.get('deployment_id')
+
+ if ('language_from' in form_data):
+ language_from = form_data.get('language_from').strip()
+ else:
+ language_from = "English"
+
+ if ('language_to' in form_data):
+ language_to = form_data.get('language_to').strip()
+ else:
+ language_to = "English"
+
+
+ result, language = AskGPT(question, language_from, language_to)
+
+ if result[0] == "#":
+ result = RunCommand(result, {}, deployment_id)
+
+ dataa = {}
+ dataa['Command'] = "REPORT"
+ dataa['body'] = result
+ dataa['name'] = ""
+ dataa['reflected'] = ""
+ dataa['language'] = language
+ dataa['time'] = time.time()
+ #json_data = json.dumps(dataa)
+ payload = {'ok': True, 'response': dataa}
+ resp.media = package_response(payload)
+ logger.warning(f"Responded: {str(payload)}")
+ resp.status = falcon.HTTP_200
+ return
+
+
+ elif function == "calibrate_thresholds":
+ #this will use current date to calibrate radar presence thresholds.
+ #make sure that data is well defined (has clear absence/presence signature) for all rooms for chosen day
+ #Format of radar_threshold field = [gates_to_use_Presence_list, p_threshold]
+ #We need to automate this functionality!!!
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ ddate = form_data.get("date")
+ ddate = ddate.replace("_","-")
+ selected_date = ddate
+
+
+
+ stdev_range = int(form_data.get("stdev_range"))
+ timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
+ devices_list, device_ids = GetProximityList(deployment_id, timee)
+
+ selected_date = FindCalibrationDate(device_ids, ddate)
+
+ devices_c = len(devices_list[0])
+
+ time_from_str, time_to_str = GetLocalTimeForDate(selected_date, time_zone_s, stdev_range)
+ fields = ["radar_s_min", "radar_s_max", "radar_m_max", "radar_stdev"]
+ cnt = 0
+ ids_list = []
+ for details in devices_list:
+ ids_list.append(details[1])
+ devices_list_str = ",".join(map(str, ids_list))
+ device_to_index = {device: idx for idx, device in enumerate(ids_list)}
+
+ minutes = 1440
+
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ for device_index in range(devices_c):
+ well_id = devices_list[device_index][0]
+ device_id = devices_list[device_index][1]
+ location = devices_list[device_index][2]
+
+ sql = get_device_radar_s28_only_query(time_from_str, time_to_str, device_id)
+ print(sql)
+
+ #sql1 = get_deployment_radar_only_colapsed_query(str(device_id), time_from_str, time_to_str, [device_id])
+ #print(sql1)
+ st = time.time()
+ cur.execute(sql)
+ my_data = cur.fetchall()
+
+ timestamps, stationary, motion = process_raw_data(my_data)
+ print(type(stationary))
+ # Find threshold above which 20% of points lie
+ AveragePercentSpendsThere = AveragePercentPerLocation[Consolidataed_locations[location]]
+ threshold_high, threshold_low = FindThreshold(stationary, AveragePercentSpendsThere)
+ file_save = f"threshold_graph_{location}.png"
+ title = f"{well_id}_{location}"
+
+ threshold2, x_percent, y_percent = ShowThresholdGraph(stationary, file_save, threshold_low, threshold_high, title, AveragePercentSpendsThere, location)
+
+ print(f"Maximum curvature point found at:")
+ print(f"Threshold value: {threshold2:.3f}")
+ print(f"X: {x_percent:.1f}% of range")
+ print(f"Y: {y_percent:.1f}% of points above")
+
+ ShowArray(stationary, threshold2, filename=f"stationary_{devices_list[device_index][0]}.png", title=f"stationary_{devices_list[device_index][0]}_{devices_list[device_index][2]}", style='line')
+
+
+ ##threshold
+ ##presence_mask, baseline, threshold = detect_presence(timestamps, stationary, motion)
+
+ ### Save visualization to file
+ ##visualize_detection(timestamps, stationary, motion, presence_mask,
+ ## baseline, threshold)
+
+ #cur.execute(sql1)
+ #my_data1 = cur.fetchall()#cur.fetchone()
+ #print(time.time() - st)
+ #if my_data == None or my_data1 == None:
+ #logger.warning(f"No data found for device_id {device_id}")
+ #else:
+ #print(type(my_data))
+ ##minute,
+ ##device_id,
+ ##s_min as radar_s_min,
+ ##s_max as radar_s_max,
+ ##m_max as radar_m_max
+
+ #values = [tup[1] for tup in my_data] #10 sec (RAW) data
+
+ #hist, bins = np.histogram(values, bins=1000, range=(0, 100))
+ #TR, BR = FindZeroIntersection(hist, bins, f'raw_{device_id}_histogram.png', device_id)
+ #if True:#device_id == 560:
+ #plot(values, filename=f"radar_{device_id}_s28.png", title=f"Radar s28 {device_id}", style='line')
+ #plot(hist, filename=f"radar_{device_id}_s28_hist.png", title=f"Radar s28 {device_id} histogram", style='line')
+
+ ##life = [tup[3] - tup[2] + tup[4] for tup in my_data1]
+ #life, average = calculate_life_and_average(my_data1, stdev_range) #5 min data
+ #lhist, lbins = np.histogram(life, bins=1000)
+ #TLIFE, BLIFE = FindZeroIntersection(lhist, lbins, f'life_{device_id}_histogram.png', device_id)
+
+ #StoreThresholds2DB(device_id, TR, BR, TLIFE, BLIFE)
+ ##for now not needed...
+ ##ahist, abins = np.histogram(average, bins=1000)
+ ##dummy1, dummy = FindZeroIntersection(ahist, abins)
+ #if True:#device_id == 560:
+ #plot(average, filename=f"average_{device_id}.png", title=f"Average {device_id}", style='line')
+ #plot(life, filename=f"life_{device_id}.png", title=f"Life {device_id}", style='line')
+ #plot(lhist, filename=f"life_{device_id}_hist.png", title=f"life {device_id} histogram", style='line')
+ ##plot(ahist, filename=f"average_{device_id}_hist.png", title=f"average {device_id} histogram", style='line')
+
+
+ sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list)
+ print(sql)
+ my_data = []
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute(sql)
+ my_data = cur.fetchall()#cur.fetchone()
+ #print(result)
+ if my_data == None:
+ return False
+
+ fields_n = len(fields)
+ stripes = devices_c * fields_n #radar_min and radar_max
+ print(my_data)
+ base_minute = ConvertToBase(time_from_str, time_zone_s)
+ #base_minute = my_data[0][0]# min(record[0] for record in my_data)
+ #remember: base_minute is offset (smaller) by numbr of minutes in stdev_range
+ st = time.time()
+ wave_m = np.zeros((stripes, 1440+2*stdev_range, 1), dtype=np.float32)
+
+ for record in my_data:
+ #(minute,device_id,s28_min,s28_max) = record
+ minute, device_id = record[0:2]
+ values = record[2:] # All the min/max values
+ x = int((minute - base_minute).total_seconds()/60)
+
+ device_idx = device_to_index[device_id]
+ #value[0] are mins, value[1] are maxes
+ #when trying to illustrate presence, use s28_max, when absence (night leaving bed) use s28s_min
+ for field_idx, value in enumerate(values):
+ # Calculate y position
+ y = device_idx * fields_n + field_idx
+ wave_m[y, x] = value
+
+ print(time.time()-st)
+
+ #we need to reliably determine presence and LIFE (motion) in every 5 minutes of data...
+ #presence is determined by average value being significntly different from last known base
+ #last known base is determined by average value during extended periods ( >= H hours) of low stdev (<) while it is determined that:
+ #person is moving elsewhere, and only 1 person is determined to be in monitored area.
+
+ #lets calculate stdevs
+ for device_index in range(devices_c):
+ y = device_index * fields_n
+ row = wave_m[y]
+ stdevs = np.zeros((1440+2*stdev_range, 1), dtype=np.float32)
+ stdevs, amplitude = CalcStdevs(row, stdev_range, stdevs)
+ wave_m[y+3] = stdevs
+ plot(stdevs, filename=f"radar{device_index}_stdevs.png", title=f"Radar Stedevs {device_index}", style='line')
+
+ minutes = 1440
+
+
+ device_index = 0
+ y = 0
+ for device in devices_list:
+ wave = wave_m[y][stdev_range: stdev_range + minutes]
+ plot(wave,
+ filename="radar_wave_min.png",
+ title="Radar Signal Min",
+ style='line')
+ # Create histogram with 1000 bins
+ hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
+
+ #bin_centers = (bins[:-1] + bins[1:]) / 2
+ hist_line = hist # These are your y values
+
+ # Plot with proper axis labels
+ plot(hist_line,
+ filename="radar_histogram_min.png",
+ title="Radar Signal Histogram Min (1000 bins)",
+ style='line')
+
+ wave = wave_m[y+1]
+ plot(wave,
+ filename="radar_wave_max.png",
+ title="Radar Signal",
+ style='line')
+ # Create histogram with 1000 bins
+ hist, bins = np.histogram(wave, bins=1000, range=(0, 100))
+
+ #bin_centers = (bins[:-1] + bins[1:]) / 2
+ hist_line = hist # These are your y values
+
+ # Plot with proper axis labels
+ plot(hist_line,
+ filename="radar_histogram_max.png",
+ title="Radar Signal Histogram Max(1000 bins)",
+ style='line')
+
+ print(wave)
+ device_to_index += 1
+
+ #lets see this map
+ stretch_by = 5
+ arr_stretched = np.zeros((int(stripes*stretch_by), minutes, 3), dtype=np.uint8) #array to be written as image 3 for RGB channels
+ st = time.time()
+ for yy in range(stripes):
+ rgb_row = []
+ row = wave_m[yy]
+ for x in range(minutes):
+ value = 1280 * row[x] / 100
+ rgb_row.append(BestColor(value))
+ for stretch_index in range(stretch_by):
+ y = yy * stretch_by + stretch_index
+ arr_stretched[y, :] = rgb_row
+
+ print(time.time()-st)
+ filename = f"{deployment_id}/{deployment_id}_{ddate}_min_max_radar.png"
+ SaveImageInBlob(filename, arr_stretched, [])
+
+
+ return
+ elif function == "get_time_deltas":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ sensor = form_data.get('sensor')
+ selected_date = form_data.get('date')
+ date_to = form_data.get('to_date')
+ radar_part = ""
+ sensor_data = {}
+ if date_to == None:
+ date_to = selected_date
+
+ start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
+ end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
+
+ # Determine direction and swap dates if necessary
+ if start_date > end_date:
+ selected_date, date_to = date_to, selected_date
+
+ device_id = form_data.get('device_id')
+
+ data_type = form_data.get('data_type')
+ epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+ _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
+
+
+ all_slices = {}
+
+ cleaned_values = {}
+ line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
+ st = time.time()
+ cleaned_values = [
+ (line_part[i][0], (line_part[i][0] - line_part[i-1][0]).total_seconds() * 1000)
+ for i in range(1, len(line_part))
+ ]
+ print(time.time()-st)
+
+ if True:
+ # Create CSV content as a string
+ csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
+
+ for i in range(len(line_part)):
+ timestamp, value = line_part[i]
+
+ if i == 0:
+ # First record has no previous record to compare
+ time_diff_seconds = 0
+ time_diff_ms = 0
+ else:
+ # Calculate time difference from previous record
+ prev_timestamp = line_part[i-1][0]
+ time_diff = timestamp - prev_timestamp
+ time_diff_seconds = time_diff.total_seconds()
+ time_diff_ms = time_diff_seconds * 1000
+
+ # Format the row
+ row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
+ csv_content += row
+
+ # Write to file
+ with open(f'time_differences_{sensor}_{device_id}.csv', 'w', encoding='utf-8') as f:
+ f.write(csv_content)
+
+ print(f"CSV file 'time_differences_{sensor}_{device_id}.csv' created successfully!")
+
+ line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
+
+ sensor_data[sensor] = line_part_t
+ dataa = {}
+ all_slices = {}
+ all_slices[device_id] = sensor_data
+ dataa['Function'] = "time_deltas"
+ dataa['all_slices'] = all_slices
+ dataa['time_zone_st'] = time_zone_s
+ dataa['device_id'] = device_id
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "get_sensor_deltas":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ sensor = form_data.get('sensor')
+ selected_date = form_data.get('date')
+ date_to = form_data.get('to_date')
+ radar_part = ""
+ sensor_data = {}
+ if date_to == None:
+ date_to = selected_date
+
+ start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
+ end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
+
+ # Determine direction and swap dates if necessary
+ if start_date > end_date:
+ selected_date, date_to = date_to, selected_date
+
+ device_id = form_data.get('device_id')
+
+ data_type = form_data.get('data_type')
+ epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+ _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
+
+
+ all_slices = {}
+
+ cleaned_values = {}
+ line_part = ReadSensorDeltas(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
+ st = time.time()
+ cleaned_values =line_part
+ #[
+ #(line_part[i][0], (line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds())
+ #for i in range(1, len(line_part))
+ #if (line_part[i][0] - line_part[i-1][0]).total_seconds() > 0
+ #and abs((line_part[i][1] - line_part[i-1][1]) / (line_part[i][0] - line_part[i-1][0]).total_seconds()) <= 100
+ #]
+ #print(time.time()-st)
+
+
+
+ line_part_t = [(x[0].timestamp(), x[1]) for x in cleaned_values]
+
+ sensor_data[sensor] = line_part_t
+ dataa = {}
+ all_slices = {}
+ all_slices[device_id] = sensor_data
+ dataa['Function'] = "time_deltas"
+ dataa['all_slices'] = all_slices
+ dataa['time_zone_st'] = time_zone_s
+ dataa['device_id'] = device_id
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "request_single_slice":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ selected_date = form_data.get('date')
+ date_to = form_data.get('to_date')
+ if date_to == None:
+ date_to = selected_date
+
+ start_date = datetime.datetime.strptime(selected_date, '%Y-%m-%d')
+ end_date = datetime.datetime.strptime(date_to, '%Y-%m-%d')
+
+ # Determine direction and swap dates if necessary
+ if start_date > end_date:
+ selected_date, date_to = date_to, selected_date
+
+ devices_list = form_data.get('devices_list')
+ radar_details = {}
+ #devices_list = '[267,560,"?",null,"64B70888F6F0"]'
+ #devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
+ sensor_list_loc = [form_data.get('sensor_list')]
+ is_nested, device_details = check_and_parse(devices_list)
+ if not is_nested:
+ device_ids_list = [device_details[1]]
+ well_ids_list = [device_details[0]]
+ else:
+ device_ids_list = list(map(lambda x: x[1], device_details))
+ well_ids_list =list(map(lambda x: x[0], device_details))
+
+ data_type = form_data.get('data_type')
+ epoch_from_utc, _ = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+ _, epoch_to_utc = GetLocalTimeEpochsForDate(date_to, time_zone_s) #>= #<
+
+ #we need to
+ buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
+
+ days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
+
+ well_id = well_ids_list[0]
+ all_slices = {}
+ radar_part = ""
+ if len(device_details) > 4:
+ device_id2_mac = {device_details[1]: device_details[4]}
+ #device_id2_mac = {device_details[1]: device_details[3]}
+ #epoch_to = '1730592010' #smal sample to test
+ #radar_part = form_data.get('radar_part') we need to find what radar part is configured in device settings
+ if len(device_details) > 5:
+ radar_part_all = device_details[5]
+
+ if type(radar_part_all) == str:
+ radar_part_all = ["s3_max",int(radar_part_all)]
+ elif type(radar_part_all) == int:
+ radar_part_all = ["s3_max",radar_part_all]
+ elif type(radar_part_all) == list:
+ pass
+
+
+
+ else:
+ radar_part_all = ["s3_max",12]
+
+
+ if len(radar_part_all) > 1:
+ radar_part = radar_part_all[0]
+ #we need only column name and not min or max here
+ if "_" in radar_part:
+ radar_parts = radar_part.split("_")
+ radar_part = radar_parts[0]
+ radar_details[device_details[1]] = radar_part_all
+
+ #devices = GetVisibleDevices(deployment_id)
+ temp_calib, humid_calib = GetCalibMaps(device_ids_list)
+
+ for device_id in device_ids_list:
+
+ sensor_data = {}
+ for sensor in sensor_list_loc:
+ st = time.time()
+ if days < 3:
+ line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
+ elif days < 14:
+ bucket_size = "1m"
+ line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
+ else:
+ bucket_size = "10m"
+ line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
+
+ #Lets apply calibration:
+ if sensor == "temperature":
+ temperature_calib = temperature_offset #float(temp_calib[device_id].split(",")[2])
+ line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
+
+ if sensor == "humidity":
+ line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
+
+
+ window = sensor_legal_values[sensor][2]
+
+ if False:
+ # Create CSV content as a string
+ csv_content = "Record_Index,Timestamp,Value,Time_Diff_Seconds,Time_Diff_Milliseconds\n"
+
+ for i in range(len(line_part)):
+ timestamp, value = line_part[i]
+
+ if i == 0:
+ # First record has no previous record to compare
+ time_diff_seconds = 0
+ time_diff_ms = 0
+ else:
+ # Calculate time difference from previous record
+ prev_timestamp = line_part[i-1][0]
+ time_diff = timestamp - prev_timestamp
+ time_diff_seconds = time_diff.total_seconds()
+ time_diff_ms = time_diff_seconds * 1000
+
+ # Format the row
+ row = f"{i},{timestamp.isoformat()},{value},{round(time_diff_seconds, 6)},{round(time_diff_ms, 3)}\n"
+ csv_content += row
+
+ # Write to file
+ with open('time_differences.csv', 'w', encoding='utf-8') as f:
+ f.write(csv_content)
+
+ print("CSV file 'time_differences.csv' created successfully!")
+
+ #print("@1", time.time() - st)
+ #first = 3300
+ #last = 3400
+ #line_part = line_part[first:last]
+ line_part_t = []
+ #st = time.time()
+ #line_part_t = [tuple(x[:2]) for x in line_part]
+ #print(time.time() - st)
+ #st = time.time()
+ #line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
+ #print(time.time() - st)
+
+ line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ st = time.time()
+ cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
+ #print("@2", time.time() - st)
+
+ #Lets add point in minute 0 and minute 1439
+
+ #st = time.time()
+ #cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
+ #print("@3", time.time() - st)
+
+ sensor_data[sensor] = cleaned_values
+
+
+ if len(device_details) > 4:
+ all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
+ else:
+ all_slices[device_id] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
+
+ dataa = {}
+ dataa['Function'] = "single_slicedata"
+ dataa['devices_list'] = devices_list
+ dataa['all_slices'] = all_slices
+ dataa['radar_details'] = radar_details
+ dataa['time_zone_st'] = time_zone_s
+ dataa['well_id'] = well_id
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "get_sensor_bucketed_data_by_room_sensor":
+ # Inputs:
+ # user_name and token
+ # deployment_id - from which report gets deployment set (all rooms and devices) to get timezone
+ # date - one day in a format YYYY-MM-DD
+ # sensor - temperature/radar/etc.. see full list
+ # (tells what sensor data to be retrieved)
+ # "voc" for all smell use s4 (lower reading is higher smell, max=0 find min for 100%)
+ # "radar" returns s28
+ # radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
+ # bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
+ # location - room name (has to be unique)
+ # data_type - ML
+ # Output: son structure with the following info
+ # chart_data with rooms : [list]
+ deployment_id = form_data.get('deployment_id')
+ selected_date = form_data.get('date')
+ sensor = form_data.get('sensor') # one sensor
+ radar_part = form_data.get('radar_part')
+ buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
+ bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
+ #bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
+ location = form_data.get('location')
+ data_type = form_data.get('data_type')
+
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s)
+
+ # obtain devices_list for deployment_id
+ selected_date = selected_date.replace("_","-")
+ devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
+ sensor_data = {}
+ units = "°C"
+ if "America" in time_zone_s:
+ units = "°F"
+ # see https://www.w3schools.com/cssref/css_colors.php
+ sensor_props = {"temperature": ["red", units],
+ "humidity": ["blue", "%"],
+ "voc": ["orange", "PPM"],
+ "co2": ["orange", "PPM"],
+ "pressure": ["magenta", "Bar"],
+ "radar": ["cyan", "%"],
+ "light": ["yellow", "Lux"]}
+
+ current_time_la = datetime.datetime.now(pytz.timezone(time_zone_s))
+ formatted_time = current_time_la.strftime('%Y-%m-%dT%H:%M:%S') #"2025-02-06T20:09:00"
+
+ result_dictionary = {
+ "last_report_at": formatted_time,
+ "color": sensor_props[sensor][0] if sensor in s_table else "grey",
+ "units": sensor_props[sensor][1] if sensor in s_table else "?"
+ }
+ #sensor_mapping = {"co2": "s4", "voc": "s9"}
+ #sensor = sensor_mapping.get(sensor, sensor)
+ temp_calib, humid_calib = GetCalibMaps(device_ids)
+ #print(device_ids)
+ #print(temp_calib)
+ #print(humid_calib)
+ #print("++++++++++++++++++")
+ chart_data = []
+ # example data in each element of devices_list is (266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+ for well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to in devices_list:
+ loc_and_desc = location_name
+ if description != None and description != "":
+ loc_and_desc = loc_and_desc + " " + description
+
+ if loc_and_desc == location:
+ line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
+
+ if sensor == "temperature":
+ if "," in temp_calib[device_id]:
+ temperature_calib = float(temp_calib[device_id].split(",")[2])
+ else:
+ temperature_calib = -10
+
+ line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
+
+
+ if sensor == "humidity":
+ line_part = [(timestamp, value + humidity_offset) for timestamp, value in line_part]
+
+
+ window = sensor_legal_values[sensor][2]
+ line_part_t = []
+ line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ st = time.time()
+ cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
+ compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
+ if sensor == "temperature":
+ if units == "°F":#"America" in time_zone_s:
+ compressed_readings = CelsiusToFahrenheitList(compressed_readings)
+
+ sensor_data[sensor] = compressed_readings
+ chart_data.append({'name': location_name, 'data': compressed_readings})
+ result_dictionary['chart_data'] = chart_data
+ payload = result_dictionary
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ elif function == "get_sensor_data_by_deployment_id":
+ # Inputs:
+ # user_name and token
+ # deployment_id - from which report gets deployment set (all rooms and devices)
+ # date - one day in a format YYYY-MM-DD
+ # sensor - temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
+ # radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
+ # bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
+ # data_type - ML
+ # Output: son structure with the following info
+ # chart_data with rooms : [list]
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ selected_date = form_data.get('date')
+ sensor = form_data.get('sensor') # one sensor
+ radar_part = form_data.get('radar_part')
+ buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
+ bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
+ #bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
+ data_type = form_data.get('data_type')
+
+ epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+ # obtain devices_list for deployment_id
+ selected_date = selected_date.replace("_","-")
+ #timee = LocalDateToUTCEpoch(selected_date, time_zone_s)+5
+ devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
+ sensor_data = {}
+ # see https://www.w3schools.com/cssref/css_colors.php
+ sensor_props = {"temperature": ["red", "°C"],
+ "humidity": ["blue", "%"],
+ "voc": ["orange", "PPM"],
+ "co2": ["orange", "PPM"],
+ "pressure": ["magenta", "Bar"],
+ "radar": ["cyan", "%"],
+ "light": ["yellow", "Lux"]}
+ result_dictionary = {
+ "last_report_at": "2025-02-06T20:09:00",
+ "color": sensor_props[sensor][0] if sensor in s_table else "grey",
+ "units": sensor_props[sensor][1] if sensor in s_table else "?"
+ }
+ #sensor_mapping = {"co2": "s4", "voc": "s9"}
+ #sensor = sensor_mapping.get(sensor, sensor)
+
+ chart_data = []
+ for room_details in devices_list:
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+ line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
+
+ if sensor == "temperature":
+ temperature_calib = float(temp_calib[device_id].split(",")[2])
+ line_part = [(timestamp, value + temperature_calib) for timestamp, value in line_part]
+
+
+ window = sensor_legal_values[sensor][2]
+ line_part_t = []
+ line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ st = time.time()
+ cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
+ compressed_readings = convert_timestamps_lc(cleaned_values, time_zone_s)
+
+ #compressed_readings = [(time.strftime("%H:%M", time.gmtime(lst[0][0])), float(sum(t for _, t in lst)/len(lst)))
+ #for _, lst in ((k, list(g))
+ #for k, g in itertools.groupby(cleaned_values, key=lambda x: time.gmtime(x[0]).tm_hour))]
+ sensor_data[sensor] = compressed_readings
+ chart_data.append({'name': location_name,
+ 'data': compressed_readings})
+ result_dictionary['chart_data'] = chart_data
+ #all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
+ #is_nested, device_details = check_and_parse(devices_list)
+ #if not is_nested:
+ #device_ids_list = [device_details[1]]
+ #well_ids_list = [device_details[0]]
+ #else:
+ #device_ids_list = list(map(lambda x: x[1], device_details))
+ #well_ids_list =list(map(lambda x: x[0], device_details))
+ #well_id = well_ids_list[0]
+ #all_slices = {}
+ #device_id2_mac = {device_details[1]: device_details[4]}
+ #for device_id in device_ids_list:
+ #device_id2_mac
+ #sensor_data = {}
+ #for sensor in sensor_list_loc:
+ #st = time.time()
+ #line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
+ #window = sensor_legal_values[sensor][2]
+ #line_part_t = []
+ #line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ #st = time.time()
+ #cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ #cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
+ #sensor_data[sensor] = cleaned_values
+ #all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
+
+ #dataa = {}
+ #dataa['Function'] = "single_slicedata"
+ #dataa['devices_list'] = devices_list
+ #dataa['all_slices'] = all_slices
+ #dataa['time_zone_st'] = time_zone_s
+ #dataa['well_id'] = well_id
+ #resp.media = package_response(dataa)
+ #resp.status = falcon.HTTP_200
+ result_dictionary2 = {
+ "alert_text": "No alert",
+ "alert_color": "bg-green-100 text-green-700",
+ "last_report_at": "ISO TIMESTAMP",
+ "chart_data": [
+ {
+ "rooms": [
+ { "name": "Bathroom",
+ "data": [
+ {"title": "12AM","value": 20},
+ {"title": "01AM","value": 20},
+ {"title": "02AM","value": 26},
+ {"title": "03AM","value": 16},
+ {"title": "04AM","value": 27},
+ {"title": "05AM","value": 23},
+ {"title": "06AM","value": 26},
+ {"title": "07AM","value": 17},
+ {"title": "08AM","value": 18},
+ {"title": "09AM","value": 21},
+ {"title": "10AM","value": 28},
+ {"title": "11AM","value": 24},
+ {"title": "12PM","value": 18},
+ {"title": "01PM","value": 27},
+ {"title": "02PM","value": 27},
+ {"title": "03PM","value": 19},
+ {"title": "04PM","value": 0},
+ {"title": "05PM","value": 0},
+ {"title": "06PM","value": 0},
+ {"title": "07PM","value": 0},
+ {"title": "08PM","value": 0},
+ {"title": "09PM","value": 0},
+ {"title": "10PM","value": 0},
+ {"title": "11PM","value": 0}
+ ]
+ },
+ { "name": "Kitchen",
+ "data": [
+ {"title": "00AM","value": 19},
+ {"title": "01AM","value": 10},
+ {"title": "02AM","value": 8},
+ {"title": "03AM","value": 14},
+ {"title": "04AM","value": 20},
+ {"title": "05AM","value": 8},
+ {"title": "06AM","value": 7},
+ {"title": "07AM","value": 17},
+ {"title": "08AM","value": 3},
+ {"title": "09AM","value": 19},
+ {"title": "10AM","value": 4},
+ {"title": "11AM","value": 6},
+ {"title": "12PM","value": 4},
+ {"title": "01PM","value": 14},
+ {"title": "02PM","value": 17},
+ {"title": "03PM","value": 20},
+ {"title": "04PM","value": 19},
+ {"title": "05PM","value": 15},
+ {"title": "06PM","value": 5},
+ {"title": "07PM","value": 19},
+ {"title": "08PM","value": 3},
+ {"title": "09PM","value": 30},
+ {"title": "10PM","value": 1},
+ {"title": "11PM","value": 12 }
+ ]
+ },
+ { "name": "Living Room",
+ "data": [
+ {"title": "00AM","value": 25},
+ {"title": "01AM","value": 24},
+ {"title": "02AM","value": 19},
+ {"title": "03AM","value": 20},
+ {"title": "04AM","value": 22},
+ {"title": "05AM","value": 20},
+ {"title": "06AM","value": 11},
+ {"title": "07AM","value": 5},
+ {"title": "08AM","value": 16},
+ {"title": "09AM","value": 22},
+ {"title": "10AM","value": 23},
+ {"title": "11AM","value": 14},
+ {"title": "12PM","value": 0},
+ {"title": "01PM","value": 7},
+ {"title": "02PM","value": 25},
+ {"title": "03PM","value": 29},
+ {"title": "04PM","value": 23},
+ {"title": "05PM","value": 27},
+ {"title": "06PM","value": 27},
+ {"title": "07PM","value": 20},
+ {"title": "08PM","value": 2},
+ {"title": "09PM","value": 24},
+ {"title": "10PM","value": 21},
+ {"title": "11PM","value": 14 }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ payload = result_dictionary
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ #AddToLog(payload)
+ #return
+ elif function == "request_device_slice":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ epoch_from_utc = form_data.get('epoch_from')
+ epoch_to_utc = form_data.get('epoch_to')
+ device_id = form_data.get('device_id')
+ well_id = form_data.get('well_id')
+ MAC = form_data.get('MAC')
+ sensor_list_loc = form_data.get('sensors_list')
+ sensor_list = sensor_list_loc.split(",")
+ device_ids_list = [device_id]
+ well_ids_list = [well_id]
+ maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
+
+ data_type = "RL"
+ #epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+
+ #epoch_to = '1730592010' #small sample to test
+ radar_part = form_data.get('radar_part')
+ well_id = well_ids_list[0]
+ all_slices = {}
+ #device_id2_mac = {device_details[1]: device_details[4]}
+ for device_id in device_ids_list:
+ #device_id2_mac
+ sensor_data = {}
+ for sensor in sensor_list:
+ st = time.time()
+ line_part = ReadSensor(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part)
+ window = sensor_legal_values[sensor][2]
+ #print("@1", time.time() - st)
+ #first = 3300
+ #last = 3400
+ #line_part = line_part[first:last]
+ line_part_t = []
+ #st = time.time()
+ #line_part_t = [tuple(x[:2]) for x in line_part]
+ #print(time.time() - st)
+ #st = time.time()
+ #line_part_t = list({(dt.timestamp(), value) for dt, value in line_part})
+ #print(time.time() - st)
+
+ line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ st = time.time()
+ cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ #cleaned_values = cleaned_values_t #add_boundary_points(cleaned_values_t, time_zone_s)
+ #print("@2", time.time() - st)
+
+ #Lets add point in minute 0 and minute 1439
+
+ #st = time.time()
+ #cleaned_values = clean_data_fast(line_part_t, window=5, threshold=2.0)
+ #print("@3", time.time() - st)
+ cleaned_values = ScaleToCommon(cleaned_values_t, sensor)
+ sensor_data[sensor] = cleaned_values
+ all_slices[device_id] = sensor_data
+
+ dataa = {}
+ dataa['Function'] = "device_slicedata"
+ dataa['all_slices'] = all_slices
+ dataa['time_zone_st'] = time_zone_s
+ dataa['proximity'] = positions_list
+ dataa['well_id'] = well_id
+ dataa['MAC'] = MAC
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ #return
+ elif function == "request_single_radar_slice":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ selected_date = form_data.get('date')
+ devices_list = form_data.get('devices_list')
+ ctrl_key_state = form_data.get('ctrl_key_state')
+ alt_key_state = form_data.get('alt_key_state')
+ #devices_list = '[267,560,"?",null,"64B70888F6F0"]'
+ #devices_list = '[[267,560,"?",null,"64B70888F6F0"],[268,561,"?",null,"64B70888F6F1"]]'
+ sensor_index_list = form_data.get('sensor_index_list').split(",")
+ is_nested, device_details = check_and_parse(devices_list)
+ if not is_nested:
+ device_ids_list = [device_details[1]]
+ well_ids_list = [device_details[0]]
+ else:
+ device_ids_list = list(map(lambda x: x[1], device_details))
+ well_ids_list =list(map(lambda x: x[0], device_details))
+
+ epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(selected_date, time_zone_s) #>= #<
+
+ #epoch_to = '1730592010' #small sample to test
+ radar_part = form_data.get('radar_part')
+ well_id = well_ids_list[0]
+ all_slices = {}
+ device_id2_mac = {device_details[1]: device_details[4]}
+ for device_id in device_ids_list:
+ device_id2_mac
+ sensor_data = {}
+ for sensor_index in sensor_index_list:
+ st = time.time()
+ sensor = ["m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m08_max", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s28_max", "s28_min"][int(sensor_index)]
+
+ line_part = ReadRadarDetail(device_id, sensor, epoch_from_utc, epoch_to_utc, alt_key_state)
+ window = sensor_legal_values["radar"][2]
+
+ line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
+ st = time.time()
+ cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
+ cleaned_values = add_boundary_points(cleaned_values_t, time_zone_s)
+ if len(sensor) < 4:
+ sensor_data[sensor+"_max"] = cleaned_values
+ else:
+ sensor_data[sensor] = cleaned_values
+ all_slices[device_id2_mac[device_id]] = sensor_data #use MAC instead of device_id, since device is sending data with MAC only
+
+ dataa = {}
+ dataa['Function'] = "single_slicedata"
+ dataa['devices_list'] = devices_list
+ dataa['all_slices'] = all_slices
+ dataa['time_zone_st'] = time_zone_s
+ dataa['well_id'] = well_id
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ elif function == "get_deployment":
+ blob_data = read_file("deployment.html")
+ deployment_id = form_data.get('deployment_id')
+ #lets update "Deployments" select
+ users = GetUsersFromDeployments(privileges)
+ blob_data = UpdateDeploymentsSelector(blob_data, users, False, deployment_id)
+
+ resp.content_type = "text/html"
+ resp.text = blob_data
+ return
+ elif function == "get_deployment_j":
+ deployment_id = form_data.get('deployment_id')
+ time_zone_st = GetTimeZoneOfDeployment(deployment_id)
+ date = form_data.get('date')
+ if date == None:
+
+ # Get today's date
+ local_timezone = pytz.timezone(time_zone_st) # Replace with your local timezone
+ date = datetime.datetime.now(local_timezone).strftime('%Y-%m-%d')
+
+ #epoch_from_utc = int(datetime.datetime.strptime(date, "%Y-%m-%d").timestamp())
+ #devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
+
+ dataa = {}
+ dataa['Function'] = "deployment_details"
+ if privileges == "-1":
+ deployment = DeploymentDetails(deployment_id)
+ dataa['deployment_details'] = deployment
+ else:
+ privileges = privileges.split(",")
+ if deployment_id in privileges:
+ deployment = DeploymentDetails(deployment_id)
+ dataa['deployment_details'] = deployment
+
+
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+
+ return
+ elif function == "set_floor_layout":
+ deployment_id = form_data.get('deployment_id')
+ layout = form_data.get('layout')
+
+ if privileges == "-1" or deployment_id in privileges:
+ ok = StoreFloorPlan(deployment_id, layout)
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ else:
+ payload = {'ok': 0, 'error': "not allowed"}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ return
+ elif function == "get_floor_layout":
+ deployment_id = form_data.get('deployment_id')
+
+ dataa = {}
+ dataa['Function'] = "deployment_details"
+ if privileges == "-1":
+ layout = GetFloorPlan(deployment_id)
+ dataa['layout'] = layout
+ else:
+ privileges = privileges.split(",")
+ if deployment_id in privileges:
+ layout = GetFloorPlan(deployment_id)
+ dataa['layout'] = layout
+
+
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+
+ return
+ elif function == "get_beneficiary":
+ user_id = form_data.get('user_id')
+ all_beneficiaries = ListBeneficiaries(privileges, user_id)
+ beneficiaries_list = []
+ for beneficiary_temp in all_beneficiaries:
+ beneficiaries_list.append(str(beneficiary_temp[0]))
+
+ dataa = {}
+ dataa['Function'] = "beneficiary_details"
+ if user_id in beneficiaries_list:
+ beneficiary = UserDetails(user_id)
+ #lets remove fields not relevant for beneficiary
+ try:
+ del beneficiary['time_edit']
+ except:
+ pass
+
+ try:
+ del beneficiary['user_edit']
+ except:
+ pass
+
+ try:
+ del beneficiary['access_to_deployments']
+ except:
+ pass
+ dataa['beneficiary_details'] = beneficiary
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "get_caretaker":
+
+ user_name = form_data.get('user_name')
+
+
+ all_caretakers = ListCaretakers(privileges, user_name)
+ if len(all_caretakers) > 1:
+ user_id = form_data.get('user_id')
+ else:
+ user_id = str(all_caretakers[0][0])
+
+ caretakers_list = []
+ for caretakers_temp in all_caretakers:
+ caretakers_list.append(str(caretakers_temp[0]))
+
+ dataa = {}
+ dataa['Function'] = "caretaker_details"
+ if user_id in caretakers_list:
+ caretaker = UserDetails(user_id)
+ #lets remove fields not relevant for beneficiary
+ try:
+ del caretaker['time_edit']
+ except:
+ pass
+
+ try:
+ del caretaker['user_edit']
+ except:
+ pass
+
+ dataa['caretaker_details'] = caretaker
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "get_device":
+ device_id = form_data.get('device_id')
+ device_mac = form_data.get('mac')
+ min_well_id = form_data.get('min_well_id')
+ if min_well_id != None:
+ min_well_id = int(min_well_id)
+
+ if device_mac != None:
+ device_det = GetDeviceDetailsSingleFromMac(device_mac)
+ print(device_det)
+ dataa = {}
+ dataa['Function'] = "device_details"
+ dataa['device_details'] = device_det
+ if device_det == {}:
+ dataa['next_well_id'] = GetNextWellId(min_well_id)
+ else:
+ devices = GetVisibleDevices(privileges)
+ dataa = {}
+ dataa['Function'] = "device_details"
+ dataa['device_details'] = {}
+ if privileges == "-1":
+ #device_det = GetDeviceDetails(device_id)
+ device_det = GetDeviceDetailsSingle(device_id)
+ if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
+ device_det['radar_threshold'] = '["s3_max",12]'
+ dataa['device_details'] = device_det
+ else:
+ devices_list = []
+ for device_id_temp in devices:
+ devices_list.append(str(device_id_temp[0]))
+
+ if device_id in devices_list:
+ device_det = GetDeviceDetailsSingle(device_id)
+ if device_det['radar_threshold'] == None or device_det['radar_threshold'] == "":
+ device_det['radar_threshold'] = '["s3_max",12]'
+
+
+ dataa['device_details'] = device_det
+
+
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+
+ return
+
+
+ elif function == "set_deployment":
+ credentials_changed = False
+ devices_changed = False
+ #at this point user_name has to be known (created, checked and communicated over mqtt (preserved!) to user's device)
+ #this call is from Mobile App if user_name does not exist, it will be created here
+ user_name = form_data.get('user_name')
+ privileges, user_id = GetPriviledgesAndUserId(user_name)
+ beneficiary_name = form_data.get('beneficiary_name')
+ if " " in beneficiary_name.strip():
+ form_data['firstName'], form_data['lastName'] = beneficiary_name.split(" ")
+ else:
+ form_data['firstName'] = beneficiary_name.strip()
+ form_data['lastName'] = ""
+
+ beneficiary_address = form_data.get('beneficiary_address')
+ beneficiary_user_name = form_data.get('beneficiary_user_name')
+ password = form_data.get('beneficiary_password')
+ address_map = ParseAddress(beneficiary_address)
+ #print(address_map)
+ #{'component_count': 6, 'parsed_components': {'city': 'saratoga', 'country': 'united states', 'house_number': '18569', 'postcode': '95070', 'road': 'allendale avenue', 'state': 'ca'}, 'success': True}
+
+ email = form_data.get('beneficiary_email')
+ #token = form_data.get('token')
+ signature = form_data.get('signature')
+
+ reuse_existing_devices = form_data.get('reuse_existing_devices')
+
+ deployment = form_data.get('deployment')
+ beneficiary_name = form_data.get('beneficiary_name')
+ image_file_name = beneficiary_name.replace(" ","_") + ".jpg"
+ gps_lat = form_data.get('lat')
+ gps_lng = form_data.get('lng')
+ time_zone_s = GetTZFromGPS(gps_lat, gps_lng)
+ devices = form_data.get('devices')
+
+ #debug_received_data(form_data)
+ # Or better yet, update to handle both cases for backward compatibility:
+ if form_data.get("file_sent", "") != "":
+ # Old Base64 method
+ StoreFile2Blob(form_data["file_sent"], image_file_name, "user-pictures")
+ elif "beneficiary_photo" in files:
+ # New multipart method
+ try:
+ with open('beneficiary.jpg', 'rb') as f:
+ image_data = f.read()
+ image_base64 = base64.b64encode(image_data).decode('utf-8')
+ StoreFile2Blob(image_base64, image_file_name, "user-pictures")
+ os.remove('beneficiary.jpg')
+ except Exception as e:
+ logger.error(f"Failed to process beneficiary photo: {e}")
+
+ devices_list = json.loads(devices)
+ deployments_id = []
+
+ in_db_user_name, editing_user_id, password_in_db, role_ids, priviledges = AccountByEmailExists(email)
+ if editing_user_id != None:
+ user_name = GenerateUserNameWithContext(in_db_user_name, form_data['firstName'], form_data['lastName'], editing_user_id)
+ else:
+ user_name = GenerateUserNameWithContext(beneficiary_user_name, form_data['firstName'], form_data['lastName'], 0)
+
+ if user_name != in_db_user_name:
+ credentials_changed = True
+
+ if (password == None or password == ""):
+ if password_in_db != None:
+ password = password_in_db
+ else:
+ password = CreatePassword(12)
+
+ if password != password_in_db:
+ credentials_changed = True
+
+ if deployment == "NEW":
+ if in_db_user_name != None:
+ #this email exists in DB, so cannot be used for NEW deployment!
+ error_string = f"This email cannot be used for new account!"
+ print(error_string)
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ editing_user_id = "0" #beneficiary_id #specify if editing existing user, otherwise "0"
+ editing_deployment_id = "0"
+ else:
+ editing_deployment_id = deployment.split(" ")[0]
+
+ #this email is not in db, so we will create new user no matter what (NEW was requested!)
+ #Can same person have multiple deployments?
+ #Yes, they can have different sensors, visible to different people!
+
+ ##Lets check if this beneficiary exists in DB
+ #beneficiary_id = PersonInDB(beneficiary_name)
+ #if beneficiary_id > 0: #there, so find deployment #
+ ##Can same person have multiple deployments?
+ ##Yes, they can have different sensors, visible to different people!
+ #deployments_id = GetDepoymentId(beneficiary_id)
+ #if deployments_id != []:
+
+
+
+ if editing_user_id == "0":
+ form_data['role_ids'] = "1" #we need to make sure that "1" is added to potentially "2" in there
+ else:
+ if role_ids == "2": #we need to add 1
+ form_data['role_ids'] = "1,2"
+ else:
+ form_data['role_ids'] = role_ids
+
+ form_data['access_to_deployments'] = priviledges #at this point we do not know it, since deployment is not yet created! #str(deployment_id)
+ #we need to update above field in DB after new deployment is generated
+ form_data['new_user_name'] = form_data['user_name']
+ form_data['first_name'] = form_data['firstName']
+ form_data['last_name'] = form_data['lastName']
+
+ print(address_map)
+ #{'component_count': 6, 'parsed_components': {'city': 'saratoga', 'country': 'united states', 'house_number': '18569', 'postcode': '95070', 'road': 'allendale avenue', 'state': 'ca'}, 'success': True}
+ #{'component_count': 5, 'parsed_components': {'city': 'rijeka', 'country': 'croatia', 'house_number': '51a', 'postcode': '51000', 'road': 'labinska ul.'}, 'success': True}
+
+ form_data['address_street'] = GetIfThere(address_map, "house_number") + " " + GetIfThere(address_map, "road")
+ form_data['address_city'] = GetIfThere(address_map, "city")
+ form_data['address_zip'] = GetIfThere(address_map, "postcode")
+ form_data['address_state'] = GetIfThere(address_map, "state")
+ form_data['address_country'] = GetIfThere(address_map, "country")
+
+ form_data['phone_number'] = ""#form_data['phone']
+ form_data['picture'] = image_file_name
+ form_data['key'] = password
+ #update person_details
+ beneficiary_id, if_new = StoreBeneficiary2DB(form_data, editing_user_id, user_id) #this will update or create beneficiary
+ #lets check if devices listed are not part of existing deployment
+ if reuse_existing_devices == "0": #do re-use
+ success, result = DevicesNotUsed(devices, user_name)
+ else:
+ success = True
+ result["deployed"] = []
+ result["not_found"] = []
+
+ if success: #this should always be true!
+ if result["deployed"]:
+ error_string = f"These devices are already deployed: {result['deployed']}"
+ print(error_string)
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ if result["not_found"]:
+ error_string = f"These devices are not available: {result['not_found']}"
+ print(error_string)
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ if not result["deployed"] and not result["not_found"]:
+ print("All devices are available for deployment")
+ #ok, error_string = StoreDisclaimer2DB(form_data)
+
+ #In DB, we need to update or insert into: deployments, deployment_details and deployment_history
+ #can there be more than 1 deployment per beneficiary?
+ #yes. Different set of devices visible to different Caretaker. So below can return multiple deployments.
+
+ #editing_deployment_id = "0"
+ deployment_details = []
+ if if_new == 0: #existing beneficiary
+ deployment_details = GetDeploymentDetailsFromBeneficiary(beneficiary_id, editing_deployment_id) #this only returns first one, even if more! TODO (handle it better)
+
+
+ form_data_temp = {}
+ form_data_temp['editing_deployment_id'] = editing_deployment_id
+ form_data_temp['beneficiary_id'] = beneficiary_id
+ form_data_temp['caretaker_id'] = user_id
+ form_data_temp['owner_id'] = user_id
+ form_data_temp['installer_id'] = user_id
+
+ form_data_temp['address_street'] = form_data['address_street']
+ form_data_temp['address_city'] = form_data['address_city']
+ form_data_temp['address_zip'] = form_data['address_zip']
+ form_data_temp['address_state'] = form_data['address_state']
+ form_data_temp['address_country'] = form_data['address_country']
+ form_data_temp['persons'] = form_data.get('persons')
+ form_data_temp['gender'] = form_data.get('gender')
+ form_data_temp['race'] = form_data.get('race')
+ form_data_temp['born'] = form_data.get('born')
+ form_data_temp['pets'] = form_data.get('pets')
+ form_data_temp['wifis'] = form_data.get('wifis')
+ form_data_temp['lat'] = form_data.get('lat')
+ form_data_temp['lng'] = form_data.get('lng')
+ form_data_temp['gps_age'] = form_data.get('gps_age')
+ form_data_temp['time_zone_s'] = time_zone_s
+ #["64B7088909FC", "64B7088909B8", "901506CA3DA0", "142B2F81A020", "64B7088905BC", "64B708890898", "64B70888FAD4","64B7088904BC"]
+ form_data_temp['devices'] = WellIDs2MAC(form_data.get('devices'))
+ form_data_temp['wifis'] = ConvertToMapString(form_data.get('wifis'))
+ editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data_temp, editing_deployment_id)
+
+ if is_new_deployment == 1 and if_new:
+ #This is new email, therefore person, so we need to give him access to only this deployment
+ form_data['access_to_deployments'] = str(editing_deployment_id)
+ beneficiary_id, if_new = StoreBeneficiary2DB(form_data, editing_user_id, user_id)
+
+ #deployment_id = deployment_details[0]
+ #we need to update deployment_history table if devices changed
+ devices_in_history_last = GetDeploymentHistoryLast(editing_deployment_id)
+ if len(devices_in_history_last) > 0:
+ devices_in_history_last = devices_in_history_last[3]
+ if ListsSame(devices_in_history_last, form_data_temp['devices']) == False:
+ ok = StoreToDeploymentHistory(editing_deployment_id, form_data_temp['devices'])
+ devices_changed = True
+
+ #-- Fix permissions
+ #GRANT USAGE, SELECT ON SEQUENCE deployment_history_id_seq TO well_app;
+ #-- Fix sequence sync
+ #SELECT setval('deployment_history_id_seq', (SELECT COALESCE(MAX(id), 0) FROM deployment_history));
else:
- privileges, user_id = ValidUser(user_name, password)
- if user_id == "0": #bad password
- error_string = f"Password does not match user {user_name}"
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
+ ok = StoreToDeploymentHistory(editing_deployment_id, form_data_temp['devices'])
+ devices_changed = True
+
+ print(devices_in_history_last)
+ ok = 1
+
+ if credentials_changed:
+ if if_new == 1:
+ #we need to call cd ~/mqtt-auth-service/acl_manager.py
+ #we created new beneficiary and he needs welcome email
+ #if deployment == "NEW":
+ SendWelcomeBeneficiaryEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'])
+ else:
+ SendCredentialsChangedEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'])
+ else:
+ if devices_changed:
+ CallICLUpdate()
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
- else:
- payload = {'ok': ok, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- else:
- error_string = f"Error: {result}"
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "purge_phantom_records":
- last_valid_id = int(form_data.get('last_valid_device_id'))
- try:
- conn = get_db_connection()
- with conn.cursor() as cur:
- # 1. Check for non-null radar_threshold records
- cur.execute("""
- SELECT COUNT(*)
- FROM public.devices
- WHERE device_id > %s AND radar_threshold IS NOT NULL
- """, (last_valid_id,))
+ else: #this should not be here
+ error_string = f"Error: {result}"
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
- non_null_count = cur.fetchone()[0]
+ elif function == "purge_phantom_records":
+ last_valid_id = int(form_data.get('last_valid_device_id'))
- if non_null_count > 0:
- resp.media = {
- "status": "aborted",
- "reason": f"Found {non_null_count} records with device_id > {last_valid_id} that have radar_threshold NOT NULL"
- }
- return
+ try:
+ conn = get_db_connection()
+ with conn.cursor() as cur:
+ # 1. Check for non-null radar_threshold records
+ cur.execute("""
+ SELECT COUNT(*)
+ FROM public.devices
+ WHERE device_id > %s AND radar_threshold IS NOT NULL
+ """, (last_valid_id,))
- # 2. Delete phantom records
- cur.execute("""
- DELETE FROM public.devices
- WHERE device_id > %s AND radar_threshold IS NULL
- """, (last_valid_id,))
+ non_null_count = cur.fetchone()[0]
- deleted_count = cur.rowcount
+ if non_null_count > 0:
+ resp.media = {
+ "status": "aborted",
+ "reason": f"Found {non_null_count} records with device_id > {last_valid_id} that have radar_threshold NOT NULL"
+ }
+ return
- if deleted_count > 0:
- # 3. Reset sequence to the ACTUAL maximum device_id in the table
- cur.execute("SELECT COALESCE(MAX(device_id), 0) FROM public.devices")
- actual_max_id = cur.fetchone()[0]
+ # 2. Delete phantom records
+ cur.execute("""
+ DELETE FROM public.devices
+ WHERE device_id > %s AND radar_threshold IS NULL
+ """, (last_valid_id,))
- # Reset sequence to actual max
- cur.execute("SELECT setval('devices_device_id_seq', %s, true)", (actual_max_id,))
+ deleted_count = cur.rowcount
- # Get next ID to verify
- cur.execute("SELECT nextval('devices_device_id_seq')")
- next_id = cur.fetchone()[0]
+ if deleted_count > 0:
+ # 3. Reset sequence to the ACTUAL maximum device_id in the table
+ cur.execute("SELECT COALESCE(MAX(device_id), 0) FROM public.devices")
+ actual_max_id = cur.fetchone()[0]
- conn.commit()
+ # Reset sequence to actual max
+ cur.execute("SELECT setval('devices_device_id_seq', %s, true)", (actual_max_id,))
- resp.media = {
- "status": "success",
- "deleted_count": deleted_count,
- "actual_max_device_id": actual_max_id,
- "sequence_reset_to": actual_max_id,
- "next_device_id": next_id
- }
- else:
- resp.media = {
- "status": "no_action",
- "message": "No phantom records found to delete"
- }
+ # Get next ID to verify
+ cur.execute("SELECT nextval('devices_device_id_seq')")
+ next_id = cur.fetchone()[0]
- except Exception as e:
- conn.rollback()
- resp.media = {"error": str(e)}
- resp.status = falcon.HTTP_500
+ conn.commit()
- elif function == "request_deployment_map_new":
- st = time.time()
- print(f"$0 ----{time.time() - st}")
- deployment_id = form_data.get('deployment_id')
- map_type = form_data.get('map_type')
- print(f"$1 ----{time.time() - st}")
- maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
- print(f"$2 ----{time.time() - st}")
- datee = form_data.get('date')
- if maps_dates != []:
+ resp.media = {
+ "status": "success",
+ "deleted_count": deleted_count,
+ "actual_max_device_id": actual_max_id,
+ "sequence_reset_to": actual_max_id,
+ "next_device_id": next_id
+ }
+ else:
+ resp.media = {
+ "status": "no_action",
+ "message": "No phantom records found to delete"
+ }
- if datee == "2022-4-2": #that one is default in HTML so disregard
- datee = maps_dates[0]
+ except Exception as e:
+ conn.rollback()
+ resp.media = {"error": str(e)}
+ resp.status = falcon.HTTP_500
- locations_desc_map = {}
- for details in positions_list:
- well_id = details[0]
- location = details[2]
- if details[3] != None and details[3] != "":
- location = location +" "+ details[3]
+ elif function == "request_deployment_map_new":
+ st = time.time()
+ print(f"$0 ----{time.time() - st}")
+ deployment_id = form_data.get('deployment_id')
+ map_type = form_data.get('map_type')
+ print(f"$1 ----{time.time() - st}")
+ maps_dates, positions_list, timezone_s = GetDeploymentDatesBoth(deployment_id)
+ print(f"$2 ----{time.time() - st}")
+ datee = form_data.get('date')
+ if maps_dates != []:
- if details[6] != None and details[6] != "":
- location = location +" "+ details[6]
+ if datee == "2022-4-2": #that one is default in HTML so disregard
+ datee = maps_dates[0]
- MAC = details[4]
- locations_desc_map[well_id] = location
- print(f"$3 ----{time.time() - st}")
+ locations_desc_map = {}
+ for details in positions_list:
+ well_id = details[0]
+ location = details[2]
+ if details[3] != None and details[3] != "":
+ location = location +" "+ details[3]
+ if details[6] != None and details[6] != "":
+ location = location +" "+ details[6]
+
+ MAC = details[4]
+ locations_desc_map[well_id] = location
+ print(f"$3 ----{time.time() - st}")
+
+ dataa = {}
+ dataa['Function'] = "deployments_maps_report"
+ dataa['proximity'] = positions_list
+ maps_dates.sort(reverse = True)
+ dataa['maps_dates'] = maps_dates
+ dataa['device_count'] = len(positions_list)
+ dataa['time_zone'] = timezone_s
+ dataa['map_type'] = map_type
+
+ #MACs_list = GetMACsListSimple(positions_list)
+ #MACs_map = {}
+
+ #for details in positions_list:
+ # id = details[0]
+ # MAC = details[3]
+ # MACs_map[id] = MAC
+ #for i in range(len(MACs_list)):
+ # MACs_map[devices_list[i]] = MACs_list[i][0]
+
+ id = positions_list[0][0]
+ #dataa['MACs_map'] = MACs_map
+ dataa['locations_desc_map'] = locations_desc_map
+ #proximity_list = proximity.split(",")
+ print(f"$4 ----{time.time() - st}")
+
+ if id < 200:
+ checkmarks_string = 'T>\n'
+ checkmarks_string = checkmarks_string + 'H>\n'
+ checkmarks_string = checkmarks_string + 'P>\n'
+ checkmarks_string = checkmarks_string + 'C>\n'
+ checkmarks_string = checkmarks_string + 'V>\n'
+ checkmarks_string = checkmarks_string + 'L>\n'
+ checkmarks_string = checkmarks_string + 'R>
'
+ else: #>200 = ["Temperature", "Humidity", "Pressure", "Light", "Radar", "VOC"]
+
+ checkmarks_string = 'T>\n'
+ checkmarks_string = checkmarks_string + 'H>\n'
+ checkmarks_string = checkmarks_string + 'P>\n'
+ checkmarks_string = checkmarks_string + 'L>\n'
+ checkmarks_string = checkmarks_string + 'R>\n'
+
+ checkmarks_string = checkmarks_string + 'S0>\n'
+ checkmarks_string = checkmarks_string + 'S1>\n'
+ checkmarks_string = checkmarks_string + 'S2>\n'
+ checkmarks_string = checkmarks_string + 'S3>\n'
+ checkmarks_string = checkmarks_string + 'S4>\n'
+ checkmarks_string = checkmarks_string + 'S5>\n'
+ checkmarks_string = checkmarks_string + 'S6>\n'
+ checkmarks_string = checkmarks_string + 'S7>\n'
+ checkmarks_string = checkmarks_string + 'S8>\n'
+ checkmarks_string = checkmarks_string + 'S9>
'
+
+ checked_or_not = " checked"
+
+ for index in range(len(positions_list)):
+ details = positions_list[index]
+ device_id = details[0]
+ location = details[2]
+ if details[3] != None and details[3] != "":
+ location = location + " " + details[3]
+
+ if details[6] != None and details[6] != "":
+ location = location + " " + details[6]
+
+ checkmarks_string = checkmarks_string + str(device_id) + '>\n'
+ checked_or_not = ''
+
+ print(f"$5 ----{time.time() - st}")
+
+ dataa['checkmarks'] = checkmarks_string
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+
+ elif function == "request_proximity":
+ deployment = form_data.get('deployment_id')
+ timee = form_data.get('time')
+ #timee = StringToEpoch(datee)
+ #print(deployment, timee)
+ well_ids, device_ids = GetProximityList(deployment, timee)
+ #print(proximity)
dataa = {}
- dataa['Function'] = "deployments_maps_report"
- dataa['proximity'] = positions_list
- maps_dates.sort(reverse = True)
- dataa['maps_dates'] = maps_dates
- dataa['device_count'] = len(positions_list)
- dataa['time_zone'] = timezone_s
- dataa['map_type'] = map_type
+ dataa['Function'] = "proximity_report"
+ if len(well_ids) > 0:
+ dataa['proximity'] = well_ids
+ else:
+ dataa['proximity'] = []
- #MACs_list = GetMACsListSimple(positions_list)
- #MACs_map = {}
+ resp.media = package_response(dataa)
+ resp.status = falcon.HTTP_200
+ elif function == "request_devices":
+ deployment_id = form_data.get('deployment_id')
+ group_id = form_data.get('group_id')
+ location = form_data.get('location')
+ if location == "0":
+ location = "All"
+ is_fresh = form_data.get('is_fresh')
+ matching_devices = GetMatchingDevices(privileges, group_id, deployment_id, location)
+ dataa = {}
+ dataa['Function'] = "devices_report"
+ if len(matching_devices) > 0:
+ dataa['devices'] = matching_devices
+ else:
+ dataa['devices'] = []
- #for details in positions_list:
- # id = details[0]
- # MAC = details[3]
- # MACs_map[id] = MAC
- #for i in range(len(MACs_list)):
- # MACs_map[devices_list[i]] = MACs_list[i][0]
-
- id = positions_list[0][0]
- #dataa['MACs_map'] = MACs_map
- dataa['locations_desc_map'] = locations_desc_map
- #proximity_list = proximity.split(",")
- print(f"$4 ----{time.time() - st}")
-
- if id < 200:
- checkmarks_string = 'T>\n'
- checkmarks_string = checkmarks_string + 'H>\n'
- checkmarks_string = checkmarks_string + 'P>\n'
- checkmarks_string = checkmarks_string + 'C>\n'
- checkmarks_string = checkmarks_string + 'V>\n'
- checkmarks_string = checkmarks_string + 'L>\n'
- checkmarks_string = checkmarks_string + 'R>
'
- else: #>200 = ["Temperature", "Humidity", "Pressure", "Light", "Radar", "VOC"]
-
- checkmarks_string = 'T>\n'
- checkmarks_string = checkmarks_string + 'H>\n'
- checkmarks_string = checkmarks_string + 'P>\n'
- checkmarks_string = checkmarks_string + 'L>\n'
- checkmarks_string = checkmarks_string + 'R>\n'
-
- checkmarks_string = checkmarks_string + 'S0>\n'
- checkmarks_string = checkmarks_string + 'S1>\n'
- checkmarks_string = checkmarks_string + 'S2>\n'
- checkmarks_string = checkmarks_string + 'S3>\n'
- checkmarks_string = checkmarks_string + 'S4>\n'
- checkmarks_string = checkmarks_string + 'S5>\n'
- checkmarks_string = checkmarks_string + 'S6>\n'
- checkmarks_string = checkmarks_string + 'S7>\n'
- checkmarks_string = checkmarks_string + 'S8>\n'
- checkmarks_string = checkmarks_string + 'S9>
'
-
- checked_or_not = " checked"
-
- for index in range(len(positions_list)):
- details = positions_list[index]
- device_id = details[0]
- location = details[2]
- if details[3] != None and details[3] != "":
- location = location + " " + details[3]
-
- if details[6] != None and details[6] != "":
- location = location + " " + details[6]
-
- checkmarks_string = checkmarks_string + str(device_id) + '>\n'
- checked_or_not = ''
-
- print(f"$5 ----{time.time() - st}")
-
- dataa['checkmarks'] = checkmarks_string
resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
- elif function == "request_proximity":
- deployment = form_data.get('deployment_id')
- timee = form_data.get('time')
- #timee = StringToEpoch(datee)
- #print(deployment, timee)
- well_ids, device_ids = GetProximityList(deployment, timee)
- #print(proximity)
- dataa = {}
- dataa['Function'] = "proximity_report"
- if len(well_ids) > 0:
- dataa['proximity'] = well_ids
- else:
- dataa['proximity'] = []
+ elif function == "get_deployment_details":
+ deployment_id = form_data.get('deployment_id')
+ group_id = form_data.get('group_id')
+ location = form_data.get('location')
+ if location == "0":
+ location = "All"
+ is_fresh = form_data.get('is_fresh')
+ matching_devices = GetMatchingDevicesComplete(privileges, group_id, deployment_id, location)
+ deployment = DeploymentDetails(deployment_id)
+ dataa = {}
+ dataa['Function'] = "devices_report"
+ if len(matching_devices) > 0:
+ dataa['devices'] = matching_devices
+ else:
+ dataa['devices'] = []
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
- elif function == "request_devices":
- deployment_id = form_data.get('deployment_id')
- group_id = form_data.get('group_id')
- location = form_data.get('location')
- if location == "0":
- location = "All"
- is_fresh = form_data.get('is_fresh')
- matching_devices = GetMatchingDevices(privileges, group_id, deployment_id, location)
- dataa = {}
- dataa['Function'] = "devices_report"
- if len(matching_devices) > 0:
- dataa['devices'] = matching_devices
- else:
- dataa['devices'] = []
-
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
-
- elif function == "get_deployment_details":
- deployment_id = form_data.get('deployment_id')
- group_id = form_data.get('group_id')
- location = form_data.get('location')
- if location == "0":
- location = "All"
- is_fresh = form_data.get('is_fresh')
- matching_devices = GetMatchingDevicesComplete(privileges, group_id, deployment_id, location)
- deployment = DeploymentDetails(deployment_id)
- dataa = {}
- dataa['Function'] = "devices_report"
- if len(matching_devices) > 0:
- dataa['devices'] = matching_devices
- else:
- dataa['devices'] = []
-
- if len(deployment) > 0:
- dataa['details'] = deployment
- else:
- dataa['details'] = {}
- resp.media = package_response(dataa)
- resp.status = falcon.HTTP_200
-
- elif function == "device_form":
- import uuid
- request_id = str(uuid.uuid4())[:8]
- logger.debug(f"[{request_id}] device_form ENTRY")
-
- device_id = None
- if 'editing_device_id' in form_data:
- device_id = int(form_data.get('editing_device_id'))
- logger.debug(f"[{request_id}] Found editing_device_id: {device_id}")
- else:
- logger.debug(f"[{request_id}] No editing_device_id found, device_id = {device_id}")
-
- ok = 0
- logger.debug(f"[{request_id}] privileges = {privileges}")
-
- if privileges == "-1":
- logger.debug(f"[{request_id}] CALLING StoreDevice2DB with device_id: {device_id}")
- ok = StoreDevice2DB(form_data, device_id)
- logger.debug(f"[{request_id}] StoreDevice2DB returned: {ok}")
- payload = {'ok': 1}
- resp.media = package_response(payload)
+ if len(deployment) > 0:
+ dataa['details'] = deployment
+ else:
+ dataa['details'] = {}
+ resp.media = package_response(dataa)
resp.status = falcon.HTTP_200
- else:
- logger.debug(f"[{request_id}] Non-admin path...")
- if device_id != None:
+ elif function == "device_form":
+
+ request_id = str(uuid.uuid4())[:8]
+ logger.debug(f"[{request_id}] device_form ENTRY")
+
+ device_id = None
+ if 'editing_device_id' in form_data:
+ device_id = int(form_data.get('editing_device_id'))
+ logger.debug(f"[{request_id}] Found editing_device_id: {device_id}")
+ else:
+ logger.debug(f"[{request_id}] No editing_device_id found, device_id = {device_id}")
+
+ ok = 0
+ logger.debug(f"[{request_id}] privileges = {privileges}")
+
+ if privileges == "-1":
+ logger.debug(f"[{request_id}] CALLING StoreDevice2DB with device_id: {device_id}")
+ ok = StoreDevice2DB(form_data, device_id)
+ logger.debug(f"[{request_id}] StoreDevice2DB returned: {ok}")
+ payload = {'ok': 1}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ else:
+ logger.debug(f"[{request_id}] Non-admin path...")
+ if device_id != None:
+ devices = GetVisibleDevices(privileges)
+ for device in devices:
+ if device[0] == device_id:
+ logger.debug(f"[{request_id}] CALLING StoreDevice2DB in loop with device_id: {device_id}")
+ ok = StoreDevice2DB(form_data, device_id)
+ logger.debug(f"[{request_id}] StoreDevice2DB in loop returned: {ok}")
+ break
+ else:
+ logger.debug(f"[{request_id}] device_id is None, returning error")
+ payload = {'ok': 0}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_500
+ return
+
+ logger.debug(f"[{request_id}] Final ok value: {ok}")
+ elif function == "device_set_group":
+ group_id = int(form_data.get('group_id'))
+
+ MAC = form_data.get('mac')
+ if MAC != None:
+ device_id_or_mac = MAC
+ else:
+ device_id_or_mac = int(form_data.get('device_id'))
+
+ ok = ""
+ if privileges == "-1":
+ ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
+ else:
devices = GetVisibleDevices(privileges)
for device in devices:
- if device[0] == device_id:
- logger.debug(f"[{request_id}] CALLING StoreDevice2DB in loop with device_id: {device_id}")
- ok = StoreDevice2DB(form_data, device_id)
- logger.debug(f"[{request_id}] StoreDevice2DB in loop returned: {ok}")
+ if device[0] == device_id_or_mac:
+ ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
break
- else:
- logger.debug(f"[{request_id}] device_id is None, returning error")
- payload = {'ok': 0}
+
+ if ok != "":
+
+ payload = ok
resp.media = package_response(payload)
- resp.status = falcon.HTTP_500
+ resp.status = falcon.HTTP_200
return
+ else:
+ debug_string = "This device_id is not editable"
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTPError
+ return
+ elif function == "device_set_well_id":
+ well_id = int(form_data.get('well_id'))
- logger.debug(f"[{request_id}] Final ok value: {ok}")
- elif function == "device_set_group":
- group_id = int(form_data.get('group_id'))
+ MAC = form_data.get('mac')
- MAC = form_data.get('mac')
- if MAC != None:
- device_id_or_mac = MAC
- else:
- device_id_or_mac = int(form_data.get('device_id'))
+ if MAC != None:
+ device_id_or_mac = MAC
+ else:
+ device_id_or_mac = int(form_data.get('device_id'))
- ok = ""
- if privileges == "-1":
- ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
- else:
- devices = GetVisibleDevices(privileges)
- for device in devices:
- if device[0] == device_id_or_mac:
- ok = StoreGroupToDevice(device_id_or_mac, group_id, user_name)
- break
- if ok != "":
+ ok = ""
+ if privileges == "-1":
+ ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
+ else:
+ devices = GetVisibleDevices(privileges)
+ for device in devices:
+ if device[0] == device_id_or_mac:
+ ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
+ break
- payload = ok
+ if ok != "":
+
+ payload = ok
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ debug_string = "This device_id is not editable"
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTPError
+ return
+ elif function == "device_get_live":
+
+ MAC = form_data.get('mac')
+
+ if MAC != None:
+ device_id_or_mac = MAC
+ else:
+ device_id_or_mac = int(form_data.get('device_id'))
+
+ ok = ""
+ if privileges == "-1":
+ ok = GetDeviceLive(device_id_or_mac, user_name)
+ else:
+ devices = GetVisibleDevices(privileges)
+ for device in devices:
+ if device[0] == device_id_or_mac:
+ ok = GetDeviceLive(device_id_or_mac, user_name)
+ break
+
+ if ok != "":
+
+ payload = ok
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ debug_string = "This device_id is not editable"
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTPError
+ return
+ elif function == "device_set_network_id":
+ network_id = int(form_data.get('network_id'))
+
+ MAC = form_data.get('mac')
+
+ if MAC != None:
+ device_id_or_mac = MAC
+ else:
+ device_id_or_mac = int(form_data.get('device_id'))
+
+ ok = ""
+ if privileges == "-1":
+ ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
+ else:
+ devices = GetVisibleDevices(privileges)
+ for device in devices:
+ if device[0] == device_id_or_mac:
+ ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
+ break
+
+ if ok != "":
+
+ payload = ok
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ debug_string = "This device_id is not editable"
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTPError
+ return
+ elif function == "device_reboot":
+ if 'mac' in form_data:
+ MAC = form_data.get('mac').upper()
+ device_id_or_mac = MAC
+ else:
+ device_id = int(form_data.get('device_id'))
+ device_id_or_mac = device_id
+
+ ok = ""
+ if privileges == "-1":
+ ok = DeviceReboot(device_id_or_mac, user_name)
+ else:
+ devices = GetVisibleDevices(privileges)
+ #for this to work, device_id needs to be specified, not MAC!
+ for device in devices:
+ if device[0] == device_id_or_mac:
+ ok = DeviceReboot(device_id_or_mac, user_name)
+ break
+
+ print(f"OK = {ok}")
+ if ok != "":
+
+ payload = ok
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ debug_string = "This device_id is not editable"
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTPError
+ return
+ elif function == "device_delete":
+
+ #check if admin!
+
+ ok = DeleteRecordFromDB(form_data)
+
+ payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
- else:
- debug_string = "This device_id is not editable"
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTPError
- return
- elif function == "device_set_well_id":
- well_id = int(form_data.get('well_id'))
-
- MAC = form_data.get('mac')
-
- if MAC != None:
- device_id_or_mac = MAC
- else:
- device_id_or_mac = int(form_data.get('device_id'))
+ elif function == "alarm_on_off":
+ deployment_id = form_data.get('deployment_id')
+ alarm_on = int(form_data.get('alarm_on'))
- ok = ""
- if privileges == "-1":
- ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
- else:
- devices = GetVisibleDevices(privileges)
- for device in devices:
- if device[0] == device_id_or_mac:
- ok = StoreWellIdToDevice(device_id_or_mac, well_id, user_name)
- break
+ if privileges != "-1":
+ privileges_lst = privileges.split(",")
+ if deployment_id not in privileges_lst:
+ data_payload = {}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
- if ok != "":
+            # Let's prepare data to do the same as the store_alarms function
+            #read alarm_deployment_settings and all alarm_device_settings from db, and arm all bits that are enabled
+ deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
+ deployment_alarms = json.loads(deployment_alarms_json)
+ enabled = deployment_alarms["enabled"]
+ if alarm_on == 0:
+ if GetBit(enabled, 2):
+ enabled = set_character(enabled, 2, "0")
+ deployment_alarms["enabled"] = enabled
+ deployment_alarms_json = json.dumps(deployment_alarms)
+ redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
+ ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
+ else:
+ if not GetBit(enabled, 2):
+ enabled = set_character(enabled, 2, "1")
+ deployment_alarms["enabled"] = enabled
+ deployment_alarms_json = json.dumps(deployment_alarms)
+ redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
+ ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
- payload = ok
+
+ if False: #no need to do it since every new_alarms call reads alarm_deployment_settings_ always
+ record = {
+ 'user_name': user_name,
+ 'deployment_id': deployment_id,
+ 'device_id': device_id
+ }
+ record_json = json.dumps(record)
+ redis_conn.lpush('new_alarms', record_json)
+
+ if alarm_on != 0:
+ for device_id in device_alarms_json_map:
+ device_alarms_json = device_alarms_json_map[device_id]
+ device_alarms = json.loads(device_alarms_json)
+ enabled_alarms = device_alarms["enabled_alarms"]
+ armed_states = device_alarms["armed_states"]
+
+ if GetBit(enabled_alarms, 8):
+ armed_states = set_character(armed_states, 8, "1")
+ if GetBit(enabled_alarms, 9):
+ armed_states = set_character(armed_states, 9, "1")
+ if GetBit(enabled_alarms, 10):
+ armed_states = set_character(armed_states, 10, "1")
+
+ device_alarms["armed_states"] = armed_states
+ device_alarms_json = json.dumps(device_alarms)
+ redis_conn.set(f'alarm_device_settings_{device_id}', device_alarms_json)
+ ok = StoreAlarms2DBSimple(0, device_id, "", device_alarms_json)
+
+ #of course it is needed, how will well-alerts know that new data is stored to db?
+ # Create record dictionary
+ record = {
+ 'user_name': user_name,
+ 'deployment_id': deployment_id,
+ 'device_id': device_id
+ }
+
+ # Convert dictionary to JSON string for storage in Redis list
+ record_json = json.dumps(record)
+
+ # Add to queue (list) - lpush adds to the left/front of the list
+ redis_conn.lpush('new_alarms', record_json)
+
+
+ payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
- else:
- debug_string = "This device_id is not editable"
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTPError
- return
- elif function == "device_get_live":
- MAC = form_data.get('mac')
+ elif function == "get_alarm_state":
+ deployment_id = form_data.get('deployment_id')
- if MAC != None:
- device_id_or_mac = MAC
- else:
- device_id_or_mac = int(form_data.get('device_id'))
+ if privileges != "-1":
+ privileges_lst = privileges.split(",")
+ if deployment_id not in privileges_lst:
+ data_payload = {}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
- ok = ""
- if privileges == "-1":
- ok = GetDeviceLive(device_id_or_mac, user_name)
- else:
- devices = GetVisibleDevices(privileges)
- for device in devices:
- if device[0] == device_id_or_mac:
- ok = GetDeviceLive(device_id_or_mac, user_name)
- break
+            # Let's prepare data to do the same as the store_alarms function
+            #read alarm_deployment_settings and all alarm_device_settings from db, and arm all bits that are enabled
+ deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
+ deployment_alarms = json.loads(deployment_alarms_json)
+ enabled = deployment_alarms["enabled"]
+ if not GetBit(enabled, 2):
+ alarm_state = 2 #off
+ else:
+                #if any device was triggered, show 0, otherwise 1
+ alarm_state = 1
+ for device_id in device_alarms_json_map:
+ device_alarms_json = device_alarms_json_map[device_id]
+ device_alarms = json.loads(device_alarms_json)
+ enabled_alarms = device_alarms["enabled_alarms"]
+ armed_states = device_alarms["armed_states"]
- if ok != "":
+ if GetBit(enabled_alarms, 8):
+ if not GetBit(armed_states, 8): #if 0
+ alarm_state = 0
+ break
+ if GetBit(enabled_alarms, 9):
+ if not GetBit(armed_states, 9):
+ alarm_state = 0
+ break
+ if GetBit(enabled_alarms, 10):
+ if not GetBit(armed_states, 10):
+ alarm_state = 0
+ break
- payload = ok
+
+ payload = {'ok': 1, 'alarm_state':alarm_state}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
- else:
- debug_string = "This device_id is not editable"
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTPError
- return
- elif function == "device_set_network_id":
- network_id = int(form_data.get('network_id'))
+ elif function == "submit_mobile_message":
+ message = form_data.get('message')
+ mqtt_id = form_data.get("mqtt_id")
+ privileges, user_id = GetPriviledgesAndUserId(user_name)
+ if "function" in message:
+ current_utc_time = datetime.datetime.now(timezone.utc)
+ message_map = json.loads(message)
+ func = message_map["function"]
- MAC = form_data.get('mac')
- if MAC != None:
- device_id_or_mac = MAC
- else:
- device_id_or_mac = int(form_data.get('device_id'))
+ conn = get_db_connection()
+ cur = conn.cursor()
+ error_string = ""
+ ok = 1
+ try:
- ok = ""
- if privileges == "-1":
- ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
- else:
- devices = GetVisibleDevices(privileges)
- for device in devices:
- if device[0] == device_id_or_mac:
- ok = StoreNetworkIdToDevice(device_id_or_mac, network_id, user_name)
- break
+ current_utc_time = datetime.datetime.now(timezone.utc)
- if ok != "":
+ # Convert to epoch time
+ current_epoch_time = int(current_utc_time.timestamp() *1000)
- payload = ok
+
+ sql = f"""
+ INSERT INTO public.mobile_clients_messages
+ (time, mqtt_id, message, function)
+ VALUES
+ ({current_epoch_time}, '{CleanObject(mqtt_id)}','{CleanObject(message)}','{CleanObject(func)}');
+ """
+ logger.debug(f"sql= {sql}")
+ # Execute update query
+ cur.execute(sql)
+
+ # Commit the changes to the database
+ conn.commit()
+
+ # Close the cursor and connection
+
+
+ except Exception as e:
+ logger.error(f"Error inserting to mobile_clients_messages: {str(e)}")
+ ok = 0
+
+
+ try:
+
+ current_utc_time = datetime.datetime.now(timezone.utc)
+
+ # Convert to epoch time
+ current_epoch_time = int(current_utc_time.timestamp() *1000)
+
+
+ sql1 = f"""
+ INSERT INTO public.mobile_clients
+ (mqtt_id, user_name, user_id, last_message, last_message_time)
+ VALUES
+ ('{CleanObject(mqtt_id)}', '{CleanObject(user_name)}', {user_id}, '{CleanObject(message)}', {current_epoch_time})
+ ON CONFLICT (mqtt_id)
+ DO UPDATE SET
+ user_name = EXCLUDED.user_name,
+ user_id = EXCLUDED.user_id,
+ last_message = EXCLUDED.last_message,
+ last_message_time = EXCLUDED.last_message_time;
+ """
+ logger.debug(f"sql= {sql1}")
+ # Execute update query
+ cur.execute(sql1)
+
+ # Commit the changes to the database
+ conn.commit()
+
+ except Exception as e:
+ logger.error(f"Error inserting to mobile_clients: {str(e)}")
+ ok = 0
+
+ cur.close()
+ conn.close()
+ payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
return
- else:
- debug_string = "This device_id is not editable"
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTPError
+ elif function == "get_raw_data":
+ #container = GetReference("/MAC")
+ #MAC = req_dict["MAC"][0]
+ #sensor = req_dict["sensor"][0]
+ #if "part" in req_dict:
+ #part = req_dict["part"][0]
+ #else:
+ #part = ""
+ #from_time = req_dict["from_time"][0]
+ #to_time = req_dict["to_time"][0]
+ #timezone_str = req_dict["tzone"][0]
+ #AddToLog("get_raw_data:" + str(MAC) +","+ str(sensor) + "," + str(from_time) + "," + str(to_time) + "," + part+ "," + timezone_str)
+ ##raw_data = GetRawSensorData(container, MAC, sensor, from_time, to_time, timezone_str)
+ raw_data = []#GetRawSensorDataFromBlobStorage(MAC, sensor, part, from_time, to_time, timezone_str)
+ data_payload = {'raw_data': raw_data}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
return
- elif function == "device_reboot":
- if 'mac' in form_data:
- MAC = form_data.get('mac').upper()
- device_id_or_mac = MAC
- else:
+
+ elif function == "get_presence_data":
+
+ deployment_id = form_data.get('deployment_id')
+ device_id_in_s = form_data.get('device_id')
+ device_id_in = None
+ refresh = True#form_data.get('refresh') == "1"
+
+ if privileges != "-1":
+ privileges_lst = privileges.split(",")
+ if deployment_id not in privileges_lst:
+ data_payload = {}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ filter = int(form_data.get('filter'))
+ ddate = form_data.get('date')
+ ddate = ddate.replace("_","-")
+ to_date = form_data.get('to_date')
+
+ if to_date == None:
+ to_date = ddate
+ else:
+ to_date = to_date.replace("_","-")
+
+ ddate, to_date = ensure_date_order(ddate, to_date)
+
+
+ date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
+ # Subtract one day
+ previous_day = date_obj - timedelta(days=1)
+ # Convert back to string
+ prev_date = previous_day.strftime("%Y-%m-%d")
+
+ data_type = form_data.get('data_type') #all, raw, presence, z-graph
+ if data_type == None or data_type == "":
+ data_type = "presence"
+
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
+ devices_list, device_ids = GetProximityList(deployment_id, timee)
+
+ if device_id_in_s != None: #lets remove other devices, since asking for one
+ device_id_in = int(device_id_in_s)
+ device_ids = [id for id in device_ids if id == device_id_in]
+ devices_list = [device for device in devices_list if device[1] == device_id_in]
+
+ time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
+ _, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
+
+
+
+ time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
+
+
+
+
+ # Calculate the difference in days
+
+ # Convert string to datetime object
+ date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
+ # Subtract one day
+ previous_day = date_obj - timedelta(days=1)
+
+ # Format back to string in the same format
+ time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
+
+ time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
+ time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
+ epoch_time = calendar.timegm(time_from_z.utctimetuple())
+
+ days_difference = (time_to - time_from).days
+ days_difference_long = days_difference + 1
+
+
+ #epoch_time = calendar.timegm(time_from.utctimetuple())
+ presence_map = {}
+ presence_map["time_start"] = epoch_time
+ presence_map["time_zone"] = time_zone_s
+
+
+
+ device_id_2_threshold = {}
+ device_id_2_location = {0: "Outside"}
+
+ for details in devices_list:
+
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+ if radar_threshold_group_st == None:
+ radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
+
+ if len(radar_threshold_group_st) > 8:
+ radar_threshold_group = json.loads(radar_threshold_group_st)
+ else:
+ radar_threshold_group = ["s3_max",12]
+
+ print(well_id, radar_threshold_group)
+ device_id_2_location[device_id] = location_name
+ device_id_2_threshold[device_id] = radar_threshold_group
+
+
+ ids_list = []
+ well_ids = []
+ id2well_id = {}
+ radar_fields_of_interest = []
+ device_field_indexes = {}
+ for details in devices_list:
+
+ if device_id_in == None or details[1] == device_id_in:
+ if "," in details[5]:
+ threshold_str = details[5]
+ try:
+ threshold_lst = json.loads(threshold_str)
+ except:
+ threshold_lst = ["s3",12]
+ #threshold_lst = ["s3_max",12]
+ else:
+ threshold_lst = ["s3",int(details[5])]
+ radar_field = threshold_lst[0]
+ #since we are getting 10 sec dat, no more need for min or max...
+ radar_field = radar_field.split("_")[0]
+ if radar_field not in radar_fields_of_interest:
+ device_field_indexes[radar_field] = len(radar_fields_of_interest)
+ radar_fields_of_interest.append(radar_field)
+
+ ids_list.append(details[1])
+ id2well_id[details[1]] = details[0]
+ well_ids.append(details[0])
+ presence_map["well_ids"] = well_ids
+
+
+ devices_list_str = ','.join(str(device[1]) for device in devices_list)
+ #sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+ #sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+ #print(sql)
+ zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
+ print(zsql)
+
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ #cur.execute(sql)
+ #my_data = None
+ myz_data = None
+
+ #my_data = cur.fetchall()
+ cur.execute(zsql)
+ myz_data = cur.fetchall()
+ device_id_2_threshold = {}
+ device_id_2_location = {0: "Outside"}
+ row_nr_2_device_id = {}
+ cnt = 0
+ row_nr_2_device_id[0] = 0
+
+
+ if myz_data != None:
+ temporary_map_day_plus = {}
+ presence_map['z_graph'] = {}
+ presence_map['longpresence'] = {}
+ presence_map['raw'] = {}
+
+ parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
+
+ #start_time = datetime.datetime(
+ #parsed_time.year,
+ #parsed_time.month,
+ #parsed_time.day,
+ #parsed_time.hour - 7, # Adjust for UTC-7
+ #parsed_time.minute,
+ #parsed_time.second,
+ #tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
+ #)
+ start_time = parsed_time.astimezone(pytz.UTC)
+
+ for details in devices_list:
+ #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
+
+
+ if radar_threshold_group_st == None:
+ radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
+
+ if len(radar_threshold_group_st) > 8:
+ radar_threshold_group = json.loads(radar_threshold_group_st)
+ else:
+ radar_threshold_group = ["s3",12]
+
+ device_id_2_location[device_id] = location_name
+ device_id_2_threshold[device_id] = radar_threshold_group
+
+
+ presence_map['z_graph'][well_id] = [] #just place holder
+ temporary_map_day_plus[well_id] = [0] * 6 * 1440 * days_difference_long
+ presence_map['longpresence'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
+ presence_map['raw'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
+
+
+ print(f"start_time: {start_time}")
+ print(f"epoch_time being sent: {epoch_time}")
+ print(f"epoch_time as date: {datetime.datetime.fromtimestamp(epoch_time, tz=pytz.UTC)}")
+
+
+ #start_time_ = myz_data[0][0]
+ st = time.time()
+ device_lookup_cache = {}
+ temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
+ presence_map = optimized_radar_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
+
+ #save_list_to_csv_method1(presence_map['longpresence'][475], "longpresence_initial_data.csv")
+
+
+
+ overlaps_str = GetOverlapps(deployment_id)
+ overlaps_lst = []
+ if overlaps_str != None:
+ if ":" in overlaps_str:
+ overlaps_lst = json.loads(overlaps_str)
+ temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
+
+ for device_id in ids_list:
+ device_id_str = str(device_id)
+
+ if filter > 1:
+ longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s, refresh)
+ presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
+
+ else: #straight decas
+ #presence_list = presence_map["presence"][id2well_id[device_id]]
+
+ #if data_type != "presence":
+ longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
+
+ z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
+ presence_map["z_graph"][id2well_id[device_id]] = z_graph
+
+
+ if data_type == "all" or data_type == "multiple":
+ #lets create "multiple" series
+ seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
+ #here seen_at is straight decas
+ #seen_at = [1 if x >= 2 else 0 for x in seen_at]
+ pers_in_deka = []
+ dekas_in_day = 6 * 1440
+ for i in range(dekas_in_day, len(seen_where_list_uf)):
+ n_pers = seen_where_list_uf[i]
+ pers_in_deka.append(100*len(n_pers))
+
+ seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
+ seen_at_lst = Decompress(seen_at)
+ pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
+ persons_decompressed = Decompress(pers_in_deka)
+ persons = Compress(persons_decompressed)
+
+ multiple_list = CreateZGraphAI(seen_at_lst)
+ presence_map["multiple"] = multiple_list
+ presence_map["persons"] = persons
+
+ presence_map["presence"] = CompressList(presence_map["longpresence"])
+
+ if data_type == "z-graph":
+ if "raw" in presence_map:
+ del presence_map["raw"]
+
+ #if "presence" in presence_map:
+ # del presence_map["presence"]
+
+ if "longpresence" in presence_map:
+ del presence_map["longpresence"]
+
+ if data_type == "multiple":
+ if "raw" in presence_map:
+ del presence_map["raw"]
+ #if "presence" in presence_map:
+ # del presence_map["presence"]
+
+ if "z_graph" in presence_map:
+ del presence_map["z_graph"]
+
+ #if "presence" in presence_map:
+ if "longpresence" in presence_map:
+ del presence_map["longpresence"]
+
+ data_payload = presence_map
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "get_zgraph_data":
+
+ deployment_id = form_data.get('deployment_id')
+
+ if privileges != "-1":
+ privileges_lst = privileges.split(",")
+ if deployment_id not in privileges_lst:
+ data_payload = {}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
+
device_id = int(form_data.get('device_id'))
- device_id_or_mac = device_id
- ok = ""
- if privileges == "-1":
- ok = DeviceReboot(device_id_or_mac, user_name)
- else:
devices = GetVisibleDevices(privileges)
- #for this to work, device_id needs to be specified, not MAC!
- for device in devices:
- if device[0] == device_id_or_mac:
- ok = DeviceReboot(device_id_or_mac, user_name)
- break
- print(f"OK = {ok}")
- if ok != "":
-
- payload = ok
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- else:
- debug_string = "This device_id is not editable"
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTPError
- return
- elif function == "device_delete":
-
- #check if admin!
-
- ok = DeleteRecordFromDB(form_data)
-
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "alarm_on_off":
- deployment_id = form_data.get('deployment_id')
- alarm_on = int(form_data.get('alarm_on'))
-
-
- if privileges != "-1":
- privileges_lst = privileges.split(",")
- if deployment_id not in privileges_lst:
+ if not any(item[0] == device_id for item in devices):
data_payload = {}
resp.media = package_response(data_payload)
resp.status = falcon.HTTP_200
return
- # Lets prepare data to do same as store_alarms function
- #read alarm_deployment_settings and all alarm_device_settings from db, and armm all bits that are enabled
- deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
- deployment_alarms = json.loads(deployment_alarms_json)
- enabled = deployment_alarms["enabled"]
- if alarm_on == 0:
- if GetBit(enabled, 2):
- enabled = set_character(enabled, 2, "0")
- deployment_alarms["enabled"] = enabled
- deployment_alarms_json = json.dumps(deployment_alarms)
- redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
- ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
- else:
- if not GetBit(enabled, 2):
- enabled = set_character(enabled, 2, "1")
- deployment_alarms["enabled"] = enabled
- deployment_alarms_json = json.dumps(deployment_alarms)
- redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
- ok = StoreAlarms2DBSimple(deployment_id, 0, deployment_alarms_json, "")
+ filter = int(form_data.get('filter'))
+ ddate = form_data.get('date')
+ ddate = ddate.replace("_","-")
+ to_date = form_data.get('to_date')
+
+ if to_date == None:
+ to_date = ddate
+ else:
+ to_date = to_date.replace("_","-")
+
+ ddate, to_date = ensure_date_order(ddate, to_date)
+ data_type = "z-graph"
+
+ time_zone_s = GetTimeZoneOfDeployment(deployment_id)
+ timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
+ devices_list, device_ids = GetProximityList(deployment_id, timee)
+
+ time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
+ _, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
+
+ time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
+ epoch_time = calendar.timegm(time_from_z.utctimetuple())
+
+ #time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
+ #epoch_time = calendar.timegm(time_from.utctimetuple())
+ time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
+
+ presence_map = {}
+ presence_map["time_start"] = epoch_time
+ presence_map["time_zone"] = time_zone_s
+
+ # Calculate the difference in days
+ days_difference = (time_to - time_from).days
+ days_difference_long = days_difference + 1
+ # Convert string to datetime object
+ date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
+ # Subtract one day
+ previous_day = date_obj - timedelta(days=1)
+
+ # Format back to string in the same format
+ time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
- if False: #no need to do it since every new_alarms call reads alarm_deployment_settings_ always
- record = {
- 'user_name': user_name,
- 'deployment_id': deployment_id,
- 'device_id': device_id
+ device_id_2_threshold = {}
+ device_id_2_location = {0: "Outside"}
+
+ for details in devices_list:
+
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+
+ if radar_threshold_group_st == None:
+ radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
+
+ if len(radar_threshold_group_st) > 8:
+ radar_threshold_group = json.loads(radar_threshold_group_st)
+ else:
+ radar_threshold_group = ["s3_max",12]
+
+ print(well_id, radar_threshold_group)
+
+ device_id_2_location[device_id] = location_name
+ device_id_2_threshold[device_id] = radar_threshold_group
+
+ ids_list = []
+ well_ids = []
+ id2well_id = {}
+ radar_fields_of_interest = []
+ device_field_indexes = {}
+ for details in devices_list:
+ threshold_str = details[5]
+ try:
+ threshold_lst = json.loads(threshold_str)
+ except:
+ threshold_lst = ["s3",12]
+ #threshold_lst = ["s3_max",12]
+
+ radar_field = threshold_lst[0]
+ #since we are getting 10 sec dat, no more need for min or max...
+ radar_field = radar_field.split("_")[0]
+ if radar_field not in radar_fields_of_interest:
+ device_field_indexes[radar_field] = len(radar_fields_of_interest)
+ radar_fields_of_interest.append(radar_field)
+
+ ids_list.append(details[1])
+ id2well_id[details[1]] = details[0]
+ well_ids.append(details[0])
+ presence_map["well_ids"] = well_ids
+
+
+ devices_list_str = ','.join(str(device[1]) for device in devices_list)
+ #sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+ sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+ print(sql)
+ if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ #zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
+ zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
+ print(zsql)
+
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ #cur.execute(sql)
+ #my_data = None
+ myz_data = None
+
+ #my_data = cur.fetchall()
+ #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ cur.execute(zsql)
+ myz_data = cur.fetchall()
+
+ #if my_data != None:
+
+ #device_id_2_threshold = {}
+ #device_id_2_location = {0: "Outside"}
+ #row_nr_2_device_id = {}
+ #cnt = 0
+ #row_nr_2_device_id[0] = 0
+
+ ##presence_map['longpresence'] and temporary_map_day_plus are similar, except one is used for Z-graph, and another for multiple persons detection
+
+ #if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
+ #presence_map['presence'] = {}
+ #presence_map['longpresence'] = {}
+
+ #if data_type == "raw" or data_type == "all":
+ #presence_map['raw'] = {}
+
+ #for details in devices_list:
+ ##(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
+ #well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
+
+ #if data_type == "raw" or data_type == "all":
+ #zeros_list = [0] * 6 * 1440 * days_difference
+ #presence_map['raw'][well_id] = zeros_list
+
+ #if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
+ #zeros_list = [0] * 6 * 1440 * days_difference
+ #presence_map['presence'][well_id] = zeros_list
+
+
+ ##presence_map[][well_id] = zeros_list
+ #cnt += 1
+ #row_nr_2_device_id[cnt] = well_id
+
+ #if radar_threshold_group_st == None:
+ #radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
+
+ #if len(radar_threshold_group_st) > 8:
+ #radar_threshold_group = json.loads(radar_threshold_group_st)
+ #else:
+ #radar_threshold_group = ["s3",12]
+
+ #device_id_2_location[well_id] = location_name
+ #device_id_2_threshold[well_id] = radar_threshold_group
+
+ #start_time_ = my_data[0][0]
+ #parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
+
+ #start_time = datetime.datetime(
+ #parsed_time.year,
+ #parsed_time.month,
+ #parsed_time.day,
+ #parsed_time.hour - 7, # Adjust for UTC-7
+ #parsed_time.minute,
+ #parsed_time.second,
+ #tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
+ #)
+
+ #presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
+
+ ##last_device_id = 0
+ ##for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
+ ##local_time = radar_read[0]
+ ##deca = int((local_time - start_time).total_seconds() / 10)
+ ##device_id = radar_read[1]
+ ##if device_id != last_device_id:
+ ##last_device_id = device_id
+ ##if data_type == "raw" or data_type == "all":
+ ##days_decas = len(presence_map['raw'][id2well_id[device_id]])
+ ##else:
+ ##days_decas = len(presence_map['presence'][id2well_id[device_id]])
+ ##well_id = id2well_id[device_id]
+ ##radar_threshold_group_st = device_id_2_threshold[device_id]
+ ##threshold_sig, threshold = radar_threshold_group_st
+ ##threshold_sig = threshold_sig.split("_")[0]
+
+ ##radar_val = radar_read[2+device_field_indexes[threshold_sig]]
+ ##if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ ##if radar_val > threshold:
+ ##if deca < days_decas:
+ ##presence_map['presence'][id2well_id[device_id]][deca] = 1
+
+ ##if data_type == "raw" or data_type == "all":
+ ##if deca < days_decas:
+ ##presence_map['raw'][id2well_id[device_id]][deca] = radar_val
+
+
+ if myz_data != None:
+ temporary_map_day_plus = {}
+ presence_map['z_graph'] = {}
+ for details in devices_list:
+ #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
+
+ #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ #zeros_list = [0] * 6 * 1440 * (days_difference_long) #+1 is for previous day
+
+ presence_map['z_graph'][well_id] = [] #just place holder
+ temporary_map_day_plus[well_id] = [0] * 6 * 1440 * (days_difference_long)
+ presence_map['longpresence'][well_id] = [0] * 6 * 1440 * (days_difference_long) #just place holder
+
+
+ parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
+
+ start_time = datetime.datetime(
+ parsed_time.year,
+ parsed_time.month,
+ parsed_time.day,
+ parsed_time.hour - 7, # Adjust for UTC-7
+ parsed_time.minute,
+ parsed_time.second,
+ tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
+ )
+
+
+ #start_time_ = myz_data[0][0]
+ st = time.time()
+ device_lookup_cache = {}
+ temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
+
+ if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
+ overlaps_str = GetOverlapps(deployment_id)
+ overlaps_lst = []
+ if overlaps_str != None:
+ if ":" in overlaps_str:
+ overlaps_lst = json.loads(overlaps_str)
+ temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
+
+ #if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
+ for device_id in ids_list:
+ device_id_str = str(device_id)
+ #if data_type == "presence" or data_type == "all" or data_type == "z-graph":
+ if filter > 1:
+ #presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
+ #presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
+ #presence_listt = filter_short_groupss(presence_map["presence"][id2well_id[device_id]], filter)
+ #if presence_list != presence_listt:
+ # print("stop")
+ #if data_type != "presence":
+ #longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
+ longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
+ #longpresence_listt = filter_short_groupss(presence_map["longpresence"][id2well_id[device_id]], filter)
+ #if longpresence_list != longpresence_listt:
+ # print("stop")
+ # store_to_file(presence_map["longpresence"][id2well_id[device_id]], "test_list")
+ #presence_map["presence"][id2well_id[device_id]] = presence_list
+ #if data_type != "presence":
+ presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
+
+ else: #straight decas
+ #presence_list = presence_map["presence"][id2well_id[device_id]]
+
+ #if data_type != "presence":
+ longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
+
+
+
+ #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ if filter > 1: #straight decas
+ presence_list1 = filter_short_high_groups_iterative_analog(temporary_map_day_plus[id2well_id[device_id]], filter)
+ else:
+ presence_list1 = temporary_map_day_plus[id2well_id[device_id]]
+
+ temporary_map_day_plus[id2well_id[device_id]] = presence_list1
+
+
+ #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
+ for device_id in ids_list:
+ #print(device_id_2_threshold[id2well_id[device_id]])
+ z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
+ presence_map["z_graph"][id2well_id[device_id]] = z_graph
+
+
+ if data_type == "all" or data_type == "multiple":
+ #lets create "multiple" series
+ seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
+ #here seen_at is straight decas
+ #seen_at = [1 if x >= 2 else 0 for x in seen_at]
+ pers_in_deka = []
+ dekas_in_day = 6 * 1440
+ for i in range(dekas_in_day, len(seen_where_list_uf)):
+ n_pers = seen_where_list_uf[i]
+ pers_in_deka.append(100*len(n_pers))
+
+ seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
+ seen_at_lst = Decompress(seen_at)
+ pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
+ persons_decompressed = Decompress(pers_in_deka)
+ persons = Compress(persons_decompressed)
+
+ multiple_list = CreateZGraphAI(seen_at_lst)
+ presence_map["multiple"] = multiple_list
+ presence_map["persons"] = persons
+
+ presence_map["presence"] = presence_map["longpresence"]
+ if data_type == "z-graph":
+ if "raw" in presence_map:
+ del presence_map["raw"]
+ if "presence" in presence_map:
+ del presence_map["presence"]
+ if "longpresence" in presence_map:
+ del presence_map["longpresence"]
+
+ if data_type == "multiple":
+ if "raw" in presence_map:
+ del presence_map["raw"]
+ if "presence" in presence_map:
+ del presence_map["presence"]
+ if "longpresence" in presence_map:
+ del presence_map["longpresence"]
+ if "z_graph" in presence_map:
+ del presence_map["z_graph"]
+
+ if "presence" in presence_map:
+ presence_map["presence"] = CompressList(presence_map["presence"])
+
+ data_payload = presence_map
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "get_candle_data":
+ container = GetReference("/MAC")
+ MAC = req_dict["MAC"][0]
+ sensor = req_dict["sensor"][0]
+ from_time = req_dict["from_time"][0]
+ to_time = req_dict["to_time"][0]
+ part = req_dict["part"][0]
+ tzone = req_dict["tzone"][0]
+ AddToLog(str(req_dict))
+ candle_data = GetCandleSensorData(container, MAC, sensor, from_time, to_time, part, tzone)
+ data_payload = {'candle_data': candle_data}
+ resp.media = package_response(data_payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "deployment_from_app":
+ editing_deployment_id = form_data.get('editing_deployment_id')
+
+ editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data, editing_deployment_id)
+ if editing_deployment_id != 0:
+ payload = {'ok': 1, 'deployment_id': editing_deployment_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ payload = {'ok': 0, 'error': debug_string, 'deployment_id': editing_deployment_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "deployment_form":
+ editing_deployment_id = form_data.get('editing_deployment_id')
+
+ editing_deployment_id, is_new_deployment = StoreDeployment2DB(form_data, editing_deployment_id)
+ if editing_deployment_id != 0:
+ payload = {'ok': 1, 'deployment_id': editing_deployment_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ payload = {'ok': 0, 'deployment_id': editing_deployment_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "deployment_delete":
+ ok = DeleteRecordFromDB(form_data)
+
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "deployments_list":
+ result_list = []
+ first_s = form_data.get('first')
+ last_s = form_data.get('last')
+ user_id = form_data.get('user_id')
+ first = 0
+ last = 1000000
+
+ try:
+ if first_s != None:
+ first = int(first_s)
+ except ValueError:
+ pass
+
+ try:
+ if last_s != None:
+ last = int(last_s)
+ except ValueError:
+ pass
+
+ #user_id = form_data.get('user_id')
+ if user_id == "" or user_id == None:
+ privileges, user_id = GetPriviledgesAndUserId(user_name)
+ else:
+ privileges = GetPriviledgesOnly(user_name)
+
+ all_deployments = ListDeployments(privileges, user_id)
+
+ cnt = 0
+
+ for deployment in all_deployments:
+ cnt += 1
+ if cnt >= first:
+ if deployment['beneficiary_id'] in user_id_2_user.keys():
+ caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": user_id_2_user[deployment['beneficiary_id']][3], "first_name": user_id_2_user[deployment['beneficiary_id']][5], "last_name": user_id_2_user[deployment['beneficiary_id']][6]}
+ else:
+ caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": deployment['beneficiary_id'][3], "first_name": deployment['beneficiary_id'][5], "last_name": deployment['beneficiary_id'][6]}
+ result_list.append(caretaker_min_object)
+ if cnt > last:
+ break
+
+ payload = {'result_list': result_list}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "device_list":
+ result_list = []
+ first_s = form_data.get('first')
+ last_s = form_data.get('last')
+
+ try:
+ first = int(first_s)
+ except ValueError:
+ first = 0
+
+ try:
+ last = int(last_s)
+ except ValueError:
+ last = 1000000
+
+ #user_id = form_data.get('user_id')
+
+ devices = GetVisibleDevices(privileges)
+
+ payload = {'result_list': devices}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "get_devices_locations":
+ well_ids = form_data.get('well_ids')
+ details_list = WellId2Details(well_ids)
+ #print(details_list)
+
+ to_report = []
+
+ for details in details_list:
+ if details[3] == -1:
+ report_record = str(details[0]) + " ?"
+ else:
+ report_record = str(details[0]) + " " + location_names[details[3]]
+ if details[4] != "" and details[4] != "initial":
+ report_record = report_record + " " +details[4]
+ if details[3] == -1:
+ to_report.append((details[0], "?", details[4], report_record))
+ else:
+ to_report.append((details[0], location_names[details[3]], details[4], report_record))
+
+ #print(to_report)
+ payload = {'deployments': to_report}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ elif function == "find_deployments":
+ #For all devices, find deployments that they are part of
+ #For all those deployments, return:
+ #deployment_id First_name Last_name (of beneficiary)
+ #list of (well_id, location_descriptions) all devices in each deployment
+
+
+ well_ids = form_data.get('well_ids')
+ #well_ids_lst = well_ids.split(",")
+ details_list = WellId2Details(well_ids)
+ to_report = []
+
+ privileges_lst = []
+ if "," in privileges:
+ privileges_lst = privileges.split(",")
+
+ if len(details_list) > 0:
+ macs_list_clean = []
+ devices_details = {}
+
+ for entry in details_list:
+ macs_list_clean.append(entry[2])
+ macs_formatted = "', '".join(macs_list_clean)
+
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ sql = f"SELECT deployment_id, beneficiary_id, devices FROM public.deployment_details WHERE devices::text ~* ANY(ARRAY['{macs_formatted}']);"
+ print(sql)
+ cur.execute(sql)
+ result = cur.fetchall()
+
+ if result != None and result != []:
+ users_list_clean = []
+ for entry in result: #for every deployment
+ macs_list_clean = []
+ deployment_id = str(entry[0])
+ if privileges == '-1':
+ deployment = DeploymentDetails(deployment_id)
+ address_str = ExtractAddress(deployment)
+ deployment = PurgeDeployment(deployment)
+ deployment["address"] = address_str
+
+ users_list_clean.append(str(entry[1])) #beneficiary_id
+ users_formatted = ", ".join(users_list_clean)
+
+ mac_devices_in_deployment = ast.literal_eval(entry[2])
+ for mac in mac_devices_in_deployment:
+ macs_list_clean.append(mac)
+
+ device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
+
+ devices_details[deployment_id] = deployment
+
+ elif deployment_id in privileges_lst:
+ deployment = DeploymentDetails(deployment_id)
+ address_str = ExtractAddress(deployment)
+ deployment = PurgeDeployment(deployment)
+ deployment["address"] = address_str
+ users_list_clean.append(str(entry[1]))
+ users_formatted = ", ".join(users_list_clean)
+
+ mac_devices_in_deployment = ast.literal_eval(entry[2])
+ for mac in mac_devices_in_deployment:
+ macs_list_clean.append(mac)
+
+ device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
+ devices_details[deployment_id] = deployment
+
+ device_list_to_report = []
+
+ for device in device_list:
+ #we need well_id, location and decription only
+ device_list_to_report.append((device[0], device[2], device[3]))
+ sql = f"SELECT first_name, last_name, email, picture FROM public.person_details WHERE user_id IN ({users_formatted});"
+ print(sql)
+ cur.execute(sql)
+ result1 = cur.fetchall()
+ counter = 0
+
+ for entry in result:
+ deployment_id = str(entry[0])
+ deployment = devices_details[deployment_id]
+ first_name, last_name, email, photo = result1[counter]
+ deployment["beneficiary_first_name"] = first_name
+ deployment["beneficiary_last_name"] = last_name
+ deployment["beneficiary_email"] = email
+ deployment["photo"] = photo
+ devices_details[deployment_id] = deployment
+ to_report.append((entry, device_list_to_report, devices_details[deployment_id]))
+
+
+ print(to_report)
+ payload = {'deployments': to_report}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "device_list_by_deployment":
+ result_list = []
+ first_s = form_data.get('first')
+ last_s = form_data.get('last')
+ deployment_id = form_data.get('deployment_id')
+ try:
+ first = int(first_s)
+ except ValueError:
+ first = 0
+
+ try:
+ last = int(last_s)
+ except ValueError:
+ last = 1000000
+
+
+ if privileges == "-1":
+ devices = GetVisibleDevices(deployment_id)
+ else:
+ privileges = privileges.split(",")
+ if deployment_id in privileges:
+ devices = GetVisibleDevices(deployment_id)
+
+ payload = {'result_list': devices}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "device_list_4_gui":
+ result_list = []
+ deploymentData = []
+ deviceData = []
+ macs_list = []
+ user_id = GetUserId(user_name)
+ all_deployments = ListDeployments(privileges, user_id)
+ #{'deployment_id': 21, 'beneficiary_id': 25, 'caretaker_id': 1, 'owner_id': 1, 'installer_id': 1, 'address_street': '661 Encore Way', 'address_city': 'San Jose', 'address_zip': '95134', 'address_state': 'CA', 'address_country': 'USA', 'devices': '["64B70888FAB0","64B70888F860","64B70888F6F0","64B708896BDC","64B708897428","64B70888FA84","64B70889062C"]', 'wifis': '', 'persons': 1, 'gender': 1, 'race': 1, 'born': 1940, 'pets': 0, 'time_zone': 'America/Los_Angeles'}
+ MAC2Deployment = {}
+
+ for deployment in all_deployments:
+ beneficiary_id = deployment['beneficiary_id']
+ user = GetNameFromUserId(beneficiary_id)
+ name = f"{user[1]} {user[2]}"
+ deploymentData.append({'deployment_id': str(deployment['deployment_id']), 'name': name})
+ devices = deployment['devices']
+ if devices != None and devices != None != "":
+
+ devices_list = ToList(devices)
+ for device in devices_list:
+ macs_list.append(device)
+ MAC2Deployment[device] = deployment['deployment_id']
+ #deviceData.append({'well_id': device[0], 'mac': device[1]})
+
+ deployment_id_list = []
+ deviceData = []
+
+ #row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
+
+ with get_db_connection() as conn:
+ with conn.cursor() as cur:
+ device_ids, device_list = MACsToWellIds(cur, macs_list)
+
+
+ for device in device_list:
+ if MAC2Deployment[device[4]] != "":
+ deviceData.append({'well_id': device[0], 'mac': device[4], 'room_name': device[2], 'deployment_id': MAC2Deployment[device[4]]})
+
+ #deploymentData = [{'deployment_id': '21', 'name': 'Robert Zmrzli House'}, {'deployment_id': '36', 'name': 'Fred Zmrzli Apartment'}]
+ #deviceData = [{ 'well_id': '300', 'mac': '64B70888F6F0', 'room_name': 'Living Room', 'deployment_id': '21' }, { 'well_id': '301', 'mac': '64B70888F6F1', 'room_name': 'Bathroom Main', 'deployment_id': '36' }]
+
+ payload = {
+ 'status': "success", 'deploymentData': deploymentData, 'deviceData': deviceData
}
- record_json = json.dumps(record)
- redis_conn.lpush('new_alarms', record_json)
- if alarm_on != 0:
- for device_id in device_alarms_json_map:
- device_alarms_json = device_alarms_json_map[device_id]
- device_alarms = json.loads(device_alarms_json)
- enabled_alarms = device_alarms["enabled_alarms"]
- armed_states = device_alarms["armed_states"]
+ logger.debug(f"device_list_4_gui------ {payload} ------------------------------------------")
- if GetBit(enabled_alarms, 8):
- armed_states = set_character(armed_states, 8, "1")
- if GetBit(enabled_alarms, 9):
- armed_states = set_character(armed_states, 9, "1")
- if GetBit(enabled_alarms, 10):
- armed_states = set_character(armed_states, 10, "1")
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
- device_alarms["armed_states"] = armed_states
- device_alarms_json = json.dumps(device_alarms)
- redis_conn.set(f'alarm_device_settings_{device_id}', device_alarms_json)
- ok = StoreAlarms2DBSimple(0, device_id, "", device_alarms_json)
+ elif function == "caretaker_form":
+ editing_user_id = form_data.get('editing_user_id')
+ email = form_data.get('email')
+ user_id = form_data.get('user_id')
+ if "@" not in email:
+ resp.media = package_response("Missing or illegal 'email' parameter", HTTP_400)
+ return
+
+ print(privileges)
+ if privileges == "-1":
+ ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
+ if ok == 1:
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ payload = {'ok': ok, 'error': debug_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif "-1" in privileges:
+ payload = {'ok': 0, 'error': "Not allowed!"}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+
+ elif function == "caretaker_delete":
+ if privileges == "-1":
+ ok = DeleteRecordFromDB(form_data)
+ else:
+ ok = 0
+ AddToLog(ok)
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "caretakers_list":
+ result_list = []
+ first_s = form_data.get('first')
+ last_s = form_data.get('last')
+
+ try:
+ first = int(first_s)
+ except ValueError:
+ first = 0
+
+ try:
+ last = int(last_s)
+ except ValueError:
+ last = 1000000
+
+ if privileges == "-1":
+ all_caretakers = ListCaretakers(privileges, user_name)
+
+ cnt = 0
+
+ for caretaker in all_caretakers:
+ cnt += 1
+ if cnt >= first:
+ caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
+ result_list.append(caretaker_min_object)
+ if cnt > last:
+ break
+ elif "-1" in privileges:
+ all_caretakers = ListCaretakers(privileges, user_name)
+
+ cnt = 0
+
+ for caretaker in all_caretakers:
+ cnt += 1
+ if cnt >= first:
+ caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
+ result_list.append(caretaker_min_object)
+ if cnt > last:
+ break
+ payload = {'result_list': result_list}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "new_user_form":
+ #this is only used from disclaimer form!
+ devices = form_data.get('devices')
+ user_name = form_data.get('user_name')
+ password = form_data.get('password')
+ email = form_data.get('email')
+
+ #if user_name == "" or password == "":
+                    #let's skip the comparison here
+ if True:
+ success = True
+ result = {}
+ result["deployed"] = []
+ result["not_found"] = []
+ else:
+ #lets check if devices listed are not part of existing deployment
+ success, result = DevicesNotUsed(devices, user_name)
+
+ if success:
+ if result["deployed"]:
+ error_string = f"These devices are already deployed: {result['deployed']}"
+ print(error_string)
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ if result["not_found"]:
+ error_string = f"These devices are not available: {result['not_found']}"
+ print(error_string)
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ print("All devices are available for deployment")
+ ok, error_string = StoreDisclaimer2DB(form_data)
+
+ if ok == False:
+ payload = {'ok': ok, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ in_db_user_name, user_id, password_in_db, role_ids, priviledges = AccountByEmailExists(email)
+ if user_id != None:
+ user_name = GenerateUserNameWithContext(in_db_user_name, form_data['firstName'], form_data['lastName'], user_id)
+ else:
+ user_name = GenerateUserNameWithContext(user_name, form_data['firstName'], form_data['lastName'], 0)
+
+
+ if (password == None or password == ""):
+ if password_in_db != None:
+ password = password_in_db
+ else:
+ password = CreatePassword(12)
+
+
+
+ #user_id = "0" #user that is adding this record. New user so "0"
+                    #For now we will assume that a person is unique by first and last name
+ #user_id = PersonInDB(form_data['firstName']+" "+form_data['lastName'])
+
+ #lets create new account for this caretaker
+
+
+                    #let's supplement form_data with the parts needed by the existing StoreCaretaker2DB function
+ editing_user_id = user_id #specify if editing existing user, otherwise "0"
+
+ form_data['role_ids'] = "2"
+ form_data['access_to_deployments'] = ""
+ #form_data['email'] = "" #this one matches
+
+ form_data['new_user_name'] = user_name
+
+ form_data['first_name'] = form_data['firstName']
+ form_data['last_name'] = form_data['lastName']
+ form_data['address_street'] = ""
+ form_data['address_city'] = ""
+ form_data['address_zip'] = ""
+ form_data['address_state'] = ""
+ form_data['address_country'] = ""
+ form_data['phone_number'] = form_data['phone']
+ form_data['picture'] = "/"
+ form_data['key'] = password
+
+ #logger.debug(f"editing_user_id= {editing_user_id} user_id= {user_id}")
+ ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
+
+ if ok == 1:
+                        #~/mqtt-auth-service/acl_manager.py is called there because any change that warrants an email (user_name, password or devices) also warrants an ACL update
+ SendWelcomeCaretakerEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'], form_data['signature'])
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ else:
+ privileges, user_id = ValidUser(user_name, password)
+ if user_id == "0": #bad password
+ error_string = f"Password does not match user {user_name}"
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+
+ else:
+ error_string = f"Error: {result}"
+ payload = {'ok': 0, 'error': error_string}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "beneficiary_form":
+ editing_user_id = form_data.get('editing_user_id')
+ email = form_data.get('email')
+ user_id = GetUserId(user_name)
+ if "@" in email:
+ beneficiary_id, if_new = StoreBeneficiary2DB(form_data, editing_user_id, user_id)
+ if beneficiary_id != 0:
+ payload = {'ok': 1, 'beneficiary_id': beneficiary_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ payload = {'ok': 0, 'beneficiary_id': beneficiary_id}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+ else:
+ payload = {'ok': 0, 'beneficiary_id': 0}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "beneficiary_delete":
+
+
+ ok = DeleteRecordFromDB(form_data)
+
+ payload = {'ok': ok}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "beneficiaries_list":
+ result_list = []
+ first_s = form_data.get('first')
+ last_s = form_data.get('last')
+
+ try:
+ first = int(first_s)
+ except ValueError:
+ first = 0
+
+ try:
+ last = int(last_s)
+ except ValueError:
+ last = 1000000
+
+ user_id = form_data.get('user_id')
+ all_beneficiaries = ListBeneficiaries(privileges, user_id)
+
+ cnt = 0
+
+ for beneficiary in all_beneficiaries:
+ cnt += 1
+ if cnt >= first:
+ beneficiary_min_object = {"user_id": beneficiary[0], "email": beneficiary[3], "first_name": beneficiary[5], "last_name": beneficiary[6]}
+ result_list.append(beneficiary_min_object)
+ if cnt > last:
+ break
+
+ payload = {'result_list': result_list}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ return
+
+ elif function == "activities_report_details":
+ deployment_id = form_data.get('deployment_id')
+
+ timezone_str = GetTimeZoneOfDeployment(deployment_id)
+ filterr = form_data.get('filter')
+ if filterr == None:
+ filterr = 6
+ else:
+ filterr = int(filterr)
+
+ refresh = form_data.get('refresh') == "1"
+ ddate = current_date_at_tz(timezone_str)
+ timee = LocalDateToUTCEpoch(ddate, timezone_str)+5 #add so date boundary is avoided
+ devices_list, device_ids = GetProximityList(deployment_id, timee)
+
+ #Here we need to add per day: (all based on Z-graph data!)
+ #Bathroom visits number
+ #Bathroom time spent
+                #Number of sleep wakes (as indicated by breaks in the Z-graph during the 10PM to 9AM period)
+ #Sleep length (For now add all times seen in bedroom)
+ #Kitchen visits number
+ #Kitchen time spent
+ #Most frequented room visits number
+ #Most frequented room time spent
+
+ #Lets find device_id of bathroom sensor
+
+
+ bathroom_device_id, location_ba, bathroom_well_id = FindDeviceByRole(deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
+ bedroom_device_id, location_be, bedroom_well_id = FindDeviceByRole(deployment_id, ["Bedroom Master", "Bedroom", "Bedroom Guest"])
+ kitchen_device_id, location_ke, kitchen_well_id = FindDeviceByRole(deployment_id, ["Kitchen"])
+                most_present_device_id, location_ot, most_present_well_id = FindDeviceByRole(deployment_id, []) #this will find most_present (as defined in another field of the device record)
+
+ if isinstance(location_ot, int):
+ other_location = location_names[location_ot]
+ else:
+ other_location = location_ot
+
+ #weekly
+ week_dates = get_week_days_and_dates(7, timezone_str)
+ month_dates = get_week_days_and_dates(30, timezone_str)
+ six_months_dates = get_week_days_and_dates(180, timezone_str)
+
+ other_color = Loc2Color[other_location][0]
+ rgb_string = f"rgb({other_color[0]}, {other_color[1]}, {other_color[2]})"
+
+ rooms_reports = [("Bathroom", "blue", bathroom_device_id, bathroom_well_id), ("Bedroom", "green", bedroom_device_id, bedroom_well_id), ("Kitchen", "red", kitchen_device_id, kitchen_well_id), (other_location, rgb_string, most_present_device_id, most_present_well_id)]
+
+ six_months_report = []
+ for room_details in rooms_reports:
+ device_id = room_details[2]
+ if device_id > 0:
+
+ well_id = room_details[3]
+ radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+ room = {"name": room_details[0],"color": room_details[1]}
+ data = []
+
+ for day_activity in six_months_dates:
+ datee = day_activity[0]
+ hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+ if hours > 18:
+ print("Too long 6m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+ data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
+ data.append(data_record)
+
+ room["data"] = data
+ six_months_report.append(room)
+
+ weekly_report = []
+ for room_details in rooms_reports:
+ device_id = room_details[2]
+ if device_id > 0:
+ well_id = room_details[3]
+ radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+ room = {"name": room_details[0],"color": room_details[1]}
+ data = []
+
+ for day_activity in week_dates:
+ datee = day_activity[0]
+ hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+ data_record = { "title": day_activity[1], "events": events_count, "hours": hours}
+ data.append(data_record)
+
+ room["data"] = data
+ weekly_report.append(room)
+
+ monthly_report = []
+ for room_details in rooms_reports:
+ device_id = room_details[2]
+ if device_id > 0:
+ well_id = room_details[3]
+ radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+ room = {"name": room_details[0],"color": room_details[1]}
+ data = []
+
+ for day_activity in month_dates:
+ datee = day_activity[0]
+ hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+ #if datee == "2025-05-20" and device_id == 572:
+ # print(hours)
+ if hours > 18:
+ print("Too long m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+ data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
+ data.append(data_record)
+
+ room["data"] = data
+ monthly_report.append(room)
+
+
+
+
+ result_dictionary = {
+ "alert_text": "No alert",
+ "alert_color": "bg-green-100 text-green-700",
+ "chart_data": [
+ {
+ "name": "Weekly",
+ "rooms": [
+ {
+ "name": "Bathroom",
+ "color": "blue",
+ "data": [
+ { "title": "Monday", "events": 186, "hours": 80.56 },
+ { "title": "Tuesday", "events": 305, "hours": 200 },
+ { "title": "Wednesday", "events": 237, "hours": 120 },
+ { "title": "Thursday", "events": 73, "hours": 190 },
+ { "title": "Friday", "events": 209, "hours": 130 },
+ { "title": "Saturday", "events": 214, "hours": 140 },
+ { "title": "Sunday", "events": 150, "hours": 100 }
+ ]
+ },
+ {
+ "name": "Bedroom",
+ "color": "green",
+ "data": [
+ { "title": "Monday", "events": 186, "hours": 80 },
+ { "title": "Tuesday", "events": 305, "hours": 200 },
+ { "title": "Wednesday", "events": 237, "hours": 120 },
+ { "title": "Thursday", "events": 73, "hours": 190 },
+ { "title": "Friday", "events": 209, "hours": 130 },
+ { "title": "Saturday", "events": 214, "hours": 140 },
+ { "title": "Sunday", "events": 150, "hours": 100 }
+ ]
+ },
+ {
+ "name": "Kitchen",
+ "color": "red",
+ "data": [
+ { "title": "Monday", "events": 186, "hours": 80 },
+ { "title": "Tuesday", "events": 305, "hours": 200 },
+ { "title": "Wednesday", "events": 237, "hours": 120 },
+ { "title": "Thursday", "events": 73, "hours": 190 },
+ { "title": "Friday", "events": 209, "hours": 130 },
+ { "title": "Saturday", "events": 214, "hours": 140 },
+ { "title": "Sunday", "events": 150, "hours": 100 }
+ ]
+ },
+ {
+ "name": "Other",
+ "color": "yellow",
+ "data": [
+ { "title": "Monday", "events": 186, "hours": 80 },
+ { "title": "Tuesday", "events": 305, "hours": 200 },
+ { "title": "Wednesday", "events": 237, "hours": 120 },
+ { "title": "Thursday", "events": 73, "hours": 190 },
+ { "title": "Friday", "events": 209, "hours": 130 },
+ { "title": "Saturday", "events": 214, "hours": 140 },
+ { "title": "Sunday", "events": 150, "hours": 100 }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "Monthly",
+ "rooms": [
+ {
+ "name": "Bathroom",
+ "color": "purple",
+ "data": [
+ { "title": "01", "events": 67, "hours": 45 },
+ { "title": "02", "events": 97, "hours": 67 },
+ { "title": "03", "events": 87, "hours": 23 },
+ { "title": "04", "events": 42, "hours": 12 },
+ { "title": "05", "events": 64, "hours": 48 },
+ { "title": "06", "events": 53, "hours": 34 },
+ { "title": "07", "events": 75, "hours": 23 },
+ { "title": "08", "events": 45, "hours": 56 },
+ { "title": "09", "events": 85, "hours": 47 },
+ { "title": "10", "events": 34, "hours": 29 },
+ { "title": "11", "events": 49, "hours": 30 },
+ { "title": "12", "events": 62, "hours": 33 },
+ { "title": "13", "events": 75, "hours": 44 },
+ { "title": "14", "events": 88, "hours": 57 },
+ { "title": "15", "events": 94, "hours": 65 },
+ { "title": "16", "events": 45, "hours": 21 },
+ { "title": "17", "events": 76, "hours": 54 },
+ { "title": "18", "events": 85, "hours": 62 },
+ { "title": "19", "events": 43, "hours": 28 },
+ { "title": "20", "events": 59, "hours": 34 },
+ { "title": "21", "events": 78, "hours": 56 },
+ { "title": "22", "events": 64, "hours": 39 },
+ { "title": "23", "events": 93, "hours": 72 },
+ { "title": "24", "events": 52, "hours": 28 },
+ { "title": "25", "events": 71, "hours": 48 },
+ { "title": "26", "events": 85, "hours": 63 }
+ ]
+ },
+ {
+ "name": "Bedroom",
+ "color": "#3b82f6",
+ "data": [
+ { "title": "01", "events": 61, "hours": 42 },
+ { "title": "02", "events": 72, "hours": 36 },
+ { "title": "03", "events": 94, "hours": 49 },
+ { "title": "04", "events": 67, "hours": 59 },
+ { "title": "05", "events": 54, "hours": 20 },
+ { "title": "06", "events": 77, "hours": 64 },
+ { "title": "07", "events": 81, "hours": 70 },
+ { "title": "08", "events": 53, "hours": 25 },
+ { "title": "09", "events": 79, "hours": 42 },
+ { "title": "10", "events": 84, "hours": 65 },
+ { "title": "11", "events": 62, "hours": 54 },
+ { "title": "12", "events": 45, "hours": 23 },
+ { "title": "13", "events": 88, "hours": 71 },
+ { "title": "14", "events": 74, "hours": 44 },
+ { "title": "15", "events": 91, "hours": 59 },
+ { "title": "16", "events": 46, "hours": 31 },
+ { "title": "17", "events": 73, "hours": 40 },
+ { "title": "18", "events": 85, "hours": 63 },
+ { "title": "19", "events": 78, "hours": 66 },
+ { "title": "20", "events": 66, "hours": 42 },
+ { "title": "21", "events": 95, "hours": 78 },
+ { "title": "22", "events": 57, "hours": 39 },
+ { "title": "23", "events": 72, "hours": 48 },
+ { "title": "24", "events": 48, "hours": 21 },
+ { "title": "25", "events": 89, "hours": 61 },
+ { "title": "26", "events": 77, "hours": 44 }
+ ]
+ },
+ {
+ "name": "Kitchen",
+ "color": "orange",
+ "data": [
+ { "title": "01", "events": 94, "hours": 59 },
+ { "title": "02", "events": 62, "hours": 48 },
+ { "title": "03", "events": 76, "hours": 38 },
+ { "title": "04", "events": 81, "hours": 62 },
+ { "title": "05", "events": 64, "hours": 27 },
+ { "title": "06", "events": 53, "hours": 31 },
+ { "title": "07", "events": 92, "hours": 65 },
+ { "title": "08", "events": 85, "hours": 42 },
+ { "title": "09", "events": 74, "hours": 35 },
+ { "title": "10", "events": 67, "hours": 55 },
+ { "title": "11", "events": 49, "hours": 23 },
+ { "title": "12", "events": 88, "hours": 75 },
+ { "title": "13", "events": 93, "hours": 66 },
+ { "title": "14", "events": 76, "hours": 34 },
+ { "title": "15", "events": 59, "hours": 39 },
+ { "title": "16", "events": 72, "hours": 51 },
+ { "title": "17", "events": 83, "hours": 44 },
+ { "title": "18", "events": 74, "hours": 33 },
+ { "title": "19", "events": 69, "hours": 28 },
+ { "title": "20", "events": 85, "hours": 56 },
+ { "title": "21", "events": 53, "hours": 22 },
+ { "title": "22", "events": 92, "hours": 70 },
+ { "title": "23", "events": 71, "hours": 41 },
+ { "title": "24", "events": 67, "hours": 25 },
+ { "title": "25", "events": 86, "hours": 74 },
+ { "title": "26", "events": 94, "hours": 68 }
+ ]
+ },
+ {
+ "name": "Other",
+ "color": "hotpink",
+ "data": [
+ { "title": "01", "events": 57, "hours": 27 },
+ { "title": "02", "events": 74, "hours": 33 },
+ { "title": "03", "events": 84, "hours": 53 },
+ { "title": "04", "events": 95, "hours": 68 },
+ { "title": "05", "events": 71, "hours": 48 },
+ { "title": "06", "events": 92, "hours": 76 },
+ { "title": "07", "events": 85, "hours": 62 },
+ { "title": "08", "events": 49, "hours": 25 },
+ { "title": "09", "events": 66, "hours": 38 },
+ { "title": "10", "events": 63, "hours": 31 },
+ { "title": "11", "events": 75, "hours": 47 },
+ { "title": "12", "events": 94, "hours": 72 },
+ { "title": "13", "events": 79, "hours": 49 },
+ { "title": "14", "events": 72, "hours": 45 },
+ { "title": "15", "events": 88, "hours": 61 },
+ { "title": "16", "events": 83, "hours": 52 },
+ { "title": "17", "events": 92, "hours": 76 },
+ { "title": "18", "events": 73, "hours": 40 },
+ { "title": "19", "events": 65, "hours": 28 },
+ { "title": "20", "events": 76, "hours": 63 },
+ { "title": "21", "events": 58, "hours": 30 },
+ { "title": "22", "events": 84, "hours": 67 },
+ { "title": "23", "events": 72, "hours": 41 },
+ { "title": "24", "events": 79, "hours": 46 },
+ { "title": "25", "events": 63, "hours": 29 },
+ { "title": "26", "events": 68, "hours": 39 }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "6 Months",
+ "rooms": [
+ {
+ "name": "Bathroom",
+ "color": "purple",
+ "data": [
+ { "title": "October", "events": 62, "hours": 23 },
+ { "title": "November", "events": 76, "hours": 42 },
+ { "title": "December", "events": 85, "hours": 54 },
+ { "title": "January", "events": 94, "hours": 67 },
+ { "title": "February", "events": 63, "hours": 35 },
+ { "title": "March", "events": 81, "hours": 46 }
+ ]
+ },
+ {
+ "name": "Bedroom",
+ "color": "#3b82f6",
+ "data": [
+ { "title": "October", "events": 64, "hours": 35 },
+ { "title": "November", "events": 88, "hours": 71 },
+ { "title": "December", "events": 79, "hours": 54 },
+ { "title": "January", "events": 72, "hours": 49 },
+ { "title": "February", "events": 53, "hours": 32 },
+ { "title": "March", "events": 93, "hours": 67 }
+ ]
+ },
+ {
+ "name": "Kitchen",
+ "color": "orange",
+ "data": [
+ { "title": "October", "events": 92, "hours": 65 },
+ { "title": "November", "events": 85, "hours": 62 },
+ { "title": "December", "events": 74, "hours": 49 },
+ { "title": "January", "events": 63, "hours": 33 },
+ { "title": "February", "events": 78, "hours": 56 },
+ { "title": "March", "events": 69, "hours": 41 }
+ ]
+ },
+ {
+ "name": "Other",
+ "color": "hotpink",
+ "data": [
+ { "title": "October", "events": 88, "hours": 54 },
+ { "title": "November", "events": 72, "hours": 39 },
+ { "title": "December", "events": 84, "hours": 63 },
+ { "title": "January", "events": 76, "hours": 46 },
+ { "title": "February", "events": 93, "hours": 72 },
+ { "title": "March", "events": 68, "hours": 29 }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+
+ result_dictionary["chart_data"][0]["rooms"] = weekly_report
+ result_dictionary["chart_data"][1]["rooms"] = monthly_report
+ result_dictionary["chart_data"][2]["rooms"] = six_months_report
+
+
+
+ payload = result_dictionary #{'result_dictionary': result_dictionary}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ AddToLog(payload)
+ return
+
+
+ elif function == "dashboard_list":
+ # works in UTC only
+
+ logger.error(f"------------------------------- dashboard_list ------------------------------------------")
+
+ caretaker = user_name
+ #date_s = form_data.get('date')
+ time_s = form_data.get('time')
+ date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
+ filterr = form_data.get('filter')
+ if filterr == None:
+ filterr = 5
+
+ privileges = GetPriviledgesOnly(caretaker)
+ deployments_list = []
+ if privileges != '':
+ deployments_list = GetUsersFromDeployments(privileges)
+
+ #all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
+ #AddToLog(all_beneficiaries)
+
+ result_list = []
+
+ for deployment_id, first_name, last_name in deployments_list:
+ details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
+ if details != {}:
+
+ details["units"] = "°C"
+ if "America" in details["time_zone"]:
+ details["temperature"] = CelsiusToFahrenheit(details["temperature"])
+ details["units"] = "°F"
+ devices_list, device_ids = GetProximityList(deployment_id, date_s)
+ # convert dates back to UTC
+ #details['bathroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
+ #details['kitchen_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
+ #details['bedroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bedroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
+ #details['last_detected_time'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['last_detected_time'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
+ location_list = []
+ for room_details in devices_list:
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+ if description == None or description == "":
+ location_list.append(location_name)
+ else:
+ location_list.append(location_name + " " + description)
+ details["deployment_id"] = deployment_id
+ details["location_list"] = location_list
+ result_list.append(details)
+
+ payload = {'result_list': result_list}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ AddToLog(payload)
+ return
+
+
+ elif function == "dashboard_single":
+ caretaker = user_name
+ #date_s = form_data.get('date')
+ date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
+ deployment_id = form_data.get('deployment_id')
+ filterr = form_data.get('filter')
+ if filterr == None:
+ filterr = 5
+
+
+ #all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
+ #AddToLog(all_beneficiaries)
+
+ result_list = []
+
+ details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr, False)
+ details["units"] = "°C"
+ if "America" in details["time_zone"]:
+ details["temperature"] = CelsiusToFahrenheit(details["temperature"])
+ details["bedroom_temperature"] = CelsiusToFahrenheit(details["bedroom_temperature"])
+ details["units"] = "°F"
+ devices_list, device_ids = GetProximityList(deployment_id, date_s)
+ location_list = []
+ for room_details in devices_list:
+ well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
+ if description == None or description == "":
+ location_list.append(location_name)
+ else:
+ location_list.append(location_name + " " + description)
+ details["deployment_id"] = deployment_id
+ details["location_list"] = location_list
+ settings = {"wellness_score": False, "last_seen": False, "sleep_report": True, "activity_report": True, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
+ details["settings"] = settings
+ result_list.append(details)
+ payload = {'result_list': result_list}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ AddToLog(payload)
+ return
+
+ elif function == "request_node_red":
+ logger.error(f"------------------------------- {function} ------------------------------------------")
+ #this will:
+ # 1.prepare folder and settings.js
+ # 2.start instance on node-red and return it's return port
+ #caretaker = user_name
+ #date_s = form_data.get('date')
+ time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
+ #deployment_id = form_data.get('deployment_id')
+ #redis_conn.set('node_red_requests', str([radar_threshold_signal, radar_threshold_value]))
+ # Hashes (dictionaries)
+ logger.error(f"Storing to node_red_requests {user_name}")
+ redis_conn.hset('node_red_requests', mapping={
+ 'user_name': user_name,
+ 'token': token,
+ 'time': time_s,
+ 'requests': 1
+ })
+
+ payload = {'ok': 1}
+ logger.error(f"Responding {payload}")
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ elif function == "get_node_red_port":
+ #this will:
+ # 1.prepare folder and settings.js
+ # 2.start instance on node-red and return it's return port
+ hash_data = GetRedisMap(f'node_red_status_{user_name}')
+ port = 0
+ if hash_data != {}:
+ port = hash_data['port']
+ #date_s = form_data.get('date')
+ #date_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
+ #deployment_id = form_data.get('deployment_id')
+ payload = {'port': port}
+ logger.debug(f"get_node_red_port: {payload}")
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ elif function == "activity_detected":
+ #this will:
+ # 1.store to REDIS time of last activity
+ time_s = form_data.get('time')
+
+ hash_data = GetRedisMap(f'node_red_status_{user_name}')
+ port = 0
+ if hash_data != {}:
+ port = hash_data['port']
+
+ redis_conn.hset(f'node_red_status_{user_name}', mapping={
+ 'port': port,
+ 'last_activity': time_s
+ })
+
+ payload = {'ok': 1}
+ logger.debug(f"activity_detected: {payload}")
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
+ elif function == "store_flow":
+ #this will:
+ # 1.store flow into DB
+ time_s = form_data.get('time')
+ flow_json = form_data.get('flow')
+ logger.debug(f"store_flow: {flow_json}")
+ StoreFlow2DB(user_name, time_s, flow_json)
+ payload = {'ok': 1}
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+ elif function == "store_alarms":
+ #this will:
+ # 1.store flow into DB
+ deployment_id = form_data.get('deployment_id')
+ device_id = form_data.get('device_id')
+ deployment_alarms_json = form_data.get('deployment_alarms')
+ device_alarms_json = form_data.get('device_alarms')
+
+ logger.debug(f"store_alarms: {deployment_alarms_json}")
+
+
+ if privileges == "-1" or deployment_id in privileges:
+ ok = StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json)
+
+ redis_conn.set('alarm_device_settings_'+device_id, device_alarms_json)
+ redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
- #of course it is needed, how will well-alerts know that new data is stored to db?
# Create record dictionary
record = {
'user_name': user_name,
@@ -18723,1993 +21028,93 @@ class WellApi:
redis_conn.lpush('new_alarms', record_json)
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_alarm_state":
- deployment_id = form_data.get('deployment_id')
-
- if privileges != "-1":
- privileges_lst = privileges.split(",")
- if deployment_id not in privileges_lst:
- data_payload = {}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- # Lets prepare data to do same as store_alarms function
- #read alarm_deployment_settings and all alarm_device_settings from db, and armm all bits that are enabled
- deployment_alarms_json, device_alarms_json_map = GetAlarmAllDetails(deployment_id)
- deployment_alarms = json.loads(deployment_alarms_json)
- enabled = deployment_alarms["enabled"]
- if not GetBit(enabled, 2):
- alarm_state = 2 #off
- else:
- #if any device was trigerred, show 0, otherwise 1
- alarm_state = 1
- for device_id in device_alarms_json_map:
- device_alarms_json = device_alarms_json_map[device_id]
- device_alarms = json.loads(device_alarms_json)
- enabled_alarms = device_alarms["enabled_alarms"]
- armed_states = device_alarms["armed_states"]
-
- if GetBit(enabled_alarms, 8):
- if not GetBit(armed_states, 8): #if 0
- alarm_state = 0
- break
- if GetBit(enabled_alarms, 9):
- if not GetBit(armed_states, 9):
- alarm_state = 0
- break
- if GetBit(enabled_alarms, 10):
- if not GetBit(armed_states, 10):
- alarm_state = 0
- break
-
-
- payload = {'ok': 1, 'alarm_state':alarm_state}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "submit_mobile_message":
- message = form_data.get('message')
- mqtt_id = form_data.get("mqtt_id")
- privileges, user_id = GetPriviledgesAndUserId(user_name)
- if "function" in message:
- current_utc_time = datetime.datetime.now(timezone.utc)
- message_map = json.loads(message)
- func = message_map["function"]
-
-
- conn = get_db_connection()
- cur = conn.cursor()
- error_string = ""
- ok = 1
- try:
-
- current_utc_time = datetime.datetime.now(timezone.utc)
-
- # Convert to epoch time
- current_epoch_time = int(current_utc_time.timestamp() *1000)
-
-
- sql = f"""
- INSERT INTO public.mobile_clients_messages
- (time, mqtt_id, message, function)
- VALUES
- ({current_epoch_time}, '{CleanObject(mqtt_id)}','{CleanObject(message)}','{CleanObject(func)}');
- """
- logger.debug(f"sql= {sql}")
- # Execute update query
- cur.execute(sql)
-
- # Commit the changes to the database
- conn.commit()
-
- # Close the cursor and connection
-
-
- except Exception as e:
- logger.error(f"Error inserting to mobile_clients_messages: {str(e)}")
- ok = 0
-
-
- try:
-
- current_utc_time = datetime.datetime.now(timezone.utc)
-
- # Convert to epoch time
- current_epoch_time = int(current_utc_time.timestamp() *1000)
-
-
- sql1 = f"""
- INSERT INTO public.mobile_clients
- (mqtt_id, user_name, user_id, last_message, last_message_time)
- VALUES
- ('{CleanObject(mqtt_id)}', '{CleanObject(user_name)}', {user_id}, '{CleanObject(message)}', {current_epoch_time})
- ON CONFLICT (mqtt_id)
- DO UPDATE SET
- user_name = EXCLUDED.user_name,
- user_id = EXCLUDED.user_id,
- last_message = EXCLUDED.last_message,
- last_message_time = EXCLUDED.last_message_time;
- """
- logger.debug(f"sql= {sql1}")
- # Execute update query
- cur.execute(sql1)
-
- # Commit the changes to the database
- conn.commit()
-
- except Exception as e:
- logger.error(f"Error inserting to mobile_clients: {str(e)}")
- ok = 0
-
- cur.close()
- conn.close()
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "get_raw_data":
- #container = GetReference("/MAC")
- #MAC = req_dict["MAC"][0]
- #sensor = req_dict["sensor"][0]
- #if "part" in req_dict:
- #part = req_dict["part"][0]
- #else:
- #part = ""
- #from_time = req_dict["from_time"][0]
- #to_time = req_dict["to_time"][0]
- #timezone_str = req_dict["tzone"][0]
- #AddToLog("get_raw_data:" + str(MAC) +","+ str(sensor) + "," + str(from_time) + "," + str(to_time) + "," + part+ "," + timezone_str)
- ##raw_data = GetRawSensorData(container, MAC, sensor, from_time, to_time, timezone_str)
- raw_data = []#GetRawSensorDataFromBlobStorage(MAC, sensor, part, from_time, to_time, timezone_str)
- data_payload = {'raw_data': raw_data}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_presence_data":
-
- deployment_id = form_data.get('deployment_id')
- device_id_in_s = form_data.get('device_id')
- device_id_in = None
- refresh = True#form_data.get('refresh') == "1"
-
- if privileges != "-1":
- privileges_lst = privileges.split(",")
- if deployment_id not in privileges_lst:
- data_payload = {}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- filter = int(form_data.get('filter'))
- ddate = form_data.get('date')
- ddate = ddate.replace("_","-")
- to_date = form_data.get('to_date')
-
- if to_date == None:
- to_date = ddate
- else:
- to_date = to_date.replace("_","-")
-
- ddate, to_date = ensure_date_order(ddate, to_date)
-
-
- date_obj = datetime.datetime.strptime(ddate, "%Y-%m-%d")
- # Subtract one day
- previous_day = date_obj - timedelta(days=1)
- # Convert back to string
- prev_date = previous_day.strftime("%Y-%m-%d")
-
- data_type = form_data.get('data_type') #all, raw, presence, z-graph
- if data_type == None or data_type == "":
- data_type = "presence"
-
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
- devices_list, device_ids = GetProximityList(deployment_id, timee)
-
- if device_id_in_s != None: #lets remove other devices, since asking for one
- device_id_in = int(device_id_in_s)
- device_ids = [id for id in device_ids if id == device_id_in]
- devices_list = [device for device in devices_list if device[1] == device_id_in]
-
- time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
- _, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
-
-
-
- time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
-
-
-
-
- # Calculate the difference in days
-
- # Convert string to datetime object
- date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
- # Subtract one day
- previous_day = date_obj - timedelta(days=1)
-
- # Format back to string in the same format
- time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
-
- time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
- time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
- epoch_time = calendar.timegm(time_from_z.utctimetuple())
-
- days_difference = (time_to - time_from).days
- days_difference_long = days_difference + 1
-
-
- #epoch_time = calendar.timegm(time_from.utctimetuple())
- presence_map = {}
- presence_map["time_start"] = epoch_time
- presence_map["time_zone"] = time_zone_s
-
-
-
- device_id_2_threshold = {}
- device_id_2_location = {0: "Outside"}
-
- for details in devices_list:
-
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
- if radar_threshold_group_st == None:
- radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
-
- if len(radar_threshold_group_st) > 8:
- radar_threshold_group = json.loads(radar_threshold_group_st)
- else:
- radar_threshold_group = ["s3_max",12]
-
- print(well_id, radar_threshold_group)
- device_id_2_location[device_id] = location_name
- device_id_2_threshold[device_id] = radar_threshold_group
-
-
- ids_list = []
- well_ids = []
- id2well_id = {}
- radar_fields_of_interest = []
- device_field_indexes = {}
- for details in devices_list:
-
- if device_id_in == None or details[1] == device_id_in:
- if "," in details[5]:
- threshold_str = details[5]
- try:
- threshold_lst = json.loads(threshold_str)
- except:
- threshold_lst = ["s3",12]
- #threshold_lst = ["s3_max",12]
- else:
- threshold_lst = ["s3",int(details[5])]
- radar_field = threshold_lst[0]
- #since we are getting 10 sec dat, no more need for min or max...
- radar_field = radar_field.split("_")[0]
- if radar_field not in radar_fields_of_interest:
- device_field_indexes[radar_field] = len(radar_fields_of_interest)
- radar_fields_of_interest.append(radar_field)
-
- ids_list.append(details[1])
- id2well_id[details[1]] = details[0]
- well_ids.append(details[0])
- presence_map["well_ids"] = well_ids
-
-
- devices_list_str = ','.join(str(device[1]) for device in devices_list)
- #sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
- #sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
- #print(sql)
- zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
- print(zsql)
-
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- #cur.execute(sql)
- #my_data = None
- myz_data = None
-
- #my_data = cur.fetchall()
- cur.execute(zsql)
- myz_data = cur.fetchall()
- device_id_2_threshold = {}
- device_id_2_location = {0: "Outside"}
- row_nr_2_device_id = {}
- cnt = 0
- row_nr_2_device_id[0] = 0
-
-
- if myz_data != None:
- temporary_map_day_plus = {}
- presence_map['z_graph'] = {}
- presence_map['longpresence'] = {}
- presence_map['raw'] = {}
-
- parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
-
- #start_time = datetime.datetime(
- #parsed_time.year,
- #parsed_time.month,
- #parsed_time.day,
- #parsed_time.hour - 7, # Adjust for UTC-7
- #parsed_time.minute,
- #parsed_time.second,
- #tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
- #)
- start_time = parsed_time.astimezone(pytz.UTC)
-
- for details in devices_list:
- #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
-
-
- if radar_threshold_group_st == None:
- radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
-
- if len(radar_threshold_group_st) > 8:
- radar_threshold_group = json.loads(radar_threshold_group_st)
- else:
- radar_threshold_group = ["s3",12]
-
- device_id_2_location[device_id] = location_name
- device_id_2_threshold[device_id] = radar_threshold_group
-
-
- presence_map['z_graph'][well_id] = [] #just place holder
- temporary_map_day_plus[well_id] = [0] * 6 * 1440 * days_difference_long
- presence_map['longpresence'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
- presence_map['raw'][well_id] = [0] * 6 * 1440 * days_difference_long #just place holder
-
-
- print(f"start_time: {start_time}")
- print(f"epoch_time being sent: {epoch_time}")
- print(f"epoch_time as date: {datetime.datetime.fromtimestamp(epoch_time, tz=pytz.UTC)}")
-
-
- #start_time_ = myz_data[0][0]
- st = time.time()
- device_lookup_cache = {}
- threshold_cache = {}
- temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
- presence_map = optimized_radar_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
-
- #save_list_to_csv_method1(presence_map['longpresence'][475], "longpresence_initial_data.csv")
-
-
-
- overlaps_str = GetOverlapps(deployment_id)
- overlaps_lst = []
- if overlaps_str != None:
- if ":" in overlaps_str:
- overlaps_lst = json.loads(overlaps_str)
- temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
-
- for device_id in ids_list:
- device_id_str = str(device_id)
-
- if filter > 1:
- longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s, refresh)
- presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
-
- else: #straight decas
- #presence_list = presence_map["presence"][id2well_id[device_id]]
-
- #if data_type != "presence":
- longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
-
- z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
- presence_map["z_graph"][id2well_id[device_id]] = z_graph
-
-
- if data_type == "all" or data_type == "multiple":
- #lets create "multiple" series
- seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
- #here seen_at is straight decas
- #seen_at = [1 if x >= 2 else 0 for x in seen_at]
- pers_in_deka = []
- dekas_in_day = 6 * 1440
- for i in range(dekas_in_day, len(seen_where_list_uf)):
- n_pers = seen_where_list_uf[i]
- pers_in_deka.append(100*len(n_pers))
-
- seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
- seen_at_lst = Decompress(seen_at)
- pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
- persons_decompressed = Decompress(pers_in_deka)
- persons = Compress(persons_decompressed)
-
- multiple_list = CreateZGraphAI(seen_at_lst)
- presence_map["multiple"] = multiple_list
- presence_map["persons"] = persons
-
- presence_map["presence"] = CompressList(presence_map["longpresence"])
-
- if data_type == "z-graph":
- if "raw" in presence_map:
- del presence_map["raw"]
-
- #if "presence" in presence_map:
- # del presence_map["presence"]
-
- if "longpresence" in presence_map:
- del presence_map["longpresence"]
-
- if data_type == "multiple":
- if "raw" in presence_map:
- del presence_map["raw"]
- #if "presence" in presence_map:
- # del presence_map["presence"]
-
- if "z_graph" in presence_map:
- del presence_map["z_graph"]
-
- #if "presence" in presence_map:
- if "longpresence" in presence_map:
- del presence_map["longpresence"]
-
- data_payload = presence_map
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_zgraph_data":
-
- deployment_id = form_data.get('deployment_id')
-
- if privileges != "-1":
- privileges_lst = privileges.split(",")
- if deployment_id not in privileges_lst:
- data_payload = {}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- device_id = int(form_data.get('device_id'))
-
- devices = GetVisibleDevices(privileges)
-
- if not any(item[0] == device_id for item in devices):
- data_payload = {}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- filter = int(form_data.get('filter'))
- ddate = form_data.get('date')
- ddate = ddate.replace("_","-")
- to_date = form_data.get('to_date')
-
- if to_date == None:
- to_date = ddate
- else:
- to_date = to_date.replace("_","-")
-
- ddate, to_date = ensure_date_order(ddate, to_date)
- data_type = "z-graph"
-
- time_zone_s = GetTimeZoneOfDeployment(deployment_id)
- timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5 #add so date boundary is avoided
- devices_list, device_ids = GetProximityList(deployment_id, timee)
-
- time_from_str, _ = GetLocalTimeForDate(ddate, time_zone_s)
- _, time_to_str = GetLocalTimeForDate(to_date, time_zone_s)
-
- time_from_z = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
- epoch_time = calendar.timegm(time_from_z.utctimetuple())
-
- #time_from = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
- #epoch_time = calendar.timegm(time_from.utctimetuple())
- time_to = datetime.datetime.strptime(time_to_str, '%Y-%m-%d %H:%M:%S%z')
-
- presence_map = {}
- presence_map["time_start"] = epoch_time
- presence_map["time_zone"] = time_zone_s
-
- # Calculate the difference in days
- days_difference = (time_to - time_from).days
- days_difference_long = days_difference + 1
- # Convert string to datetime object
- date_obj = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
- # Subtract one day
- previous_day = date_obj - timedelta(days=1)
-
- # Format back to string in the same format
- time_from_z_str = previous_day.strftime("%Y-%m-%d %H:%M:%S%z")
-
-
- device_id_2_threshold = {}
- device_id_2_location = {0: "Outside"}
-
- for details in devices_list:
-
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
-
- if radar_threshold_group_st == None:
- radar_threshold_group_st = '["s3_max",12]' #last value is threshold to s28 composite
-
- if len(radar_threshold_group_st) > 8:
- radar_threshold_group = json.loads(radar_threshold_group_st)
- else:
- radar_threshold_group = ["s3_max",12]
-
- print(well_id, radar_threshold_group)
-
- device_id_2_location[device_id] = location_name
- device_id_2_threshold[device_id] = radar_threshold_group
-
- ids_list = []
- well_ids = []
- id2well_id = {}
- radar_fields_of_interest = []
- device_field_indexes = {}
- for details in devices_list:
- threshold_str = details[5]
- try:
- threshold_lst = json.loads(threshold_str)
- except:
- threshold_lst = ["s3",12]
- #threshold_lst = ["s3_max",12]
-
- radar_field = threshold_lst[0]
- #since we are getting 10 sec dat, no more need for min or max...
- radar_field = radar_field.split("_")[0]
- if radar_field not in radar_fields_of_interest:
- device_field_indexes[radar_field] = len(radar_fields_of_interest)
- radar_fields_of_interest.append(radar_field)
-
- ids_list.append(details[1])
- id2well_id[details[1]] = details[0]
- well_ids.append(details[0])
- presence_map["well_ids"] = well_ids
-
-
- devices_list_str = ','.join(str(device[1]) for device in devices_list)
- #sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
- sql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
- print(sql)
- if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- #zsql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
- zsql = get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_z_str, time_to_str, ids_list, radar_fields_of_interest)
- print(zsql)
-
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- #cur.execute(sql)
- #my_data = None
- myz_data = None
-
- #my_data = cur.fetchall()
- #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- cur.execute(zsql)
- myz_data = cur.fetchall()
-
- #if my_data != None:
-
- #device_id_2_threshold = {}
- #device_id_2_location = {0: "Outside"}
- #row_nr_2_device_id = {}
- #cnt = 0
- #row_nr_2_device_id[0] = 0
-
- ##presence_map['longpresence'] and temporary_map_day_plus are similar, except one is used for Z-graph, and another for multiple persons detection
-
- #if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
- #presence_map['presence'] = {}
- #presence_map['longpresence'] = {}
-
- #if data_type == "raw" or data_type == "all":
- #presence_map['raw'] = {}
-
- #for details in devices_list:
- ##(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
- #well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
-
- #if data_type == "raw" or data_type == "all":
- #zeros_list = [0] * 6 * 1440 * days_difference
- #presence_map['raw'][well_id] = zeros_list
-
- #if data_type == "presence" or data_type == "all" or data_type == "z-graph" or data_type == "multiple":
- #zeros_list = [0] * 6 * 1440 * days_difference
- #presence_map['presence'][well_id] = zeros_list
-
-
- ##presence_map[][well_id] = zeros_list
- #cnt += 1
- #row_nr_2_device_id[cnt] = well_id
-
- #if radar_threshold_group_st == None:
- #radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
-
- #if len(radar_threshold_group_st) > 8:
- #radar_threshold_group = json.loads(radar_threshold_group_st)
- #else:
- #radar_threshold_group = ["s3",12]
-
- #device_id_2_location[well_id] = location_name
- #device_id_2_threshold[well_id] = radar_threshold_group
-
- #start_time_ = my_data[0][0]
- #parsed_time = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
-
- #start_time = datetime.datetime(
- #parsed_time.year,
- #parsed_time.month,
- #parsed_time.day,
- #parsed_time.hour - 7, # Adjust for UTC-7
- #parsed_time.minute,
- #parsed_time.second,
- #tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
- #)
-
- #presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
-
- ##last_device_id = 0
- ##for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
- ##local_time = radar_read[0]
- ##deca = int((local_time - start_time).total_seconds() / 10)
- ##device_id = radar_read[1]
- ##if device_id != last_device_id:
- ##last_device_id = device_id
- ##if data_type == "raw" or data_type == "all":
- ##days_decas = len(presence_map['raw'][id2well_id[device_id]])
- ##else:
- ##days_decas = len(presence_map['presence'][id2well_id[device_id]])
- ##well_id = id2well_id[device_id]
- ##radar_threshold_group_st = device_id_2_threshold[device_id]
- ##threshold_sig, threshold = radar_threshold_group_st
- ##threshold_sig = threshold_sig.split("_")[0]
-
- ##radar_val = radar_read[2+device_field_indexes[threshold_sig]]
- ##if data_type == "presence" or data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- ##if radar_val > threshold:
- ##if deca < days_decas:
- ##presence_map['presence'][id2well_id[device_id]][deca] = 1
-
- ##if data_type == "raw" or data_type == "all":
- ##if deca < days_decas:
- ##presence_map['raw'][id2well_id[device_id]][deca] = radar_val
-
-
- if myz_data != None:
- temporary_map_day_plus = {}
- presence_map['z_graph'] = {}
- for details in devices_list:
- #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]','')
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = details
-
- #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- #zeros_list = [0] * 6 * 1440 * (days_difference_long) #+1 is for previous day
-
- presence_map['z_graph'][well_id] = [] #just place holder
- temporary_map_day_plus[well_id] = [0] * 6 * 1440 * (days_difference_long)
- presence_map['longpresence'][well_id] = [0] * 6 * 1440 * (days_difference_long) #just place holder
-
-
- parsed_time = datetime.datetime.strptime(time_from_z_str, '%Y-%m-%d %H:%M:%S%z')
-
- start_time = datetime.datetime(
- parsed_time.year,
- parsed_time.month,
- parsed_time.day,
- parsed_time.hour - 7, # Adjust for UTC-7
- parsed_time.minute,
- parsed_time.second,
- tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
- )
-
-
- #start_time_ = myz_data[0][0]
- st = time.time()
- device_lookup_cache = {}
- threshold_cache = {}
- temporary_map_day_plus = optimized_processing(myz_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, temporary_map_day_plus, data_type)
-
- if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
- overlaps_str = GetOverlapps(deployment_id)
- overlaps_lst = []
- if overlaps_str != None:
- if ":" in overlaps_str:
- overlaps_lst = json.loads(overlaps_str)
- temporary_map_day_plus = ClearOverlaps(temporary_map_day_plus, overlaps_lst)
-
- #if data_type == "all" or data_type == "z-graph" or data_type == "presence" or data_type == "multiple":
- for device_id in ids_list:
- device_id_str = str(device_id)
- #if data_type == "presence" or data_type == "all" or data_type == "z-graph":
- if filter > 1:
- #presence_list = filter_short_groups_numpy(presence_map["presence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
- #presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter, device_id_str, ddate, to_date, time_zone_s)
- #presence_listt = filter_short_groupss(presence_map["presence"][id2well_id[device_id]], filter)
- #if presence_list != presence_listt:
- # print("stop")
- #if data_type != "presence":
- #longpresence_list = filter_short_groups_numpy(presence_map["longpresence"][id2well_id[device_id]], filter, device_id, ddate+"-"+to_date)
- longpresence_list = filter_short_groups_c_wc(presence_map["longpresence"][id2well_id[device_id]], filter, device_id_str, prev_date, to_date, time_zone_s)
- #longpresence_listt = filter_short_groupss(presence_map["longpresence"][id2well_id[device_id]], filter)
- #if longpresence_list != longpresence_listt:
- # print("stop")
- # store_to_file(presence_map["longpresence"][id2well_id[device_id]], "test_list")
- #presence_map["presence"][id2well_id[device_id]] = presence_list
- #if data_type != "presence":
- presence_map["longpresence"][id2well_id[device_id]] = longpresence_list
-
- else: #straight decas
- #presence_list = presence_map["presence"][id2well_id[device_id]]
-
- #if data_type != "presence":
- longpresence_list = presence_map["longpresence"][id2well_id[device_id]]
-
-
-
- #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- if filter > 1: #straight decas
- presence_list1 = filter_short_high_groups_iterative_analog(temporary_map_day_plus[id2well_id[device_id]], filter)
- else:
- presence_list1 = temporary_map_day_plus[id2well_id[device_id]]
-
- temporary_map_day_plus[id2well_id[device_id]] = presence_list1
-
-
- #if data_type == "z-graph" or data_type == "all" or data_type == "multiple":
- for device_id in ids_list:
- #print(device_id_2_threshold[id2well_id[device_id]])
- z_graph = CreateZGraphAI(presence_map["longpresence"][id2well_id[device_id]]) #temporary_map_day_plus[id2well_id[device_id]])
- presence_map["z_graph"][id2well_id[device_id]] = z_graph
-
-
- if data_type == "all" or data_type == "multiple":
- #lets create "multiple" series
- seen_at_lst, seen_where_list_uf = DetectMultiple(temporary_map_day_plus, overlaps_lst)
- #here seen_at is straight decas
- #seen_at = [1 if x >= 2 else 0 for x in seen_at]
- pers_in_deka = []
- dekas_in_day = 6 * 1440
- for i in range(dekas_in_day, len(seen_where_list_uf)):
- n_pers = seen_where_list_uf[i]
- pers_in_deka.append(100*len(n_pers))
-
- seen_at = filter_out_short_highs_iterative(seen_at_lst, filter) #this converts decas into compressed format!
- seen_at_lst = Decompress(seen_at)
- pers_in_deka = filter_out_short_same_groups_iterative(pers_in_deka, filter)
- persons_decompressed = Decompress(pers_in_deka)
- persons = Compress(persons_decompressed)
-
- multiple_list = CreateZGraphAI(seen_at_lst)
- presence_map["multiple"] = multiple_list
- presence_map["persons"] = persons
-
- presence_map["presence"] = presence_map["longpresence"]
- if data_type == "z-graph":
- if "raw" in presence_map:
- del presence_map["raw"]
- if "presence" in presence_map:
- del presence_map["presence"]
- if "longpresence" in presence_map:
- del presence_map["longpresence"]
-
- if data_type == "multiple":
- if "raw" in presence_map:
- del presence_map["raw"]
- if "presence" in presence_map:
- del presence_map["presence"]
- if "longpresence" in presence_map:
- del presence_map["longpresence"]
- if "z_graph" in presence_map:
- del presence_map["z_graph"]
-
- if "presence" in presence_map:
- presence_map["presence"] = CompressList(presence_map["presence"])
-
- data_payload = presence_map
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_candle_data":
- container = GetReference("/MAC")
- MAC = req_dict["MAC"][0]
- sensor = req_dict["sensor"][0]
- from_time = req_dict["from_time"][0]
- to_time = req_dict["to_time"][0]
- part = req_dict["part"][0]
- tzone = req_dict["tzone"][0]
- AddToLog(str(req_dict))
- candle_data = GetCandleSensorData(container, MAC, sensor, from_time, to_time, part, tzone)
- data_payload = {'candle_data': candle_data}
- resp.media = package_response(data_payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "deployment_form":
- editing_deployment_id = form_data.get('editing_deployment_id')
-
- ok = StoreDeployment2DB(form_data, editing_deployment_id)
- if ok == 1:
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- else:
- payload = {'ok': ok, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "deployment_delete":
- ok = DeleteRecordFromDB(form_data)
-
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "deployments_list":
- result_list = []
- first_s = form_data.get('first')
- last_s = form_data.get('last')
- user_id = form_data.get('user_id')
- first = 0
- last = 1000000
-
- try:
- if first_s != None:
- first = int(first_s)
- except ValueError:
- pass
-
- try:
- if last_s != None:
- last = int(last_s)
- except ValueError:
- pass
-
- #user_id = form_data.get('user_id')
- if user_id == "" or user_id == None:
- #user_id = GetUserId(user_name)
- privileges, user_id = GetPriviledgesAndUserId(user_name)
- else:
- privileges = GetPriviledgesOnly(user_name)
-
- all_deployments = ListDeployments(privileges, user_id)
-
- cnt = 0
-
- for deployment in all_deployments:
- cnt += 1
- if cnt >= first:
- if deployment['beneficiary_id'] in user_id_2_user.keys():
- caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": user_id_2_user[deployment['beneficiary_id']][3], "first_name": user_id_2_user[deployment['beneficiary_id']][5], "last_name": user_id_2_user[deployment['beneficiary_id']][6]}
- else:
- caretaker_min_object = {"deployment_id": deployment['deployment_id'], "email": deployment['beneficiary_id'][3], "first_name": deployment['beneficiary_id'][5], "last_name": deployment['beneficiary_id'][6]}
- result_list.append(caretaker_min_object)
- if cnt > last:
- break
-
- payload = {'result_list': result_list}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "device_list":
- result_list = []
- first_s = form_data.get('first')
- last_s = form_data.get('last')
-
- try:
- first = int(first_s)
- except ValueError:
- first = 0
-
- try:
- last = int(last_s)
- except ValueError:
- last = 1000000
-
- #user_id = form_data.get('user_id')
-
- devices = GetVisibleDevices(privileges)
-
- payload = {'result_list': devices}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "get_devices_locations":
- well_ids = form_data.get('well_ids')
- details_list = WellId2Details(well_ids)
- #print(details_list)
-
- to_report = []
-
- for details in details_list:
- if details[3] == -1:
- report_record = str(details[0]) + " ?"
- else:
- report_record = str(details[0]) + " " + location_names[details[3]]
- if details[4] != "" and details[4] != "initial":
- report_record = report_record + " " +details[4]
- if details[3] == -1:
- to_report.append((details[0], "?", details[4], report_record))
- else:
- to_report.append((details[0], location_names[details[3]], details[4], report_record))
-
- #print(to_report)
- payload = {'deployments': to_report}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- elif function == "find_deployments":
- #For all devices, find deployments that they are part of
- #For all those deployments, return:
- #deployment_id First_name Last_name (of beneficiary)
- #list of (well_id, location_descriptions) all devices in each deployment
-
-
- well_ids = form_data.get('well_ids')
- #well_ids_lst = well_ids.split(",")
- details_list = WellId2Details(well_ids)
- to_report = []
-
- privileges_lst = []
- if "," in privileges:
- privileges_lst = privileges.split(",")
-
- if len(details_list) > 0:
- macs_list_clean = []
- devices_details = {}
-
- for entry in details_list:
- macs_list_clean.append(entry[2])
- macs_formatted = "', '".join(macs_list_clean)
-
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- sql = f"SELECT deployment_id, beneficiary_id, devices FROM public.deployment_details WHERE devices::text ~* ANY(ARRAY['{macs_formatted}']);"
- print(sql)
- cur.execute(sql)
- result = cur.fetchall()
-
- if result != None and result != []:
- users_list_clean = []
- for entry in result: #for every deployment
- macs_list_clean = []
- deployment_id = str(entry[0])
- if privileges == '-1':
- deployment = DeploymentDetails(deployment_id)
- address_str = ExtractAddress(deployment)
- deployment = PurgeDeployment(deployment)
- deployment["address"] = address_str
-
- users_list_clean.append(str(entry[1]))
- users_formatted = ", ".join(users_list_clean)
-
- mac_devices_in_deployment = ast.literal_eval(entry[2])
- for mac in mac_devices_in_deployment:
- macs_list_clean.append(mac)
-
- device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
-
- devices_details[deployment_id] = deployment
-
- elif deployment_id in privileges_lst:
- deployment = DeploymentDetails(deployment_id)
- address_str = ExtractAddress(deployment)
- deployment = PurgeDeployment(deployment)
- deployment["address"] = address_str
- users_list_clean.append(str(entry[1]))
- users_formatted = ", ".join(users_list_clean)
-
- mac_devices_in_deployment = ast.literal_eval(entry[2])
- for mac in mac_devices_in_deployment:
- macs_list_clean.append(mac)
-
- device_ids, device_list = MACsToWellIds(cur, macs_list_clean)
- devices_details[deployment_id] = deployment
-
- device_list_to_report = []
-
- for device in device_list:
- #we need well_id, location and decription only
- device_list_to_report.append((device[0], device[2], device[3]))
- sql = f"SELECT first_name, last_name, email FROM public.person_details WHERE user_id IN ({users_formatted});"
- print(sql)
- cur.execute(sql)
- result1 = cur.fetchall()
- counter = 0
-
- for entry in result:
- deployment_id = str(entry[0])
- deployment = devices_details[deployment_id]
- first_name, last_name, email = result1[counter]
- deployment["beneficiary_first_name"] = first_name
- deployment["beneficiary_last_name"] = last_name
- deployment["beneficiary_email"] = last_name
- devices_details[deployment_id] = deployment
- to_report.append((entry, device_list_to_report, devices_details[deployment_id]))
-
-
- print(to_report)
- payload = {'deployments': to_report}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "device_list_by_deployment":
- result_list = []
- first_s = form_data.get('first')
- last_s = form_data.get('last')
- deployment_id = form_data.get('deployment_id')
- try:
- first = int(first_s)
- except ValueError:
- first = 0
-
- try:
- last = int(last_s)
- except ValueError:
- last = 1000000
-
-
- if privileges == "-1":
- devices = GetVisibleDevices(deployment_id)
- else:
- privileges = privileges.split(",")
- if deployment_id in privileges:
- devices = GetVisibleDevices(deployment_id)
-
- payload = {'result_list': devices}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "device_list_4_gui":
- result_list = []
- deploymentData = []
- deviceData = []
- macs_list = []
- user_id = GetUserId(user_name)
- all_deployments = ListDeployments(privileges, user_id)
- #{'deployment_id': 21, 'beneficiary_id': 25, 'caretaker_id': 1, 'owner_id': 1, 'installer_id': 1, 'address_street': '661 Encore Way', 'address_city': 'San Jose', 'address_zip': '95134', 'address_state': 'CA', 'address_country': 'USA', 'devices': '["64B70888FAB0","64B70888F860","64B70888F6F0","64B708896BDC","64B708897428","64B70888FA84","64B70889062C"]', 'wifis': '', 'persons': 1, 'gender': 1, 'race': 1, 'born': 1940, 'pets': 0, 'time_zone': 'America/Los_Angeles'}
- MAC2Deployment = {}
-
- for deployment in all_deployments:
- beneficiary_id = deployment['beneficiary_id']
- user = GetNameFromUserId(beneficiary_id)
- name = f"{user[1]} {user[2]}"
- deploymentData.append({'deployment_id': str(deployment['deployment_id']), 'name': name})
- devices = deployment['devices']
- if devices != None and devices != None != "":
-
- devices_list = ToList(devices)
- for device in devices_list:
- macs_list.append(device)
- MAC2Deployment[device] = deployment['deployment_id']
- #deviceData.append({'well_id': device[0], 'mac': device[1]})
-
- deployment_id_list = []
- deviceData = []
-
- #row_data = [device_id, well_id, mac, last_message_epoch, location_names[location_id], description, deployment_ids[cnt][0]]
-
- with get_db_connection() as conn:
- with conn.cursor() as cur:
- device_ids, device_list = MACsToWellIds(cur, macs_list)
-
-
- for device in device_list:
- if MAC2Deployment[device[4]] != "":
- deviceData.append({'well_id': device[0], 'mac': device[4], 'room_name': device[2], 'deployment_id': MAC2Deployment[device[4]]})
-
- #deploymentData = [{'deployment_id': '21', 'name': 'Robert Zmrzli House'}, {'deployment_id': '36', 'name': 'Fred Zmrzli Apartment'}]
- #deviceData = [{ 'well_id': '300', 'mac': '64B70888F6F0', 'room_name': 'Living Room', 'deployment_id': '21' }, { 'well_id': '301', 'mac': '64B70888F6F1', 'room_name': 'Bathroom Main', 'deployment_id': '36' }]
-
- payload = {
- 'status': "success", 'deploymentData': deploymentData, 'deviceData': deviceData
- }
-
- logger.debug(f"device_list_4_gui------ {payload} ------------------------------------------")
-
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "caretaker_form":
- editing_user_id = form_data.get('editing_user_id')
- email = form_data.get('email')
- user_id = form_data.get('user_id')
- if "@" not in email:
- resp.media = package_response("Missing or illegal 'email' parameter", HTTP_400)
- return
-
- print(privileges)
- if privileges == "-1":
- ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
- if ok == 1:
payload = {'ok': ok}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
- return
else:
- payload = {'ok': ok, 'error': debug_string}
+ payload = {'ok': 0, 'error': "not allowed"}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
- return
- elif "-1" in privileges:
- payload = {'ok': 0, 'error': "Not allowed!"}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
return
-
- elif function == "caretaker_delete":
- if privileges == "-1":
- ok = DeleteRecordFromDB(form_data)
- else:
- ok = 0
- AddToLog(ok)
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "caretakers_list":
- result_list = []
- first_s = form_data.get('first')
- last_s = form_data.get('last')
-
- try:
- first = int(first_s)
- except ValueError:
- first = 0
-
- try:
- last = int(last_s)
- except ValueError:
- last = 1000000
-
- if privileges == "-1":
- all_caretakers = ListCaretakers(privileges, user_name)
-
- cnt = 0
-
- for caretaker in all_caretakers:
- cnt += 1
- if cnt >= first:
- caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
- result_list.append(caretaker_min_object)
- if cnt > last:
- break
- elif "-1" in privileges:
- all_caretakers = ListCaretakers(privileges, user_name)
-
- cnt = 0
-
- for caretaker in all_caretakers:
- cnt += 1
- if cnt >= first:
- caretaker_min_object = {"user_id": caretaker[0], "email": caretaker[3], "first_name": caretaker[5], "last_name": caretaker[6]}
- result_list.append(caretaker_min_object)
- if cnt > last:
- break
- payload = {'result_list': result_list}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "new_user_form":
- devices = form_data.get('devices')
- user_name = form_data.get('user_name')
- password = form_data.get('password')
- #lets check if devices listed are not part of existing deployment
- success, result = DevicesNotUsed(devices, user_name)
-
- if success:
- if result["deployed"]:
- error_string = f"These devices are already deployed: {result['deployed']}"
- print(error_string)
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- if result["not_found"]:
- error_string = f"These devices are not available: {result['not_found']}"
- print(error_string)
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- if not result["deployed"] and not result["not_found"]:
- print("All devices are available for deployment")
- ok, error_string = StoreDisclaimer2DB(form_data)
-
- if ok == 1:
- #Lets check if this user already has account or not
- if not AccountExists(user_name):
- #lets create new account for this caretaker
- if password == None or password == "":
- password = CreatePassword(8)
-
- #lets suplement form_data with parts needed for existing StoreCaretaker2DB function
- editing_user_id = "0" #specify if editing existing user, otherwise "0"
- user_id = "0" #user that is adding this record. New user so "0"
- form_data['role_ids'] = "1,2"
- form_data['access_to_deployments'] = "45"
- #form_data['email'] = "" #this one matches
- form_data['new_user_name'] = form_data['user_name']
- form_data['first_name'] = form_data['firstName']
- form_data['last_name'] = form_data['lastName']
- form_data['address_street'] = ""
- form_data['address_city'] = ""
- form_data['address_zip'] = ""
- form_data['address_state'] = ""
- form_data['address_country'] = ""
- form_data['phone_number'] = form_data['phone']
- form_data['picture'] = "/"
- form_data['key'] = password
-
- ok = StoreCaretaker2DB(form_data, editing_user_id, user_id)
-
- if ok == 1:
-
- #we need to call cd ~/mqtt-auth-service/acl_manager.py
-
- SendWelcomeEmail(form_data['email'], form_data['first_name'], form_data['last_name'], devices, form_data['phone_number'], form_data['new_user_name'], form_data['key'])
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- else:
- privileges, user_id = ValidUser(user_name, password)
- if user_id == "0": #bad password
- error_string = f"Password does not match user {user_name}"
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- else:
- payload = {'ok': ok, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- else:
- error_string = f"Error: {result}"
- payload = {'ok': 0, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "beneficiary_form":
- editing_user_id = form_data.get('editing_user_id')
- email = form_data.get('email')
- user_id = GetUserId(user_name)
- if "@" in email:
- ok, error_string = StoreBeneficiary2DB(form_data, editing_user_id, user_id)
- if ok == 1:
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- else:
- payload = {'ok': ok, 'error': error_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
- else:
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "beneficiary_delete":
-
-
- ok = DeleteRecordFromDB(form_data)
-
- payload = {'ok': ok}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "beneficiaries_list":
- result_list = []
- first_s = form_data.get('first')
- last_s = form_data.get('last')
-
- try:
- first = int(first_s)
- except ValueError:
- first = 0
-
- try:
- last = int(last_s)
- except ValueError:
- last = 1000000
-
- user_id = form_data.get('user_id')
- all_beneficiaries = ListBeneficiaries(privileges, user_id)
-
- cnt = 0
-
- for beneficiary in all_beneficiaries:
- cnt += 1
- if cnt >= first:
- beneficiary_min_object = {"user_id": beneficiary[0], "email": beneficiary[3], "first_name": beneficiary[5], "last_name": beneficiary[6]}
- result_list.append(beneficiary_min_object)
- if cnt > last:
- break
-
- payload = {'result_list': result_list}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
-
- elif function == "activities_report_details":
- deployment_id = form_data.get('deployment_id')
-
- timezone_str = GetTimeZoneOfDeployment(deployment_id)
- filterr = form_data.get('filter')
- if filterr == None:
- filterr = 6
- else:
- filterr = int(filterr)
-
- refresh = form_data.get('refresh') == "1"
- ddate = current_date_at_tz(timezone_str)
- timee = LocalDateToUTCEpoch(ddate, timezone_str)+5 #add so date boundary is avoided
- devices_list, device_ids = GetProximityList(deployment_id, timee)
-
- #Here we need to add per day: (all based on Z-graph data!)
- #Bathroom visits number
- #Bathroom time spent
- #Sleep weakes number (As breaks in Z-graph indicates in 10PM to 9AM period)
- #Sleep length (For now add all times seen in bedroom)
- #Kitchen visits number
- #Kitchen time spent
- #Most frequented room visits number
- #Most frequented room time spent
-
- #Lets find device_id of bathroom sensor
-
-
- bathroom_device_id, location_ba, bathroom_well_id = FindDeviceByRole(deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
- bedroom_device_id, location_be, bedroom_well_id = FindDeviceByRole(deployment_id, ["Bedroom Master", "Bedroom", "Bedroom Guest"])
- kitchen_device_id, location_ke, kitchen_well_id = FindDeviceByRole(deployment_id, ["Kitchen"])
- most_present_device_id, location_ot, most_present_well_id = FindDeviceByRole(deployment_id, []) #this will find most_present (as defined in other filed of device record)
-
- if isinstance(location_ot, int):
- other_location = location_names[location_ot]
- else:
- other_location = location_ot
-
- #weekly
- week_dates = get_week_days_and_dates(7, timezone_str)
- month_dates = get_week_days_and_dates(30, timezone_str)
- six_months_dates = get_week_days_and_dates(180, timezone_str)
-
- other_color = Loc2Color[other_location][0]
- rgb_string = f"rgb({other_color[0]}, {other_color[1]}, {other_color[2]})"
-
- rooms_reports = [("Bathroom", "blue", bathroom_device_id, bathroom_well_id), ("Bedroom", "green", bedroom_device_id, bedroom_well_id), ("Kitchen", "red", kitchen_device_id, kitchen_well_id), (other_location, rgb_string, most_present_device_id, most_present_well_id)]
-
- six_months_report = []
- for room_details in rooms_reports:
- device_id = room_details[2]
- if device_id > 0:
-
- well_id = room_details[3]
- radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
- room = {"name": room_details[0],"color": room_details[1]}
- data = []
-
- for day_activity in six_months_dates:
- datee = day_activity[0]
- hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
-
- if hours > 18:
- print("Too long 6m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
-
- data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
- data.append(data_record)
-
- room["data"] = data
- six_months_report.append(room)
-
- weekly_report = []
- for room_details in rooms_reports:
- device_id = room_details[2]
- if device_id > 0:
- well_id = room_details[3]
- radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
- room = {"name": room_details[0],"color": room_details[1]}
- data = []
-
- for day_activity in week_dates:
- datee = day_activity[0]
- hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
- data_record = { "title": day_activity[1], "events": events_count, "hours": hours}
- data.append(data_record)
-
- room["data"] = data
- weekly_report.append(room)
-
- monthly_report = []
- for room_details in rooms_reports:
- device_id = room_details[2]
- if device_id > 0:
- well_id = room_details[3]
- radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
- room = {"name": room_details[0],"color": room_details[1]}
- data = []
-
- for day_activity in month_dates:
- datee = day_activity[0]
- hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
- #if datee == "2025-05-20" and device_id == 572:
- # print(hours)
- if hours > 18:
- print("Too long m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
-
- data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
- data.append(data_record)
-
- room["data"] = data
- monthly_report.append(room)
-
-
-
-
- result_dictionary = {
- "alert_text": "No alert",
- "alert_color": "bg-green-100 text-green-700",
- "chart_data": [
- {
- "name": "Weekly",
- "rooms": [
- {
- "name": "Bathroom",
- "color": "blue",
- "data": [
- { "title": "Monday", "events": 186, "hours": 80.56 },
- { "title": "Tuesday", "events": 305, "hours": 200 },
- { "title": "Wednesday", "events": 237, "hours": 120 },
- { "title": "Thursday", "events": 73, "hours": 190 },
- { "title": "Friday", "events": 209, "hours": 130 },
- { "title": "Saturday", "events": 214, "hours": 140 },
- { "title": "Sunday", "events": 150, "hours": 100 }
- ]
- },
- {
- "name": "Bedroom",
- "color": "green",
- "data": [
- { "title": "Monday", "events": 186, "hours": 80 },
- { "title": "Tuesday", "events": 305, "hours": 200 },
- { "title": "Wednesday", "events": 237, "hours": 120 },
- { "title": "Thursday", "events": 73, "hours": 190 },
- { "title": "Friday", "events": 209, "hours": 130 },
- { "title": "Saturday", "events": 214, "hours": 140 },
- { "title": "Sunday", "events": 150, "hours": 100 }
- ]
- },
- {
- "name": "Kitchen",
- "color": "red",
- "data": [
- { "title": "Monday", "events": 186, "hours": 80 },
- { "title": "Tuesday", "events": 305, "hours": 200 },
- { "title": "Wednesday", "events": 237, "hours": 120 },
- { "title": "Thursday", "events": 73, "hours": 190 },
- { "title": "Friday", "events": 209, "hours": 130 },
- { "title": "Saturday", "events": 214, "hours": 140 },
- { "title": "Sunday", "events": 150, "hours": 100 }
- ]
- },
- {
- "name": "Other",
- "color": "yellow",
- "data": [
- { "title": "Monday", "events": 186, "hours": 80 },
- { "title": "Tuesday", "events": 305, "hours": 200 },
- { "title": "Wednesday", "events": 237, "hours": 120 },
- { "title": "Thursday", "events": 73, "hours": 190 },
- { "title": "Friday", "events": 209, "hours": 130 },
- { "title": "Saturday", "events": 214, "hours": 140 },
- { "title": "Sunday", "events": 150, "hours": 100 }
- ]
- }
- ]
- },
- {
- "name": "Monthly",
- "rooms": [
- {
- "name": "Bathroom",
- "color": "purple",
- "data": [
- { "title": "01", "events": 67, "hours": 45 },
- { "title": "02", "events": 97, "hours": 67 },
- { "title": "03", "events": 87, "hours": 23 },
- { "title": "04", "events": 42, "hours": 12 },
- { "title": "05", "events": 64, "hours": 48 },
- { "title": "06", "events": 53, "hours": 34 },
- { "title": "07", "events": 75, "hours": 23 },
- { "title": "08", "events": 45, "hours": 56 },
- { "title": "09", "events": 85, "hours": 47 },
- { "title": "10", "events": 34, "hours": 29 },
- { "title": "11", "events": 49, "hours": 30 },
- { "title": "12", "events": 62, "hours": 33 },
- { "title": "13", "events": 75, "hours": 44 },
- { "title": "14", "events": 88, "hours": 57 },
- { "title": "15", "events": 94, "hours": 65 },
- { "title": "16", "events": 45, "hours": 21 },
- { "title": "17", "events": 76, "hours": 54 },
- { "title": "18", "events": 85, "hours": 62 },
- { "title": "19", "events": 43, "hours": 28 },
- { "title": "20", "events": 59, "hours": 34 },
- { "title": "21", "events": 78, "hours": 56 },
- { "title": "22", "events": 64, "hours": 39 },
- { "title": "23", "events": 93, "hours": 72 },
- { "title": "24", "events": 52, "hours": 28 },
- { "title": "25", "events": 71, "hours": 48 },
- { "title": "26", "events": 85, "hours": 63 }
- ]
- },
- {
- "name": "Bedroom",
- "color": "#3b82f6",
- "data": [
- { "title": "01", "events": 61, "hours": 42 },
- { "title": "02", "events": 72, "hours": 36 },
- { "title": "03", "events": 94, "hours": 49 },
- { "title": "04", "events": 67, "hours": 59 },
- { "title": "05", "events": 54, "hours": 20 },
- { "title": "06", "events": 77, "hours": 64 },
- { "title": "07", "events": 81, "hours": 70 },
- { "title": "08", "events": 53, "hours": 25 },
- { "title": "09", "events": 79, "hours": 42 },
- { "title": "10", "events": 84, "hours": 65 },
- { "title": "11", "events": 62, "hours": 54 },
- { "title": "12", "events": 45, "hours": 23 },
- { "title": "13", "events": 88, "hours": 71 },
- { "title": "14", "events": 74, "hours": 44 },
- { "title": "15", "events": 91, "hours": 59 },
- { "title": "16", "events": 46, "hours": 31 },
- { "title": "17", "events": 73, "hours": 40 },
- { "title": "18", "events": 85, "hours": 63 },
- { "title": "19", "events": 78, "hours": 66 },
- { "title": "20", "events": 66, "hours": 42 },
- { "title": "21", "events": 95, "hours": 78 },
- { "title": "22", "events": 57, "hours": 39 },
- { "title": "23", "events": 72, "hours": 48 },
- { "title": "24", "events": 48, "hours": 21 },
- { "title": "25", "events": 89, "hours": 61 },
- { "title": "26", "events": 77, "hours": 44 }
- ]
- },
- {
- "name": "Kitchen",
- "color": "orange",
- "data": [
- { "title": "01", "events": 94, "hours": 59 },
- { "title": "02", "events": 62, "hours": 48 },
- { "title": "03", "events": 76, "hours": 38 },
- { "title": "04", "events": 81, "hours": 62 },
- { "title": "05", "events": 64, "hours": 27 },
- { "title": "06", "events": 53, "hours": 31 },
- { "title": "07", "events": 92, "hours": 65 },
- { "title": "08", "events": 85, "hours": 42 },
- { "title": "09", "events": 74, "hours": 35 },
- { "title": "10", "events": 67, "hours": 55 },
- { "title": "11", "events": 49, "hours": 23 },
- { "title": "12", "events": 88, "hours": 75 },
- { "title": "13", "events": 93, "hours": 66 },
- { "title": "14", "events": 76, "hours": 34 },
- { "title": "15", "events": 59, "hours": 39 },
- { "title": "16", "events": 72, "hours": 51 },
- { "title": "17", "events": 83, "hours": 44 },
- { "title": "18", "events": 74, "hours": 33 },
- { "title": "19", "events": 69, "hours": 28 },
- { "title": "20", "events": 85, "hours": 56 },
- { "title": "21", "events": 53, "hours": 22 },
- { "title": "22", "events": 92, "hours": 70 },
- { "title": "23", "events": 71, "hours": 41 },
- { "title": "24", "events": 67, "hours": 25 },
- { "title": "25", "events": 86, "hours": 74 },
- { "title": "26", "events": 94, "hours": 68 }
- ]
- },
- {
- "name": "Other",
- "color": "hotpink",
- "data": [
- { "title": "01", "events": 57, "hours": 27 },
- { "title": "02", "events": 74, "hours": 33 },
- { "title": "03", "events": 84, "hours": 53 },
- { "title": "04", "events": 95, "hours": 68 },
- { "title": "05", "events": 71, "hours": 48 },
- { "title": "06", "events": 92, "hours": 76 },
- { "title": "07", "events": 85, "hours": 62 },
- { "title": "08", "events": 49, "hours": 25 },
- { "title": "09", "events": 66, "hours": 38 },
- { "title": "10", "events": 63, "hours": 31 },
- { "title": "11", "events": 75, "hours": 47 },
- { "title": "12", "events": 94, "hours": 72 },
- { "title": "13", "events": 79, "hours": 49 },
- { "title": "14", "events": 72, "hours": 45 },
- { "title": "15", "events": 88, "hours": 61 },
- { "title": "16", "events": 83, "hours": 52 },
- { "title": "17", "events": 92, "hours": 76 },
- { "title": "18", "events": 73, "hours": 40 },
- { "title": "19", "events": 65, "hours": 28 },
- { "title": "20", "events": 76, "hours": 63 },
- { "title": "21", "events": 58, "hours": 30 },
- { "title": "22", "events": 84, "hours": 67 },
- { "title": "23", "events": 72, "hours": 41 },
- { "title": "24", "events": 79, "hours": 46 },
- { "title": "25", "events": 63, "hours": 29 },
- { "title": "26", "events": 68, "hours": 39 }
- ]
- }
- ]
- },
- {
- "name": "6 Months",
- "rooms": [
- {
- "name": "Bathroom",
- "color": "purple",
- "data": [
- { "title": "October", "events": 62, "hours": 23 },
- { "title": "November", "events": 76, "hours": 42 },
- { "title": "December", "events": 85, "hours": 54 },
- { "title": "January", "events": 94, "hours": 67 },
- { "title": "February", "events": 63, "hours": 35 },
- { "title": "March", "events": 81, "hours": 46 }
- ]
- },
- {
- "name": "Bedroom",
- "color": "#3b82f6",
- "data": [
- { "title": "October", "events": 64, "hours": 35 },
- { "title": "November", "events": 88, "hours": 71 },
- { "title": "December", "events": 79, "hours": 54 },
- { "title": "January", "events": 72, "hours": 49 },
- { "title": "February", "events": 53, "hours": 32 },
- { "title": "March", "events": 93, "hours": 67 }
- ]
- },
- {
- "name": "Kitchen",
- "color": "orange",
- "data": [
- { "title": "October", "events": 92, "hours": 65 },
- { "title": "November", "events": 85, "hours": 62 },
- { "title": "December", "events": 74, "hours": 49 },
- { "title": "January", "events": 63, "hours": 33 },
- { "title": "February", "events": 78, "hours": 56 },
- { "title": "March", "events": 69, "hours": 41 }
- ]
- },
- {
- "name": "Other",
- "color": "hotpink",
- "data": [
- { "title": "October", "events": 88, "hours": 54 },
- { "title": "November", "events": 72, "hours": 39 },
- { "title": "December", "events": 84, "hours": 63 },
- { "title": "January", "events": 76, "hours": 46 },
- { "title": "February", "events": 93, "hours": 72 },
- { "title": "March", "events": 68, "hours": 29 }
- ]
- }
- ]
- }
- ]
- }
-
- result_dictionary["chart_data"][0]["rooms"] = weekly_report
- result_dictionary["chart_data"][1]["rooms"] = monthly_report
- result_dictionary["chart_data"][2]["rooms"] = six_months_report
-
-
-
- payload = result_dictionary #{'result_dictionary': result_dictionary}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- AddToLog(payload)
- return
-
-
- elif function == "dashboard_list":
- # works in UTC only
-
- logger.error(f"------------------------------- dashboard_list ------------------------------------------")
-
- caretaker = user_name
- #date_s = form_data.get('date')
- time_s = form_data.get('time')
- date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
- filterr = form_data.get('filter')
- if filterr == None:
- filterr = 5
-
- privileges = GetPriviledgesOnly(caretaker)
-
- deployments_list = GetUsersFromDeployments(privileges)
-
- #all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
- #AddToLog(all_beneficiaries)
-
- result_list = []
-
- for deployment_id, first_name, last_name in deployments_list:
- details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr)
- if details != {}:
-
- details["units"] = "°C"
- if "America" in details["time_zone"]:
- details["temperature"] = CelsiusToFahrenheit(details["temperature"])
- details["units"] = "°F"
- devices_list, device_ids = GetProximityList(deployment_id, date_s)
- # convert dates back to UTC
- #details['bathroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
- #details['kitchen_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bathroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
- #details['bedroom_at'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['bedroom_at'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
- #details['last_detected_time'] = pytz.timezone(details['time_zone']).localize(datetime.datetime.strptime(details['last_detected_time'], "%Y-%m-%dT%H:%M:%S")).astimezone(pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
- location_list = []
- for room_details in devices_list:
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
- if description == None or description == "":
- location_list.append(location_name)
- else:
- location_list.append(location_name + " " + description)
- details["deployment_id"] = deployment_id
- details["location_list"] = location_list
- result_list.append(details)
-
- payload = {'result_list': result_list}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- AddToLog(payload)
- return
-
-
- elif function == "dashboard_single":
- caretaker = user_name
- #date_s = form_data.get('date')
- date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
- deployment_id = form_data.get('deployment_id')
- filterr = form_data.get('filter')
- if filterr == None:
- filterr = 5
-
-
- #all_beneficiaries = ListBeneficiariesOfCaretaker(caretaker) #GetPriviledgesOnly
- #AddToLog(all_beneficiaries)
-
- result_list = []
-
- details = GetSensorsDetailsFromDeployment(deployment_id, date_s, filterr, False)
- details["units"] = "°C"
- if "America" in details["time_zone"]:
- details["temperature"] = CelsiusToFahrenheit(details["temperature"])
- details["bedroom_temperature"] = CelsiusToFahrenheit(details["bedroom_temperature"])
- details["units"] = "°F"
- devices_list, device_ids = GetProximityList(deployment_id, date_s)
- location_list = []
- for room_details in devices_list:
- well_id, device_id, location_name, description, MAC, radar_threshold_group_st, close_to = room_details #(266, 559, 'Bathroom', None, '64B70888FAB0', '["s3_max",12]')
- if description == None or description == "":
- location_list.append(location_name)
- else:
- location_list.append(location_name + " " + description)
- details["deployment_id"] = deployment_id
- details["location_list"] = location_list
- settings = {"wellness_score": False, "last_seen": False, "sleep_report": True, "activity_report": True, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
- details["settings"] = settings
- result_list.append(details)
- payload = {'result_list': result_list}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- AddToLog(payload)
- return
-
- elif function == "request_node_red":
- logger.error(f"------------------------------- {function} ------------------------------------------")
- #this will:
- # 1.prepare folder and settings.js
- # 2.start instance on node-red and return it's return port
- #caretaker = user_name
- #date_s = form_data.get('date')
- time_s = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
- #deployment_id = form_data.get('deployment_id')
- #redis_conn.set('node_red_requests', str([radar_threshold_signal, radar_threshold_value]))
- # Hashes (dictionaries)
- logger.error(f"Storing to node_red_requests {user_name}")
- redis_conn.hset('node_red_requests', mapping={
- 'user_name': user_name,
- 'token': token,
- 'time': time_s,
- 'requests': 1
- })
-
- payload = {'ok': 1}
- logger.error(f"Responding {payload}")
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- elif function == "get_node_red_port":
- #this will:
- # 1.prepare folder and settings.js
- # 2.start instance on node-red and return it's return port
- hash_data = GetRedisMap(f'node_red_status_{user_name}')
- port = 0
- if hash_data != {}:
- port = hash_data['port']
- #date_s = form_data.get('date')
- #date_s = datetime.datetime.utcnow().strftime("%Y-%m-%d")
- #deployment_id = form_data.get('deployment_id')
- payload = {'port': port}
- logger.debug(f"get_node_red_port: {payload}")
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- elif function == "activity_detected":
- #this will:
- # 1.store to REDIS time of last activity
- time_s = form_data.get('time')
-
- hash_data = GetRedisMap(f'node_red_status_{user_name}')
- port = 0
- if hash_data != {}:
- port = hash_data['port']
-
- redis_conn.hset(f'node_red_status_{user_name}', mapping={
- 'port': port,
- 'last_activity': time_s
- })
-
- payload = {'ok': 1}
- logger.debug(f"activity_detected: {payload}")
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- elif function == "store_flow":
- #this will:
- # 1.store flow into DB
- time_s = form_data.get('time')
- flow_json = form_data.get('flow')
- logger.debug(f"store_flow: {flow_json}")
- StoreFlow2DB(user_name, time_s, flow_json)
- payload = {'ok': 1}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- elif function == "store_alarms":
- #this will:
- # 1.store flow into DB
- deployment_id = form_data.get('deployment_id')
- device_id = form_data.get('device_id')
- deployment_alarms_json = form_data.get('deployment_alarms')
- device_alarms_json = form_data.get('device_alarms')
-
- logger.debug(f"store_alarms: {deployment_alarms_json}")
-
-
- if privileges == "-1" or deployment_id in privileges:
- ok = StoreAlarms2DB(deployment_id, device_id, deployment_alarms_json, device_alarms_json)
-
- redis_conn.set('alarm_device_settings_'+device_id, device_alarms_json)
- redis_conn.set('alarm_deployment_settings_'+deployment_id, deployment_alarms_json)
+ elif function == "send_walarm":
+ # Extract data from form
+ deployment_id = form_data.get('deployment_id')
+ device_id = form_data.get('device_id')
+ location = form_data.get('location')
+ method = form_data.get('method')
+ feature = form_data.get('feature')
+ currentAlertTableMode = form_data.get('currentAlertTableMode')
+ time_s = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
+ content = form_data.get('content')
+ enabledCellContent = form_data.get('enabledCellContent')
+ currentUnits = form_data.get('currentUnits')
+ test_only = form_data.get('test_only')
+ action = form_data.get('action')
+
+ logger.debug(f"send_requests: {user_name}")
# Create record dictionary
record = {
'user_name': user_name,
'deployment_id': deployment_id,
- 'device_id': device_id
+ 'location': location,
+ 'method': method,
+ 'feature': feature,
+ 'currentAlertTableMode': currentAlertTableMode,
+ 'time': time_s,
+ 'content': content,
+ 'currentUnits': currentUnits,
+ 'test_only': test_only,
+ 'action': action,
+ 'enabledCellContent': enabledCellContent
}
# Convert dictionary to JSON string for storage in Redis list
record_json = json.dumps(record)
# Add to queue (list) - lpush adds to the left/front of the list
- redis_conn.lpush('new_alarms', record_json)
+ redis_conn.lpush('send_requests', record_json)
-
- payload = {'ok': ok}
+ payload = {'ok': 1}
+ logger.error(f"Responding {payload}")
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
+ elif function == "node-red_deployed":
+ #this will:
+ # 1.store flow into DB
+ time_s = form_data.get('time')
+ logger.debug(f"node-red_deployed: {user_name}")
+ redis_conn.hset('node-red_deployed', mapping={
+ 'user_name': user_name,
+ 'token': token,
+ 'time': time_s,
+ 'requests': 1
+ })
+
+ payload = {'ok': 1}
+ logger.error(f"Responding {payload}")
+ resp.media = package_response(payload)
+ resp.status = falcon.HTTP_200
+
else:
- payload = {'ok': 0, 'error': "not allowed"}
+ debug_string = "Error: function not recognized!"
+ AddToLog(debug_string)
+ payload = {'ok': 0, 'error': debug_string}
resp.media = package_response(payload)
resp.status = falcon.HTTP_200
+ return
- return
+ except Exception as e:
+ print(traceback.format_exc())
+ resp.media = package_response(f"Error: {str(e)} {traceback.format_exc()}", HTTP_500)
- elif function == "send_walarm":
- # Extract data from form
- deployment_id = form_data.get('deployment_id')
- device_id = form_data.get('device_id')
- location = form_data.get('location')
- method = form_data.get('method')
- feature = form_data.get('feature')
- currentAlertTableMode = form_data.get('currentAlertTableMode')
- time_s = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
- content = form_data.get('content')
- enabledCellContent = form_data.get('enabledCellContent')
- currentUnits = form_data.get('currentUnits')
- test_only = form_data.get('test_only')
- action = form_data.get('action')
-
- logger.debug(f"send_requests: {user_name}")
-
- # Create record dictionary
- record = {
- 'user_name': user_name,
- 'deployment_id': deployment_id,
- 'location': location,
- 'method': method,
- 'feature': feature,
- 'currentAlertTableMode': currentAlertTableMode,
- 'time': time_s,
- 'content': content,
- 'currentUnits': currentUnits,
- 'test_only': test_only,
- 'action': action,
- 'enabledCellContent': enabledCellContent
- }
-
- # Convert dictionary to JSON string for storage in Redis list
- record_json = json.dumps(record)
-
- # Add to queue (list) - lpush adds to the left/front of the list
- redis_conn.lpush('send_requests', record_json)
-
- payload = {'ok': 1}
- logger.error(f"Responding {payload}")
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- elif function == "node-red_deployed":
- #this will:
- # 1.store flow into DB
- time_s = form_data.get('time')
- logger.debug(f"node-red_deployed: {user_name}")
- redis_conn.hset('node-red_deployed', mapping={
- 'user_name': user_name,
- 'token': token,
- 'time': time_s,
- 'requests': 1
- })
-
- payload = {'ok': 1}
- logger.error(f"Responding {payload}")
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
-
- else:
- debug_string = "Error: function not recognized!"
- AddToLog(debug_string)
- payload = {'ok': 0, 'error': debug_string}
- resp.media = package_response(payload)
- resp.status = falcon.HTTP_200
- return
except Exception as e:
- print(traceback.format_exc())
- resp.media = package_response(f"Error: {str(e)} {traceback.format_exc()}", HTTP_500)
+ logger.error(f"Error in on_post: {e}")
+ resp.status = falcon.HTTP_500
+ resp.media = {"error": "Internal server error"}
def on_put(self, req, resp, path=""):
@@ -20743,9 +21148,831 @@ class WellApi:
resp.media = package_response(f"DELETE to /{path} not implemented", HTTP_400)
-def SendWelcomeEmail(email, first_name, last_name, devices, phone_number, user_name, password):
+#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
def read_bounded_stream_data(request):
    """Read the full request body from Falcon's bounded stream.

    Tries several strategies in order, because depending on the Falcon
    version / WSGI server the stream exposes its length differently:
      1. a ``stream_len`` attribute,
      2. the private ``_bytes_remaining`` counter (NOTE(review): private
         API — confirm it still exists in the deployed Falcon version),
      3. ``exhaust()``,
      4. the raw underlying ``stream``.

    Returns:
        bytes: the body data, or b'' when nothing could be read.
        Never raises — all failures are logged and swallowed.
    """

    stream = request.bounded_stream
    # Log which length indicators this stream actually exposes; helps
    # diagnose which of the four fallbacks below will fire.
    logger.debug(f"Stream properties: eof={getattr(stream, 'eof', None)}, "
                 f"stream_len={getattr(stream, 'stream_len', None)}, "
                 f"bytes_remaining={getattr(stream, '_bytes_remaining', None)}")

    try:
        # Method 1: Check if stream has length info
        if hasattr(stream, 'stream_len') and stream.stream_len:
            logger.debug(f"Stream has length: {stream.stream_len}")
            data = stream.read(stream.stream_len)
            logger.debug(f"Read {len(data)} bytes using stream_len")
            return data

        # Method 2: Check bytes remaining (private attribute fallback)
        if hasattr(stream, '_bytes_remaining') and stream._bytes_remaining:
            logger.debug(f"Bytes remaining: {stream._bytes_remaining}")
            data = stream.read(stream._bytes_remaining)
            logger.debug(f"Read {len(data)} bytes using bytes_remaining")
            return data

        # Method 3: Try to exhaust the stream
        # NOTE(review): Falcon's exhaust() discards the body and returns
        # None, in which case this branch yields b'' — confirm intended.
        if hasattr(stream, 'exhaust'):
            logger.debug("Trying stream.exhaust()")
            data = stream.exhaust()
            logger.debug(f"Exhausted stream: {len(data) if data else 0} bytes")
            return data or b''

        # Method 4: Check if we can access underlying stream
        if hasattr(stream, 'stream'):
            logger.debug("Trying underlying stream")
            underlying = stream.stream
            data = underlying.read()
            logger.debug(f"Read from underlying stream: {len(data)} bytes")
            return data

        logger.error("No viable method to read from bounded stream")
        return b''

    except Exception as e:
        logger.error(f"Failed to read bounded stream: {e}")
        return b''
+
def read_chunked_data(request):
    """Read chunked transfer data from ``request.bounded_stream`` safely.

    Reads fixed-size chunks with a hard cap on iterations so a
    malfunctioning stream cannot loop forever.  Accumulates into a
    bytearray instead of repeated ``bytes +=`` (which is O(n^2)).

    Args:
        request: Falcon-style request object exposing ``bounded_stream``.

    Returns:
        bytes: the data read so far (possibly empty).  Best-effort —
        never raises; read errors stop the loop and return what we have.
    """
    try:
        chunk_size = 8192
        max_chunks = 1000  # hard cap (~8 MB) to prevent infinite loops
        buf = bytearray()  # amortized O(n) accumulation

        for _ in range(max_chunks):
            try:
                chunk = request.bounded_stream.read(chunk_size)
            except Exception as e:
                logger.debug(f"Chunk read error after {len(buf)} bytes: {e}")
                break
            if not chunk:
                break
            buf += chunk

            # Log progress for large uploads (only fires on full chunks,
            # i.e. every ~80 KB)
            if len(buf) % (chunk_size * 10) == 0:
                logger.debug(f"Read {len(buf)} bytes so far...")

        return bytes(buf)

    except Exception as e:
        logger.error(f"Failed to read chunked data: {e}")
        return b''
+
def FixDeploymentHistorySequence(connection) -> bool:
    """
    Fix the sequence for the deployment_history table if it's out of sync.

    Bug fix: the previous version called ``setval(seq, COALESCE(MAX(id), 0))``,
    which raises "value 0 is out of bounds" on an EMPTY table because 0 is
    below the sequence minimum.  We now pass ``MAX(id) + 1`` with
    ``is_called = false`` so nextval() returns MAX(id) + 1 in both the
    empty and non-empty cases — identical to the old behavior for a
    non-empty table, and no longer an error for an empty one.

    Args:
        connection: psycopg2 database connection object

    Returns:
        bool: True if sequence was fixed successfully, False otherwise
    """
    try:
        cursor = connection.cursor()

        # nextval() will subsequently return COALESCE(MAX(id), 0) + 1
        cursor.execute("""
            SELECT setval('deployment_history_id_seq',
                          COALESCE((SELECT MAX(id) FROM deployment_history), 0) + 1,
                          false);
        """)

        connection.commit()
        cursor.close()

        print("Deployment history sequence has been fixed")
        return True

    except psycopg2.Error as e:
        print(f"Error fixing sequence: {e}")
        connection.rollback()
        return False
+
def StoreToDeploymentHistory(deployment_id: int, proximity: str) -> bool:
    """
    Store a new entry to the deployment_history table with auto-generated ID
    and the current epoch time.

    Bug fix: ``connection`` is now initialised to None BEFORE the try block,
    so the exception handlers can no longer raise NameError when
    ``get_db_connection()`` itself fails.  If the first insert hits a
    duplicate-key error the id sequence is repaired and the insert retried
    once.

    NOTE(review): the connection is deliberately not closed here — the rest
    of the module treats get_db_connection() as a managed/shared connection;
    confirm before adding a close().

    Args:
        deployment_id: The deployment ID to store
        proximity: The proximity data to store

    Returns:
        bool: True if the record was successfully inserted, False otherwise
    """
    connection = None
    # id is auto-generated by the sequence; we supply the other columns
    query = """
        INSERT INTO deployment_history (deployment_id, time, proximity)
        VALUES (%s, %s, %s)
    """
    try:
        connection = get_db_connection()
        cursor = connection.cursor()

        current_epoch_time = time.time()
        cursor.execute(query, (deployment_id, current_epoch_time, proximity))
        connection.commit()
        cursor.close()

        print(f"Successfully inserted record: deployment_id={deployment_id}, time={current_epoch_time}, proximity='{proximity}'")
        return True

    except psycopg2.IntegrityError as e:
        if "duplicate key value violates unique constraint" not in str(e):
            print(f"Database integrity error in StoreToDeploymentHistory: {e}")
            connection.rollback()
            return False

        # The id sequence is behind MAX(id); repair it and retry once.
        print("Sequence appears to be out of sync. Attempting to fix...")
        connection.rollback()
        if not FixDeploymentHistorySequence(connection):
            print("Failed to fix sequence")
            return False
        try:
            cursor = connection.cursor()
            cursor.execute(query, (deployment_id, time.time(), proximity))
            connection.commit()
            cursor.close()
            print(f"Successfully inserted record after sequence fix: deployment_id={deployment_id}")
            return True
        except Exception as retry_error:
            print(f"Failed to insert even after sequence fix: {retry_error}")
            connection.rollback()
            return False

    except psycopg2.Error as e:
        print(f"Database error in StoreToDeploymentHistory: {e}")
        if connection is not None:
            connection.rollback()
        return False
    except Exception as e:
        print(f"Unexpected error in StoreToDeploymentHistory: {e}")
        if connection is not None:
            connection.rollback()
        return False
+
def ListsSame(s1: str, s2: str) -> bool:
    """
    Order-insensitive comparison of two JSON-encoded MAC address lists.

    Args:
        s1: JSON string encoding a list of MAC addresses
        s2: JSON string encoding a list of MAC addresses

    Returns:
        bool: True when both decode to lists holding exactly the same set
        of addresses (order and duplicates ignored); False when they differ
        or either string cannot be parsed.
    """
    try:
        # Set comparison makes element order irrelevant
        return set(json.loads(s1)) == set(json.loads(s2))
    except (json.JSONDecodeError, TypeError) as e:
        print(f"Error parsing JSON strings: {e}")
        return False
+
def WellIDs2MAC(devices_details_str: str) -> str:
    """
    Extract well_ids from devices_details_str and return the corresponding
    device_mac values as a JSON string.

    Bug fix: an empty device list used to build invalid SQL ("IN ()"),
    which raised a DB error that was silently swallowed; we now return
    "[]" up front without touching the database.

    Args:
        devices_details_str: JSON string with device details in the format
            ["215:Bedroom ", "218:Bathroom ", ...] (well_id before the colon).

    Returns:
        JSON string with the matching device_mac values ordered by well_id,
        e.g. ["64B7088908BC", "64B70889043C", ...]; "[]" on any error.
    """
    try:
        devices_list = json.loads(devices_details_str)

        # well_id is the integer before the FIRST colon of each entry
        well_ids = [int(entry.split(':')[0]) for entry in devices_list]

        # Guard: "IN ()" is invalid SQL, and there is nothing to look up anyway
        if not well_ids:
            return "[]"

        with get_db_connection() as conn:
            with conn.cursor() as cur:
                # Parameterized query to avoid SQL injection
                placeholders = ','.join(['%s'] * len(well_ids))
                query = f"SELECT device_mac FROM devices WHERE well_id IN ({placeholders}) ORDER BY well_id"
                cur.execute(query, well_ids)
                device_macs = [row[0] for row in cur.fetchall()]

        return json.dumps(device_macs)

    except (json.JSONDecodeError, ValueError, psycopg2.Error) as e:
        print(f"Error in WellIDs2MAC: {e}")
        return "[]"
+
+
def ConvertToMapString(my_string):
    """
    Convert a JSON array of "key|value" strings into a JSON object string.

    Each element is split on its FIRST pipe only, so values may themselves
    contain '|'.  E.g. '["a|1","b|2|3"]' -> '{"a": "1", "b": "2|3"}'.
    Later duplicate keys overwrite earlier ones.
    """
    pairs = (entry.split('|', 1) for entry in json.loads(my_string))
    return json.dumps({key: value for key, value in pairs})
+
+
def GetIfThere(address_map, key):
    """Return address_map["parsed_components"][key], or "" when the key is absent."""
    return address_map["parsed_components"].get(key, "")
+
+
def StoreFile2Blob(source_file_name, destination_file_name, path_to_use):
    """
    Store any file from local disk to MinIO blob storage.

    Args:
        source_file_name (str): Path to the source file on disk
        destination_file_name (str): Name to store the file as in blob storage
        path_to_use (str): Bucket name/path in MinIO (e.g., "user-pictures")

    Returns:
        bool: True if successful, False otherwise (all failures are logged
        via AddToLog/logger and swallowed — never raises).
    """
    try:
        # Check if source file exists; raising here routes the message
        # through the same logging path as any other failure below
        if not os.path.exists(source_file_name):
            raise Exception(f"Source file does not exist: {source_file_name}")

        # Get file size (put_object requires the exact length up front)
        file_size = os.path.getsize(source_file_name)
        AddToLog(f"File size: {file_size} bytes")

        # Open and read the file in binary mode
        with open(source_file_name, 'rb') as file:
            # Store in MinIO blob storage.
            # NOTE(review): miniIO_blob_client is a module-level Minio client —
            # presumably configured at startup; confirm bucket exists.
            miniIO_blob_client.put_object(
                path_to_use,  # Bucket name (e.g., "user-pictures")
                destination_file_name,  # Object name in bucket
                file,  # File object
                file_size  # File size
            )

        AddToLog(f"Successfully stored {source_file_name} as {destination_file_name} in {path_to_use}")
        return True

    except Exception as e:
        AddToLog(f"{traceback.format_exc()}")
        logger.error(f"{traceback.format_exc()}")
        return False
+
def parse_multipart_data_manual(body_data, boundary):
    """Parse raw multipart/form-data bytes without an external library.

    Args:
        body_data: the full request body as bytes.
        boundary: the multipart boundary as bytes (without the leading '--').

    Returns:
        (form_data, files): form_data maps field name -> str value;
        files maps field name -> {'filename', 'data', 'size'}.
        Malformed parts are silently skipped.
    """
    form_data, files = {}, {}
    delimiter = b'--' + boundary

    # First split element is the preamble, last is the closing "--" marker.
    for segment in body_data.split(delimiter)[1:-1]:
        if not segment.strip():
            continue

        # Headers are separated from the payload by a blank line.
        split_at = segment.find(b'\r\n\r\n')
        if split_at == -1:
            continue
        header_text = segment[:split_at].decode('utf-8', errors='ignore')
        payload = segment[split_at + 4:]
        # Drop the CRLF that precedes the next boundary
        if payload.endswith(b'\r\n'):
            payload = payload[:-2]

        if 'Content-Disposition: form-data;' not in header_text:
            continue

        # Field name: text between name=" and the next quote; skip malformed
        name_at = header_text.find('name="') + 6
        name_end = header_text.find('"', name_at)
        if name_at < 6 or name_end == -1:
            continue
        field = header_text[name_at:name_end]

        if 'filename=' in header_text:
            # File part: capture filename (or 'unknown') and raw bytes
            fn_at = header_text.find('filename="') + 10
            fn_end = header_text.find('"', fn_at)
            fname = header_text[fn_at:fn_end] if fn_at >= 10 and fn_end != -1 else 'unknown'
            files[field] = {
                'filename': fname,
                'data': payload,
                'size': len(payload)
            }
        else:
            # Plain text field
            form_data[field] = payload.decode('utf-8', errors='ignore')

    return form_data, files
+
def debug_multipart_info(request):
    """Collect, log, and return basic metadata about a multipart request."""
    headers = request.headers
    info = {
        "content_type": getattr(request, 'content_type', 'None'),
        "content_length": getattr(request, 'content_length', 'None'),
        # headers may be a dict-like object or something stringly — cope with both
        "headers": dict(headers) if hasattr(headers, 'items') else str(headers),
        "method": getattr(request, 'method', 'None'),
    }
    logger.debug(f"Debug multipart info: {info}")
    return info
+
def handle_multipart_request(request):
    """Handle a multipart/form-data request end to end.

    Flow: extract boundary from Content-Type, determine content length,
    read the body in chunks from the bounded stream, parse it with
    parse_multipart_data_manual(), and if a 'beneficiary_photo' file part
    is present validate it as JPEG and save it to disk.

    Returns:
        dict: {"status": "success", ...} when a photo part was processed,
        {"error": ...} on failure.
        NOTE(review): when parsing succeeds but there is NO
        'beneficiary_photo' part, the function falls through and implicitly
        returns None — callers must handle that; confirm intended.
    """
    debug_multipart_info(request)
    try:
        # Get the boundary from content type - try different ways to access headers
        content_type = ''

        # Try multiple ways to get the content type (framework-dependent)
        if hasattr(request, 'content_type') and request.content_type:
            content_type = request.content_type
        elif hasattr(request.headers, 'get'):
            content_type = request.headers.get('CONTENT-TYPE', '')
        elif 'CONTENT-TYPE' in request.headers:
            content_type = request.headers['CONTENT-TYPE']

        print(f"Content-Type: {content_type}")

        if 'boundary=' not in content_type:
            print("No boundary found, trying alternative header access...")
            # Debug: print all headers to see the structure
            print(f"Headers type: {type(request.headers)}")
            print(f"Headers: {request.headers}")
            return {"error": "No boundary found in multipart request"}

        boundary = content_type.split('boundary=')[1]
        print(f"Found boundary: {boundary}")

        # Convert to bytes (body is parsed at the byte level)
        boundary = boundary.encode()

        # Get content length, falling back to the raw header if the
        # framework attribute is unset
        content_length = request.content_length
        if content_length is None:
            # Try different ways to get content length
            if hasattr(request.headers, 'get'):
                content_length = int(request.headers.get('CONTENT-LENGTH', 0))
            elif 'CONTENT-LENGTH' in request.headers:
                content_length = int(request.headers['CONTENT-LENGTH'])
            else:
                content_length = 0

        print(f"Content length: {content_length}")

        if content_length > 50 * 1024 * 1024: # 50MB limit
            return {"error": "Request too large"}

        if content_length == 0:
            return {"error": "No content"}

        # Read the raw body data safely, never past the declared length
        body_data = b''
        bytes_read = 0
        chunk_size = 8192

        print("Starting to read body data...")
        while bytes_read < content_length:
            remaining = content_length - bytes_read
            to_read = min(chunk_size, remaining)

            chunk = request.bounded_stream.read(to_read)
            if not chunk:
                print(f"No more data at {bytes_read} bytes")
                break

            body_data += chunk
            bytes_read += len(chunk)

            if bytes_read % (chunk_size * 10) == 0: # Log every 80KB
                print(f"Read {bytes_read}/{content_length} bytes")

        print(f"Finished reading {len(body_data)} bytes of multipart data")

        # Parse multipart data
        form_data, files = parse_multipart_data_manual(body_data, boundary)

        print(f"Parsed form fields: {list(form_data.keys())}")
        print(f"Parsed files: {list(files.keys())}")

        # Handle photo file
        if 'beneficiary_photo' in files:
            photo = files['beneficiary_photo']
            photo_data = photo['data']

            print(f"Received photo: {photo['filename']}, {len(photo_data)} bytes")

            # Validate JPEG magic bytes (FF D8 FF ... FF D9) before saving.
            # NOTE(review): photo['filename'] is client-controlled — writing
            # it directly allows path traversal; consider os.path.basename().
            if photo_data[:3] == b'\xff\xd8\xff' and photo_data.endswith(b'\xff\xd9'):
                with open(photo['filename'], 'wb') as f:
                    f.write(photo_data)
                print("✅ Multipart photo saved successfully")
            else:
                print(f"❌ Invalid JPEG data - starts with: {photo_data[:10].hex()}")

            return {"status": "success", "form_data": form_data, "files": files, "filename": photo['filename']}

    except Exception as e:
        print(f"Error processing multipart request: {e}")
        traceback.print_exc()
        return {"error": str(e)}
+
+
def parse_multipart_data(request_body, content_type):
    """Parse multipart data - returns (form_data, files).

    Hardened to match parse_multipart_data_manual:
      * header bytes decode with errors='ignore' so binary junk cannot
        raise UnicodeDecodeError,
      * parts whose Content-Disposition lacks a proper name="..." are
        skipped instead of producing a garbage field name,
      * a malformed filename="..." falls back to 'unknown'.

    Args:
        request_body: full request body as bytes.
        content_type: Content-Type header value containing 'boundary=...'.

    Returns:
        (form_data, files): dict of text fields and dict of file parts
        ({'filename', 'data', 'size'}).
    """
    # Extract boundary
    boundary = content_type.split('boundary=')[1].encode()

    # Split on boundary
    parts = request_body.split(b'--' + boundary)

    form_data = {}
    files = {}

    for part in parts[1:-1]:  # Skip first empty and last closing parts
        if not part.strip():
            continue

        # Split headers from content
        header_end = part.find(b'\r\n\r\n')
        if header_end == -1:
            continue

        headers = part[:header_end].decode('utf-8', errors='ignore')
        content = part[header_end + 4:]

        # Remove the trailing CRLF that precedes the next boundary
        if content.endswith(b'\r\n'):
            content = content[:-2]

        # Parse Content-Disposition header
        if 'Content-Disposition: form-data;' in headers:
            # Extract field name; skip malformed parts
            name_start = headers.find('name="') + 6
            name_end = headers.find('"', name_start)
            if name_start < 6 or name_end == -1:
                continue
            field_name = headers[name_start:name_end]

            # Check if it's a file
            if 'filename=' in headers:
                filename_start = headers.find('filename="') + 10
                filename_end = headers.find('"', filename_start)
                filename = headers[filename_start:filename_end] if filename_start >= 10 and filename_end != -1 else 'unknown'

                files[field_name] = {
                    'filename': filename,
                    'data': content,
                    'size': len(content)
                }
            else:
                # Text field
                form_data[field_name] = content.decode('utf-8', errors='ignore')

    return form_data, files
+
def quick_fix_base64_photo(form_data):
    """Repair a Base64 photo mangled by URL-encoded form transport.

    Strips whitespace corruption, restores '=' padding, decodes, verifies
    the JPEG magic bytes, and writes the result to 'beneficiary.jpg'.

    Returns:
        bool: True when a valid JPEG was saved, False otherwise.
    """
    encoded = form_data.get('beneficiary_photo')
    if not encoded:
        return False

    try:
        # Whitespace injected in transit breaks Base64 decoding — drop it all
        compact = re.sub(r'\s+', '', encoded)

        # Restore '=' padding to a multiple of four characters
        remainder = len(compact) % 4
        if remainder:
            compact += '=' * (4 - remainder)

        image_data = base64.b64decode(compact)

        # JPEG files start with the FF D8 FF magic sequence
        if image_data[:3] != b'\xff\xd8\xff':
            logger.error("Invalid JPEG header in Base64 data")
            return False

        with open('beneficiary.jpg', 'wb') as f:
            f.write(image_data)
        print(f"Base64 photo beneficiary.jpg fixed and saved: {len(image_data)} bytes")
        return True

    except Exception as e:
        logger.error(f"Base64 photo fix failed: {e}")
        return False
+
+
+
+#def debug_received_data(form_data):
+ #"""Debug what we actually received"""
+
+ #beneficiary_photo_b64 = form_data.get('beneficiary_photo')
+
+ #if not beneficiary_photo_b64:
+ #print("❌ No photo data received")
+ #return None
+
+ #print("=== PYTHON RECEIVE DEBUGGING ===")
+ #print(f"Received base64 length: {len(beneficiary_photo_b64)}")
+ #print(f"First 50 chars: {beneficiary_photo_b64[:50]}")
+ #print(f"Last 50 chars: {beneficiary_photo_b64[-50:]}")
+
+ ## Check for whitespace
+ #whitespace_count = len(beneficiary_photo_b64) - len(beneficiary_photo_b64.replace(' ', '').replace('\n', '').replace('\r', '').replace('\t', ''))
+ #print(f"Whitespace characters: {whitespace_count}")
+
+ ## Clean and check length
+ #clean_b64 = beneficiary_photo_b64.replace(' ', '').replace('\n', '').replace('\r', '').replace('\t', '')
+ #print(f"Clean base64 length: {len(clean_b64)}")
+ #print(f"Clean mod 4: {len(clean_b64) % 4}")
+
+ ## Calculate expected original size
+ #expected_bytes = (len(clean_b64) * 3) // 4
+ #print(f"Expected decoded size: {expected_bytes} bytes")
+
+ ## Try to decode with minimal processing
+ #try:
+ ## Just fix padding without removing characters
+ #missing_padding = len(clean_b64) % 4
+ #if missing_padding:
+ #padded_b64 = clean_b64 + '=' * (4 - missing_padding)
+ #else:
+ #padded_b64 = clean_b64
+
+ #print(f"After padding: {len(padded_b64)} chars")
+
+ #decoded_bytes = base64.b64decode(padded_b64)
+ #print(f"✅ Decoded successfully: {len(decoded_bytes)} bytes")
+
+ ## Check first and last bytes
+ #if len(decoded_bytes) > 10:
+ #first_bytes = ' '.join(f'{b:02x}' for b in decoded_bytes[:10])
+ #last_bytes = ' '.join(f'{b:02x}' for b in decoded_bytes[-10:])
+ #print(f"First 10 bytes: {first_bytes}")
+ #print(f"Last 10 bytes: {last_bytes}")
+
+ ## Calculate hash for comparison
+ #data_hash = hashlib.md5(decoded_bytes).hexdigest()
+ #print(f"MD5 hash: {data_hash}")
+
+ ## Save raw decoded data
+ #with open('raw_decoded.jpg', 'wb') as f:
+ #f.write(decoded_bytes)
+ #print("Saved raw decoded data to raw_decoded.jpg")
+
+ ## Try to analyze the structure
+ #if decoded_bytes[:3] == b'\xff\xd8\xff':
+ #print("✅ Valid JPEG header")
+
+ ## Look for JPEG end marker
+ #if decoded_bytes.endswith(b'\xff\xd9'):
+ #print("✅ Valid JPEG end marker")
+ #else:
+ ## Find where JPEG data actually ends
+ #last_ffd9 = decoded_bytes.rfind(b'\xff\xd9')
+ #if last_ffd9 != -1:
+ #print(f"⚠️ JPEG end marker found at position {last_ffd9}, but file continues for {len(decoded_bytes) - last_ffd9 - 2} more bytes")
+
+ ## Try extracting just the JPEG part
+ #jpeg_only = decoded_bytes[:last_ffd9 + 2]
+ #with open('jpeg_extracted.jpg', 'wb') as f:
+ #f.write(jpeg_only)
+ #print(f"Extracted JPEG part ({len(jpeg_only)} bytes) to jpeg_extracted.jpg")
+
+ #else:
+ #print("❌ No JPEG end marker found anywhere")
+ #else:
+ #print("❌ Invalid JPEG header")
+
+ #return decoded_bytes
+
+ #except Exception as e:
+ #print(f"❌ Decode failed: {e}")
+ #return None
+
+
def fix_incomplete_jpeg(image_bytes):
    """Append the JPEG end-of-image marker (FF D9) when it is missing.

    Returns the repaired bytes if appending the marker yields a loadable
    image; otherwise returns the input unchanged.
    """
    print(f"Original image size: {len(image_bytes)} bytes")
    print(f"Ends with: {image_bytes[-10:].hex()}")

    if image_bytes.endswith(b'\xff\xd9'):
        # End-of-image marker already present: nothing to repair
        return image_bytes

    print("❌ Missing JPEG end marker, adding it...")
    repaired = image_bytes + b'\xff\xd9'
    print(f"Fixed image size: {len(repaired)} bytes")

    # Verify the repair actually produces a decodable image
    try:
        with Image.open(io.BytesIO(repaired)) as img:
            img.load()  # force full decode to validate
            print(f"✅ Successfully repaired JPEG: {img.format} {img.size}")
            return repaired
    except Exception as e:
        print(f"Adding end marker didn't work: {e}")

    return image_bytes
+
def robust_base64_decode_v3(base64_string):
    """Decode a possibly-corrupted Base64 JPEG by trial truncation.

    Removes 0..9 trailing characters (re-padding each candidate) until one
    decodes to bytes with a JPEG header that, after end-marker repair via
    fix_incomplete_jpeg(), loads as a complete image.

    Returns:
        bytes | None: the repaired image bytes, or None if no candidate works.
    """
    print(f"Original string length: {len(base64_string)}")

    clean_string = re.sub(r'\s+', '', base64_string)
    print(f"After cleaning: {len(clean_string)}")

    # Build candidate strings: progressively drop trailing chars, re-pad
    strategies = []
    for remove_count in range(10):
        candidate = clean_string[:-remove_count] if remove_count > 0 else clean_string
        shortfall = len(candidate) % 4
        if shortfall:
            candidate += '=' * (4 - shortfall)
        strategies.append((f"Remove {remove_count}, pad to {len(candidate)}", candidate))

    for strategy_name, candidate in strategies:
        try:
            print(f"Trying {strategy_name}")

            image_bytes = base64.b64decode(candidate)
            print(f"  Decoded to {len(image_bytes)} bytes")

            if image_bytes[:3] != b'\xff\xd8\xff':
                print(f"  ❌ Invalid JPEG header: {image_bytes[:10].hex()}")
                continue
            print(f"  ✅ Valid JPEG header")

            # Repair a missing FF D9 end marker, then do a full decode check
            fixed_bytes = fix_incomplete_jpeg(image_bytes)
            try:
                with Image.open(io.BytesIO(fixed_bytes)) as img:
                    img.load()
                    print(f"  ✅ Valid complete JPEG: {img.format} {img.size}")
                    return fixed_bytes
            except Exception as e:
                print(f"  ❌ Still invalid: {e}")
                continue

        except Exception as e:
            print(f"  {strategy_name} failed: {e}")

    return None
+
+
def fix_base64_padding(base64_string):
    """
    Normalize a Base64 string: strip whitespace and restore '=' padding.

    A length of 4k+1 cannot be produced by a valid encoder, so in that
    case the final (presumably corrupted) character is dropped before the
    padding is recalculated.
    """
    stripped = re.sub(r'\s+', '', base64_string)

    if len(stripped) % 4 == 1:
        # One char over a multiple of 4 => corrupted tail; drop it
        stripped = stripped[:-1]

    shortfall = len(stripped) % 4
    return stripped + '=' * (4 - shortfall) if shortfall else stripped
+
+
def GenerateUserNameWithContext(user_name, first_name: str, last_name: str, user_id) -> str:
    """
    Generate a unique username with automatic cursor management.

    If ``user_name`` is empty, a base username is built from the first letter
    of ``first_name`` plus the cleaned ``last_name``; otherwise the supplied
    ``user_name`` is the base.  A numeric suffix (1..9999) is appended until
    an available name is found.  A name already owned by ``user_id`` counts
    as available, so re-saving a user keeps their username.

    Bug fixes: candidates are lowercased before comparing against
    ``LOWER(user_name)`` (a mixed-case base name could never match its own
    row before); validation now happens before the DB connection is opened;
    ``== None`` replaced with ``is None``.

    Raises:
        ValueError: if first_name or last_name is missing/empty.
        RuntimeError: on database error or when no free name exists.
    """
    if not first_name or not last_name:
        raise ValueError("Both first_name and last_name must be provided and non-empty")

    first_letter = first_name.strip().lower()[0]
    clean_last_name = last_name.strip().lower().replace(' ', '')
    if user_name is None or user_name.strip() == "":
        base_username = f"{first_letter}{clean_last_name}"
        base_username = ''.join(c for c in base_username if c.isalnum())
    else:
        base_username = user_name

    # NOTE(review): the connection is intentionally not closed here — the
    # rest of the module treats get_db_connection() as managed/shared.
    connection = get_db_connection()

    def is_username_available(cursor, username):
        """True when username is unused or already owned by user_id."""
        # The DB side is lowercased, so lowercase the candidate too
        cursor.execute(
            "SELECT user_id FROM public.person_details WHERE LOWER(user_name) = %s",
            (username.lower(),)
        )
        result = cursor.fetchone()
        return result is None or result[0] == user_id

    try:
        with connection.cursor() as cursor:
            # Check base username first
            if is_username_available(cursor, base_username):
                return base_username

            # Find the next available numbered variant
            for counter in range(1, 10000):
                candidate_username = f"{base_username}{counter}"
                if is_username_available(cursor, candidate_username):
                    return candidate_username
            raise RuntimeError("Unable to generate unique username after 9999 attempts")
    except psycopg2.Error as e:
        raise RuntimeError(f"Database error occurred: {e}")
+
def SendWelcomeCaretakerEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature):
    """Queue a 'new_caretaker' welcome-email request on the Redis stream.

    The actual email is composed and sent by the consumer of
    'messaging_requests_stream'; this function only enqueues the payload.

    NOTE(review): reconstructed from a garbled patch region — field set
    mirrors the sibling senders (SendWelcomeBeneficiaryEmail etc.); confirm
    'last_name' belongs in the payload against the stream consumer.
    """
    # error level is used for informational tracing throughout this module
    logger.error(f"Requesting welcome email to {email}")

    queue_data = {
        'function': "new_caretaker",
        'email': email,
        'user_name': user_name,
        'first_name': first_name,
        'last_name': last_name,
        'devices': devices,
        'phone_number': phone_number,
        'password': password,
        'signature': signature,
        'requests': 1,
        'timestamp': time.time()  # lets the consumer detect stale requests
    }
    redis_conn.xadd('messaging_requests_stream', queue_data)
+
+
def SendWelcomeBeneficiaryEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature=""):
    """Queue a 'new_beneficiary' welcome-email request on the Redis stream.

    Bug fix: the original body referenced `signature` without it being a
    parameter, so every call raised NameError. It is now an optional
    keyword parameter (default "") to stay backward compatible with
    existing callers that pass only the original seven arguments.
    """
    # error level is used for informational tracing throughout this module
    logger.error(f"Requesting welcome beneficiary email to {email}")

    queue_data = {
        'function': "new_beneficiary",
        'email': email,
        'user_name': user_name,
        'first_name': first_name,
        'last_name': last_name,
        'devices': devices,
        'phone_number': phone_number,
        'password': password,
        'signature': signature,
        'requests': 1,
        'timestamp': time.time()  # lets the consumer detect stale requests
    }
    redis_conn.xadd('messaging_requests_stream', queue_data)
+
def SendCredentialsChangedEmail(email, first_name, last_name, devices, phone_number, user_name, password, signature=""):
    """Queue a 'credentials_updated' notification request on the Redis stream.

    Bug fix: the original body referenced `signature` without it being a
    parameter, so every call raised NameError. It is now an optional
    keyword parameter (default "") to stay backward compatible with
    existing callers that pass only the original seven arguments.
    """
    # error level is used for informational tracing throughout this module
    logger.error(f"Requesting credentials changed of beneficiary email to {email}")

    queue_data = {
        'function': "credentials_updated",
        'email': email,
        'user_name': user_name,
        'first_name': first_name,
        'last_name': last_name,
        'devices': devices,
        'phone_number': phone_number,
        'password': password,
        'signature': signature,
        'requests': 1,
        'timestamp': time.time()  # lets the consumer detect stale requests
    }
    redis_conn.xadd('messaging_requests_stream', queue_data)
+
def CallICLUpdate():
    """Enqueue an ACL-refresh request on the messaging stream.

    NOTE(review): the function name says 'ICL' while the payload and log
    message say ACL — kept as-is for caller compatibility.
    """
    logger.error(f"Requesting ACL update")
    payload = {
        'function': "update_acl",
        'requests': 1,
        'timestamp': time.time(),  # lets the consumer detect stale requests
    }
    redis_conn.xadd('messaging_requests_stream', payload)
@@ -20776,7 +22050,12 @@ def DevicesNotUsed(devices, user_name):
return 1, {"deployed": [], "not_found": []}
# Clean and split the devices string
- device_list = [CleanObject(device.strip()) for device in devices.split(',') if device.strip()]
+ #device_list = [CleanObject(device.strip()) for device in devices.split(',') if device.strip()]
+
+ devices_clean = devices.strip('[]"').replace('"', '')
+ device_list = [device.split(':')[0].strip() for device in devices_clean.split(',') if device.strip()]
+
+
if not device_list:
return 1, {"deployed": [], "not_found": []}
@@ -21240,10 +22519,15 @@ MQTT_PortL = 443
MyName = "well-api"

# Local MQTT client over websockets; the timestamp suffix gives each process
# start a unique client id so reconnects don't kick an older session.
clientL = mqtt.Client(
    client_id=MyName + str(time.time()),
    transport="websockets",
    callback_api_version=mqtt.CallbackAPIVersion.VERSION2  # paho-mqtt 2.x callback API
)

# SECURITY: CERT_NONE disables TLS certificate verification (self-signed
# broker cert). Use a proper CA bundle in production.
clientL.tls_set(cert_reqs=ssl.CERT_NONE)
clientL.ws_set_options(path="/mqtt")  # must match the broker's websocket path
clientL.username_pw_set(MQTT_USER, MQTT_PASS)  # credentials sourced from environment, not hard-coded
clientL.on_connect = on_connectL
clientL.on_message = on_messageL