Ver 2.0.0: back to original webhook, and ReadObjectMinIO can check data

This commit is contained in:
parent 67c6813c35
commit 8bc18f0c3b

well-api.py: 711 changed lines
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-
+#Version 2.0.0
 import os
 import sys
 import ast
@@ -395,21 +395,53 @@ def SaveGenericObjectInBlob(bucket_name, file_name, obj):
         logger.error(f"Error saving object to blob: {traceback.format_exc()}")
         return False

-def ReadObjectMinIO(bucket_name, file_name):
+def ReadObjectMinIO(bucket_name, file_name, filter_date=None):
+    """
+    Read object from MinIO with optional date filtering.
+
+    Args:
+        bucket_name (str): Name of the MinIO bucket
+        file_name (str): Name of the file/object
+        filter_date (str, optional): Date string in format "YYYY-MM-DD".
+            If provided, returns None if the object was modified on or
+            before this date.
+
+    Returns:
+        str: Object content as string, or None if filtered out or on error
+    """
     try:
+        # If date filtering is requested, check the object's last-modified date first
+        if filter_date:
+            try:
+                # Get object metadata to check the last-modified date
+                stat = miniIO_blob_client.stat_object(bucket_name, file_name)
+                last_modified = stat.last_modified
+
+                # Parse the filter date (format YYYY-MM-DD)
+                target_date = datetime.datetime.strptime(filter_date, "%Y-%m-%d").date()
+
+                # If the object was modified on or before the target date, treat it as stale
+                if last_modified.date() <= target_date:
+                    return None
+
+            except S3Error as e:
+                logger.error(f"Error getting metadata for {file_name}: {e}")
+                return None
+            except ValueError as e:
+                logger.error(f"Invalid date format '{filter_date}': {e}")
+                return None
+
         # Retrieve the object data
         response = miniIO_blob_client.get_object(bucket_name, file_name)

         # Read the data from response
         data_bytes = response.read()

-        # Convert bytes to string and then load into a dictionary
+        # Convert bytes to string
         data_string = data_bytes.decode('utf-8')

         # Don't forget to close the response
         response.close()
         response.release_conn()

         return data_string

     except S3Error as e:
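Call sites can now use the third argument as a cheap cache-invalidation check. A minimal sketch (the bucket and object names follow the patterns used elsewhere in this file; the specific device and date values are hypothetical):

    # Hypothetical call site: accept the cached blob only if it was written
    # after the day being reported on.
    cached = ReadObjectMinIO("filtered-presence",
                             "/559/559_2025-05-20_6_presence.bin",
                             filter_date="2025-05-20")
    if cached is None:
        # Missing, stale (last modified on/before 2025-05-20), or unreadable:
        # recompute the day's presence data and re-save it.
        pass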
@@ -1817,7 +1849,7 @@ def GetDeviceDetailsComplete(cur, deployment_ids, location_id):
         mac = device_table_record[1]
         well_id = device_table_record[2]
         description = device_table_record[3]
-        alarm_details = device_table_record[36]
+        alarm_details = device_table_record[16]
         if description == None:
             description = ""
         if device_table_record[5] != None:
@@ -2308,11 +2340,17 @@ def filter_short_groups_c_wc(presence_list, filter_size, device_id_str, from_dat
     filtered_day_str = None

     if not refresh:
-        filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
+        filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, current_date_str)

-    if filtered_day_str is None:
+    if filtered_day_str is not None and filtered_day_str != "":
+        has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
+        if has_larger:
+            filtered_day_str = None
+
+    if filtered_day_str is None or filtered_day_str == "":
         # Filter the input data
         input_data = presence_list[input_start_idx:input_end_idx]
+        print(input_start_idx, input_end_idx, filter_size, device_id_str, from_date, len(input_data))
         filtered_data = filter_short_groups_c(input_data, filter_size, device_id_str, from_date)

         # Extract the portion corresponding to this day
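The regex guard is the "check data" part of the commit message: it rejects a cached presence string that contains any decimal value of 2.0 or more (a valid presence array should only hold values in the 0–1 range). A small self-check, assuming the cached blob is a JSON list serialized as text:

    import re

    # Matches any decimal literal with value >= 2.0: the integer part is a
    # single digit 2-9, or has two or more digits.
    pattern = r'\b(?:[2-9]|\d{2,})\.\d+\b'
    assert re.search(pattern, "[0, 1.0, 2.5, 0]")      # 2.5 -> cache rejected
    assert re.search(pattern, "[0, 10.25]")            # 10.25 -> rejected
    assert not re.search(pattern, "[0, 1.0, 0.5, 0]")  # in-range values -> kept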
@@ -2530,7 +2568,7 @@ def GetLastDurationMinutes(deployment_id, selected_devices, filter, ddate):
            tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
        )

-    presence_map = optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
+    presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)


     if myz_data != None:
@@ -2937,18 +2975,18 @@ def GetSensorsDetailsFromDeployment(deployment_id, ddate, filter_minutes, fast=T
                 "last_detected_time": last_present_time,
                 "wellness_score_percent": wellness_score_percent,
                 "wellness_descriptor_color": "bg-green-100 text-green-700",
-                "bedroom_temperature": bedroom_temperature,
+                "bedroom_temperature": round(bedroom_temperature, 2),
                 "sleep_bathroom_visit_count": sleep_bathroom_visit_count,
                 "bedroom_co2": bedroom_co2,
                 "shower_detected_time": shower_detected_time,
                 "breakfast_detected_time": breakfast_detected_time,
-                "living_room_time_spent": living_room_time_spent,
-                "outside_hours": outside_hours,
+                "living_room_time_spent": round(living_room_time_spent, 2),
+                "outside_hours": round(outside_hours, 2),
                 "wellness_descriptor": "Great!",
                 "last_seen_alert": "Alert = None",
                 "last_seen_alert_colors": "bg-green-100 text-green-700", #https://tailwindcss.com/docs/colors
                 "most_time_spent_in": "Bedroom",
-                "sleep_hours": sleep_hours
+                "sleep_hours": round(sleep_hours, 2)
             }
         except Exception as e:
             print(traceback.format_exc())
@@ -6108,6 +6146,124 @@ def ConvertToBase(time_from_str, time_zone_s):
     dt = datetime.datetime.strptime(time_from_str, "%Y-%m-%d %H:%M:%S%z")
     return dt

+def GetTimeAndEvents(data):
+    """
+    Calculates non-zero elements and consecutive non-zero groups using itertools.
+    This is often the most readable and efficient pure Python approach.
+    """
+    # Fast way to count non-zeros since they are all 1.0
+    #non_zeros = int(sum(data))
+    non_zeros = sum(1 for x in data if x != 0)
+    # Count groups of non-zero elements
+    events = sum(1 for key, group in itertools.groupby(data) if key != 0.0)
+    return non_zeros, events
+
+
+def current_date_at_tz(timezone_str):
+    """
+    Returns the current date in the specified timezone in yyyy-mm-dd format.
+
+    Args:
+        timezone_str (str): Timezone string like "America/Los_Angeles"
+
+    Returns:
+        str: Current date in yyyy-mm-dd format
+    """
+    # Get the timezone object
+    tz = pytz.timezone(timezone_str)
+
+    # Get current datetime in the specified timezone
+    current_dt = datetime.datetime.now(tz)
+
+    # Format as yyyy-mm-dd
+    return current_dt.strftime('%Y-%m-%d')
+
+
+def GetActivities(device_id, well_id, date_str, filter_size, refresh, timezone_str, radar_threshold_group_st):
+    # filtered_day holds the non-zero points where radar reads exceeded the threshold
+    device_id_str = str(device_id)
+
+    try:
+        time_from_str, time_to_str = GetLocalTimeForDate(date_str, timezone_str)
+        filename_day_presence = f"/{device_id_str}/{device_id_str}_{date_str}_{filter_size}_presence.bin"
+        filtered_day_str = None
+        if refresh == False and date_str != current_date_at_tz(timezone_str):
+            has_larger = False
+            filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence, date_str)
+            if filtered_day_str != None and filtered_day_str != "":
+                has_larger = bool(re.search(r'\b(?:[2-9]|\d{2,})\.\d+\b', filtered_day_str))
+                if has_larger:
+                    filtered_day_str = None
+        if filtered_day_str == None:
+
+            radar_fields_of_interest = []
+
+            try:
+                threshold_lst = json.loads(radar_threshold_group_st)
+            except:
+                threshold_lst = ["s3_max", 12]
+            radar_fields_of_interest = [threshold_lst[0]]
+            ids_list = [int(device_id)]
+            devices_list_str = device_id_str
+            #sql = get_deployment_radar_only_colapsed_query(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+            sql = get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest)
+            print(sql)
+
+            with get_db_connection() as conn:
+                with conn.cursor() as cur:
+                    cur.execute(sql)
+                    my_data = None
+                    my_data = cur.fetchall()
+
+            days_difference = 1
+            zeros_list = [0] * 6 * 1440 * days_difference
+            presence_map = {'presence': {}}
+            presence_map['presence'][well_id] = zeros_list
+
+            if radar_threshold_group_st == None:
+                radar_threshold_group_st = '["s3",12]' #last value is threshold to s28 composite
+
+            if len(radar_threshold_group_st) > 8:
+                radar_threshold_group = json.loads(radar_threshold_group_st)
+            else:
+                radar_threshold_group = ["s3", 12]
+
+            device_id_2_location = {well_id: ""}
+            device_id_2_threshold = {well_id: radar_threshold_group}
+            device_field_indexes = {radar_threshold_group[0].split("_")[0]: 1} #len(radar_fields_of_interest)
+            id2well_id = {device_id: well_id}
+
+            if len(my_data) > 1:
+                start_time_ = my_data[0][0]
+                parsed_time_ = datetime.datetime.strptime(time_from_str, '%Y-%m-%d %H:%M:%S%z')
+
+                #start_time = datetime.datetime(
+                #    parsed_time.year,
+                #    parsed_time.month,
+                #    parsed_time.day,
+                #    parsed_time.hour, # Adjust for UTC-7
+                #    parsed_time.minute,
+                #    parsed_time.second,
+                #    tzinfo=datetime.timezone(datetime.timedelta(hours=-7))
+                #)
+
+                presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, "presence")
+
+            presence_list = filter_short_groups_c_wc(presence_map["presence"][id2well_id[device_id]], filter_size, device_id_str, date_str, date_str, timezone_str)
+            filtered_day_str = ReadObjectMinIO("filtered-presence", filename_day_presence)
+            filtered_day = json.loads(filtered_day_str)
+        else:
+            filtered_day = json.loads(filtered_day_str)
+
+        non_zeros, events = GetTimeAndEvents(filtered_day)
+
+        return(non_zeros / 360, events) #decas to hours
+    except Exception as e:
+        print(filename_day_presence)
+        print(filtered_day_str)
+        print(traceback.format_exc())
+        return(0, 0)
+
 def CreateFullLocationMap(location_image_file, devices_list, selected_date,
                           map_type, force_recreate, chart_type, bw, motion, scale_global, fast, filter_minutes, time_zone_s):
     #global Id2MACDict
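GetTimeAndEvents drives the return value of GetActivities: each presence sample covers ten seconds, so 360 non-zero samples make one hour (the `non_zeros / 360` "decas to hours" conversion), while `itertools.groupby` counts each run of consecutive non-zero samples as one event. A worked example:

    # Two presence runs: [1.0, 1.0] and [1.0] -> 3 non-zero samples, 2 events.
    day = [0.0, 1.0, 1.0, 0.0, 1.0, 0.0]
    non_zeros, events = GetTimeAndEvents(day)
    assert (non_zeros, events) == (3, 2)
    hours = non_zeros / 360  # 3 ten-second samples = 30 seconds, about 0.0083 hours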
@@ -8617,6 +8773,69 @@ def get_deployment_radar_10sec_snapped_query(devices_list_str, time_from_str, ti
     """
     return sql

+def get_deployment_radar_10sec_snapped_query_min_max(devices_list_str, time_from_str, time_to_str, ids_list, radar_fields_of_interest):
+    """
+    Generate a TimeScaleDB query for radar readings based on device IDs with time snapped to 10-second intervals.
+
+    Parameters:
+        devices_list_str (str): Comma-separated string of device IDs
+        time_from_str (str): Start time for the query
+        time_to_str (str): End time for the query
+        ids_list (list): List of device IDs in priority order for sorting
+        radar_fields_of_interest (list): List of field names required across all devices
+
+    Returns:
+        str: Generated SQL query
+    """
+
+    # Generate the CASE statement for ordering based on the provided ids_list
+    case_statements = []
+    for index, device_id in enumerate(ids_list, start=1):
+        case_statements.append(f"WHEN {device_id} THEN {index}")
+
+    case_order = "\n            ".join(case_statements)
+
+    # Handle fields processing
+    select_fields = []
+    for field in radar_fields_of_interest:
+
+        radar_fields = field.split("_")
+        field_t = radar_fields[0]
+        if field_t == "s28":
+            if radar_fields[1] == "max":
+                select_fields.append("MAX((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
+            else:
+                select_fields.append("MIN((s2+s3+s4+s5+s6+s7+s8)/7) AS s28")
+        else:
+            if radar_fields[1] == "max":
+                select_fields.append(f"MAX({field_t}) as {field}")
+            else:
+                select_fields.append(f"MIN({field_t}) as {field}")
+
+    fields_str = ", ".join(select_fields)
+
+    sql = f"""
+    SELECT
+        time_bucket('10 seconds', time) AS ten_seconds,
+        device_id,
+        {fields_str}
+    FROM
+        radar_readings
+    WHERE
+        device_id IN ({devices_list_str})
+        AND time >= '{time_from_str}'
+        AND time < '{time_to_str}'
+    GROUP BY
+        ten_seconds,
+        device_id
+    ORDER BY
+        CASE device_id
+            {case_order}
+        END,
+        ten_seconds
+    """
+    return sql
+
 def export_query_to_minio_chunked(connection_params, query, minio_client, bucket_name, blob_name=None, chunksize=10000):
     """
     Export query results to MinIO as CSV in chunks to handle large datasets
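For the single-device case in GetActivities (radar_fields_of_interest like ["s3_max"]), the generator produces a query of roughly this shape (the device id and timestamps are illustrative, and the timestamp format is assumed from the `%Y-%m-%d %H:%M:%S%z` parsing above):

    sql = get_deployment_radar_10sec_snapped_query_min_max(
        "559", "2025-05-20 00:00:00-0700", "2025-05-21 00:00:00-0700",
        [559], ["s3_max"])
    # After trimming whitespace:
    #   SELECT time_bucket('10 seconds', time) AS ten_seconds, device_id, MAX(s3) as s3_max
    #   FROM radar_readings
    #   WHERE device_id IN (559)
    #     AND time >= '2025-05-20 00:00:00-0700' AND time < '2025-05-21 00:00:00-0700'
    #   GROUP BY ten_seconds, device_id
    #   ORDER BY CASE device_id WHEN 559 THEN 1 END, ten_seconds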
@@ -11559,6 +11778,58 @@ class RequestParser:
         else:
             logger.debug("RequestParser: No body data read")

+def FindDeviceByRole(deployment_id, location_list):
+
+    #For purposes of the activity report, Bedroom and Bathroom are determined in order of priority:
+    #Bedroom: "Bedroom Master", "Bedroom", "Bedroom Guest" (106, 56, 107)
+    #Bathroom: "Bathroom Main", "Bathroom", "Bathroom Guest" (104, 103, 105)
+
+    #location_names_inverted = {"All": -1, "?": 0, "Office": 5, "Hallway": 6, "Garage": 7, "Outside": 8, "Conference Room": 9, "Room": 10, "Kitchen": 34,
+    #    "Bedroom": 56, "Living Room": 78, "Bathroom": 102, "Dining Room": 103, "Bathroom Main": 104, "Bathroom Guest": 105,
+    #    "Bedroom Master": 106, "Bedroom Guest": 107, "Conference Room": 108, "Basement": 109, "Attic": 110, "Other": 200}
+
+    ttime = datetime.datetime.utcnow().timestamp()
+
+    devices_list, device_ids = GetProximityList(deployment_id, ttime)
+
+    if location_list != []:
+        for location in location_list:
+            for device in devices_list:
+                well_id = device[0]
+                device_id = device[1]
+                location_t = device[2]
+                if location_t == location:
+                    return (device_id, location, well_id)
+
+    else:
+        conn = get_db_connection()
+        with conn.cursor() as cur:
+
+            #we need to find beneficiaries from the list of deployments
+            #sql = f'SELECT device_id FROM public.devices where device_id in {device_ids} and other="other"'
+            sql = "SELECT device_id, location, well_id FROM public.devices WHERE device_id = ANY(%s) AND other = %s"
+            #print(sql)
+            cur.execute(sql, (device_ids, "other"))
+            result = cur.fetchall() #cur.fetchone()
+            if len(result) > 0:
+                return result[0]
+            else:
+
+                devices_list, device_ids = GetProximityList(deployment_id, ttime)
+                for device in devices_list:
+                    well_id = device[0]
+                    device_id = device[1]
+                    location_t = device[2]
+                    if "Bathroom" in location_t or "Bedroom" in location_t or "Kitchen" in location_t:
+                        pass
+                    else:
+                        return (device_id, location_t, well_id)
+
+    return (0, 0, 0)
+
 def ensure_date_order(from_date, to_date):
     """
     Ensures that from_date is earlier than to_date.
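The priority semantics come from the loop order: the outer loop walks the role names in order, so the first name any device matches wins. An empty list falls back to the DB lookup for the "most present" device (other = "other"), and (0, 0, 0) is the not-found sentinel. A hypothetical call:

    device_id, location, well_id = FindDeviceByRole(
        deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
    if device_id == 0:
        # No bathroom-role device in this deployment.
        pass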
@@ -11584,6 +11855,34 @@ def signum(x):
     return (x > 0) - (x < 0)

+def get_week_days_and_dates(days_back, timezone_str="America/Los_Angeles"):
+    """
+    Generate weekdays and dates from days_back days ago until today for a given timezone.
+
+    Args:
+        days_back (int): Number of days to include, ending with today
+        timezone_str (str): Timezone string like "America/Los_Angeles"
+
+    Returns:
+        list: List of tuples containing (date_string, weekday_name, day_of_month)
+    """
+    # Get the timezone object
+    tz = pytz.timezone(timezone_str)
+
+    # Get current date in the specified timezone
+    today = datetime.datetime.now(tz).date()
+
+    # Generate dates from days_back days ago to today
+    result = []
+    for i in range(days_back - 1, -1, -1):  # days_back days ago to today (inclusive)
+        date = today - timedelta(days=i)
+        weekday_name = date.strftime("%A")  # Full weekday name
+        date_string = date.strftime("%Y-%m-%d")  # ISO format date
+        day_of_month = date.day
+        result.append((date_string, weekday_name, day_of_month))
+
+    return result
+
 def filter_short_groups_numpy_orig(presence_list, filter_size, device_id, dates_str):
     """
     Optimized version using NumPy to remove groups of consecutive zeros
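Each tuple is (date_string, weekday_name, day_of_month), ordered oldest first and ending with today; the report code below uses the weekday name for weekly titles and the day of month for monthly and six-month titles. For example (dates are illustrative):

    week = get_week_days_and_dates(7, "America/Los_Angeles")
    # e.g. [('2025-05-14', 'Wednesday', 14), ..., ('2025-05-20', 'Tuesday', 20)]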
@@ -12815,6 +13114,9 @@ def optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_thre
             field_index = field_index_cache[threshold_sig]

             # Get radar value using cached field index
+            if field_index >= len(radar_read):
+                radar_val = radar_read[-1]
+            else:
                 radar_val = radar_read[field_index]

             # Process presence data
@@ -12880,57 +13182,77 @@ def store_to_file(my_list, filename):
         print(f"Error: Could not serialize list to JSON. {e}") # e.g. if list contains unsupported types like sets

 def find_custom_header(headers, name):
-    """Find a custom header by name in the Telnyx webhook payload"""
-    if not headers:
-        return None
+    """Helper to find a custom header value (case-insensitive name)."""
+    if not headers: return None
     for header in headers:
-        if header.get('name', '').lower() == name.lower():
-            return header.get('value')
+        if header.get('name', '').lower() == name.lower(): return header.get('value')
     return None

-def create_client_state(base_event, call_control_id, prefix):
-    """Create a base64 encoded client state string as required by Telnyx API"""
-    # Create the plain text client state string
-    plain_state = f"{prefix}_{base_event}_{call_control_id[:8]}" if call_control_id else f"{prefix}_{base_event}_unknownccid"
-
-    # Encode to base64 as required by Telnyx API
+def encode_state(parts):
+    """Joins parts with a pipe and base64 encodes the result."""
+    plain_state = "|".join(map(str, parts))
     base64_state = base64.b64encode(plain_state.encode('utf-8')).decode('ascii')
-    logger.debug(f"Client state created: '{plain_state}' -> base64: '{base64_state}'")
+    # Assuming 'logger' is your app's logger instance
+    logger.debug(f"Encoded state: '{plain_state}' -> '{base64_state}'")
     return base64_state

-def send_telnyx_command(command, params, api_key):
-    """Send command to Telnyx API"""
+def decode_state(b64_state):
+    """Decodes a base64 state and splits it by pipe."""
+    if not b64_state: return []
+    try:
+        decoded_plain = base64.b64decode(b64_state).decode('utf-8')
+        parts = decoded_plain.split('|')
+        logger.debug(f"Decoded state: '{b64_state}' -> '{decoded_plain}' -> {parts}")
+        return parts
+    except Exception as e:
+        logger.error(f"Failed to decode client_state '{b64_state}': {e}")
+        return []
+
+def send_telnyx_command(action_path, params, api_key):
+    """
+    Sends a command to the Telnyx Call Control API actions endpoint.
+    This function should REPLACE your existing send_telnyx_command.
+    """
+    if not api_key:
+        logger.error(f"CMDFAIL ('{action_path}'): API_KEY not available.")
+        return None
+
+    ccid = params.get("call_control_id")
+    if not ccid:
+        logger.error(f"CMDFAIL ('{action_path}'): call_control_id missing in params.")
+        return None
+
+    # Correct endpoint construction for V2 actions
+    endpoint = f"{TELNYX_API_BASE_URL}/calls/{ccid}/{action_path}"
+
+    # Body should not contain call_control_id for actions API
+    body = {k: v for k, v in params.items() if k != 'call_control_id'}
+
     headers = {
         "Authorization": f"Bearer {api_key}",
-        "Content-Type": "application/json"
+        "Content-Type": "application/json",
+        "Accept": "application/json"
     }

-    call_control_id = params.pop("call_control_id", None)
-    data = json.dumps(params)
-
-    logger.debug(f"Sending {command} command to Telnyx API: {data}")
-
-    # Fix for v3 call control IDs - use the correct URL format
-    if call_control_id and call_control_id.startswith("v3:"):
-        # For v3 calls
-        url = f"{TELNYX_API_BASE_URL}/calls/{call_control_id}/{command}"
-    else:
-        # For backward compatibility with v2 or other formats
-        url = f"{TELNYX_API_BASE_URL}/{call_control_id}/{command}"
+    logger.info(f"SENDCMD ('{action_path}')")
+    logger.debug(f"  Endpoint: POST {endpoint}")
+    logger.debug(f"  JSON Payload: {json.dumps(body, indent=2)}")

     try:
-        response = requests.post(url, headers=headers, data=data)
-        if response.status_code >= 200 and response.status_code < 300:
-            logger.debug(f"Telnyx command {command} sent successfully.")
-            return True
-        else:
-            logger.error(f"Telnyx rejected {command} command. Status: {response.status_code}")
-            logger.error(f"Response body: {response.text}\n")
-            return False
-    except Exception as e:
-        logger.exception(f"Error sending Telnyx command {command}: {str(e)}")
-        return False
+        response = requests.post(endpoint, json=body, headers=headers, timeout=10)
+        response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
+        logger.info(f"CMDOK ('{action_path}'): Telnyx accepted. Status: {response.status_code}")
+        return response.json()
+    except requests.exceptions.HTTPError as e:
+        logger.error(f"CMDFAIL ('{action_path}'): Telnyx rejected. Status: {e.response.status_code}")
+        try:
+            logger.error(f"  Telnyx Err Detail: {json.dumps(e.response.json(), indent=2)}")
+        except json.JSONDecodeError:
+            logger.error(f"  Raw Err Body: {e.response.text[:500]}")
+    except requests.exceptions.RequestException as e:
+        logger.exception(f"CMDFAIL ('{action_path}'): Network error")
+
+    return None

 def StoreToDB(data):
     event_type = data.get('event_type')
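encode_state/decode_state round-trip the pipe-delimited client state that Telnyx echoes back on each webhook. One caveat: a media value containing a literal '|' would split into extra parts. A quick round trip:

    state = encode_state(['INIT_PLAY_MAIN', 'tts', 'Hello from Well'])
    # base64 of "INIT_PLAY_MAIN|tts|Hello from Well"
    assert decode_state(state) == ['INIT_PLAY_MAIN', 'tts', 'Hello from Well']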
@@ -13332,6 +13654,151 @@ def handle_telnyx_webhook2(webhook_data, remote_addr, request_id): # Renamed log
         # Depending on the error, Telnyx might retry if it doesn't get a 2xx.
         return "Internal Server Error", 500

+def handle_telnyx_webhook3(webhook_data, remote_addr, request_id):
+    """
+    Processes Telnyx webhook events with full IVR logic for repeating messages.
+    This function should be added to your well-api.py.
+    """
+    logger.info(f"Processing webhook in handle_telnyx_webhook3 from {remote_addr}, Request-ID: {request_id}")
+
+    # --- ADAPT THIS SECTION to your app's config management ---
+    # This example assumes config values are accessible as global constants or from a dict.
+    # Replace these with your actual config access method (e.g., self.config['...'])
+    config = {
+        'api_key': TELNYX_API_KEY,
+        'dtmf_timeout_seconds': 10,
+        'initial_silence_ms': 500,
+        'replay_silence_ms': 100,
+        'default_tts_voice': 'female',
+        'default_tts_language': 'en-US',
+        'client_state_prefix': 'well_api_state',
+        'inbound_greeting': 'Thank you for calling. We will be with you shortly.'
+    }
+    # --- END ADAPTATION SECTION ---
+
+    try:
+        StoreToDB(webhook_data) # Call your DB storage function first
+
+        data, payload = webhook_data.get('data', {}), webhook_data.get('data', {}).get('payload', {})
+        event_type, record_type, ccid = data.get('event_type'), data.get('record_type'), payload.get('call_control_id')
+        logger.info(f"EVENT '{event_type}' ({record_type})" + (f", CCID: {ccid}" if ccid else ""))
+
+        if record_type != 'event':
+            logger.info(f" -> Non-voice event ('{record_type}') received. Ignoring in this handler.")
+            return True
+
+        b64_client_state = payload.get("client_state")
+        decoded_parts = decode_state(b64_client_state)
+        state_name = decoded_parts[0] if decoded_parts else None
+        if state_name: logger.info(f" State Name Received: '{state_name}'")
+
+        current_api_key = config['api_key']
+
+        # --- State Machine Logic ---
+        if event_type == 'call.answered':
+            if payload.get('direction') == 'incoming':
+                logger.info(" -> Inbound call detected. Playing generic greeting and hanging up.")
+                next_state = encode_state(['INBOUND_GREETING_HUP'])
+                speak_params = {"payload": config['inbound_greeting'], "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
+                send_telnyx_command("actions/speak", speak_params, current_api_key)
+            else: # Outgoing call
+                audio_url = find_custom_header(payload.get('custom_headers'), 'X-Audio-Url')
+                tts_payload = find_custom_header(payload.get('custom_headers'), 'X-TTS-Payload')
+                media_type = "audio" if audio_url else "tts" if tts_payload else "none"
+                media_value = audio_url or tts_payload
+                if media_value:
+                    logger.info(f" -> Outbound call. Playing {config['initial_silence_ms']}ms silence buffer.")
+                    next_state = encode_state(['INIT_PLAY_MAIN', media_type, media_value])
+                    send_telnyx_command("actions/play_silence", {"milliseconds": str(config['initial_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
+                else:
+                    logger.warning(" -> Outbound call, but no audio/tts payload. Hanging up.")
+                    send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+
+        elif event_type == 'call.playback.ended':
+            if state_name == 'INIT_PLAY_MAIN': # Silence ended
+                logger.info(" -> Silence buffer ended. Playing main message.")
+                _, media_type, media_value = decoded_parts
+                next_state = encode_state(['MAIN_MEDIA_PLAYED', media_type, media_value])
+                if media_type == "audio":
+                    send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
+                elif media_type == "tts":
+                    params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
+                    send_telnyx_command("actions/speak", params, current_api_key)
+            elif state_name == 'REPLAY_SILENCE': # Replay silence ended
+                logger.info(" -> Replay silence ended. Replaying main message.")
+                _, media_type, media_value = decoded_parts
+                next_state = encode_state(['REPLAYING_MEDIA', media_type, media_value])
+                if media_type == "audio":
+                    send_telnyx_command("actions/playback_start", {"audio_url": media_value, "call_control_id": ccid, "client_state": next_state}, current_api_key)
+                elif media_type == "tts":
+                    params = {"payload": media_value, "voice": config['default_tts_voice'], "language": config['default_tts_language'], "call_control_id": ccid, "client_state": next_state}
+                    send_telnyx_command("actions/speak", params, current_api_key)
+            elif state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']: # Actual audio file ended
+                logger.info(f" -> Main audio playback finished. Playing options menu.")
+                _, media_type, media_value = decoded_parts
+                next_state = encode_state(['WAITING_DTMF', media_type, media_value])
+                options_prompt = "press 0 to repeat the message or press pound to hang up."
+                gather_params = {
+                    "payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
+                    "valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
+                    "call_control_id": ccid, "client_state": next_state
+                }
+                send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
+            else:
+                logger.warning(f" -> Playback ended with unhandled state '{state_name}'. Hanging up.")
+                send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+
+        elif event_type == 'call.speak.ended':
+            if state_name in ['MAIN_MEDIA_PLAYED', 'REPLAYING_MEDIA']:
+                logger.info(f" -> Main message TTS finished. Playing options menu.")
+                _, media_type, media_value = decoded_parts
+                next_state = encode_state(['WAITING_DTMF', media_type, media_value])
+                options_prompt = "press 0 to repeat the message or press pound to hang up."
+                gather_params = {
+                    "payload": options_prompt, "voice": config['default_tts_voice'], "language": config['default_tts_language'],
+                    "valid_digits": "0#", "max_digits": 1, "timeout_millis": config['dtmf_timeout_seconds'] * 1000, "terminating_digit": "#",
+                    "call_control_id": ccid, "client_state": next_state
+                }
+                send_telnyx_command("actions/gather_using_speak", gather_params, current_api_key)
+            elif state_name == 'INBOUND_GREETING_HUP':
+                logger.info(" -> Inbound greeting finished. Hanging up.")
+                send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+            else:
+                logger.warning(f" -> Speak ended with unhandled state '{state_name}'. Hanging up.")
+                send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+
+        elif event_type == 'call.dtmf.received':
+            digit = payload.get('digit')
+            logger.info(f" DTMF Received: Digit='{digit}'")
+            if digit == '#':
+                logger.info(" -> '#' received. Terminating call immediately.")
+                send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+
+        elif event_type == 'call.gather.ended':
+            logger.info(f" -> Gather ended. Digits received: '{payload.get('digits')}', Status: '{payload.get('status')}'")
+            if state_name == 'WAITING_DTMF':
+                digits = payload.get('digits')
+                _, media_type, media_value = decoded_parts
+                if digits == "0":
+                    logger.info(f" -> '0' pressed. Playing {config['replay_silence_ms']}ms silence before replay.")
+                    next_state = encode_state(['REPLAY_SILENCE', media_type, media_value])
+                    send_telnyx_command("actions/play_silence", {"milliseconds": str(config['replay_silence_ms']), "call_control_id": ccid, "client_state": next_state}, current_api_key)
+                else:
+                    logger.info(" -> Gather ended with non-repeat condition. Hanging up.")
+                    send_telnyx_command("actions/hangup", {"call_control_id": ccid}, current_api_key)
+            else:
+                logger.warning(f" -> Gather ended with unhandled state '{state_name}'.")
+
+        elif event_type == 'call.hangup':
+            logger.info(f" Call Hangup Event: Cause='{payload.get('cause')}'")
+        else:
+            logger.info(f" -> Unhandled Voice Event: '{event_type}' with state '{state_name}'.")
+
+        return True # Return app-specific success
+    except Exception as e:
+        logger.exception(f"Error in handle_telnyx_webhook3: {e}")
+        return False
+
 #==================================== ADD FUNCTIONS BEFORE ============================================

 # Main API class
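Traced end to end, the handler is a small replay loop; the outbound path moves through the client states like this (a summary of the code above, not additional behavior):

    # call.answered (outgoing)             -> play_silence, state INIT_PLAY_MAIN
    # call.playback.ended (INIT_PLAY_MAIN) -> play/speak main message, state MAIN_MEDIA_PLAYED
    # call.playback.ended / call.speak.ended (MAIN_MEDIA_PLAYED or REPLAYING_MEDIA)
    #                                      -> gather "press 0 ... or pound ...", state WAITING_DTMF
    # call.gather.ended (WAITING_DTMF, digits == "0")
    #                                      -> play_silence, state REPLAY_SILENCE, then replay
    # call.gather.ended (anything else) or DTMF '#' -> hangup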
@@ -15633,7 +16100,7 @@ class WellApi:
                tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
            )

-        presence_map = optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
+        presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)

         #last_device_id = 0
         #for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
@@ -16009,7 +16476,7 @@ class WellApi:
                tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))
            )

-        presence_map = optimized_radar_processing(my_data, start_time, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)
+        presence_map = optimized_radar_processing(my_data, start_time_, id2well_id, device_id_2_threshold, device_field_indexes, presence_map, data_type)

         #last_device_id = 0
         #for radar_read in my_data: #(datetime.datetime(2025, 4, 28, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(days=-1, seconds=61200))), 559, 6.512857142857143, 6.91, 9.28)
@@ -16506,6 +16973,119 @@ class WellApi:

             elif function == "activities_report_details":
                 deployment_id = form_data.get('deployment_id')
+
+                timezone_str = GetTimeZoneOfDeployment(deployment_id)
+                filterr = form_data.get('filter')
+                if filterr == None:
+                    filterr = 6
+                else:
+                    filterr = int(filterr)
+
+                refresh = form_data.get('refresh') == "1"
+                ddate = current_date_at_tz(timezone_str)
+                timee = LocalDateToUTCEpoch(ddate, timezone_str) + 5 #add 5s so the date boundary is avoided
+                devices_list, device_ids = GetProximityList(deployment_id, timee)
+
+                #Here we need to add per day (all based on Z-graph data!):
+                #Bathroom visits number
+                #Bathroom time spent
+                #Sleep wakes number (as breaks in the Z-graph indicate in the 10PM to 9AM period)
+                #Sleep length (for now, add all times seen in bedroom)
+                #Kitchen visits number
+                #Kitchen time spent
+                #Most frequented room visits number
+                #Most frequented room time spent
+
+                #Let's find the device_id of the bathroom sensor
+
+                bathroom_device_id, location_ba, bathroom_well_id = FindDeviceByRole(deployment_id, ["Bathroom Main", "Bathroom", "Bathroom Guest"])
+                bedroom_device_id, location_be, bedroom_well_id = FindDeviceByRole(deployment_id, ["Bedroom Master", "Bedroom", "Bedroom Guest"])
+                kitchen_device_id, location_ke, kitchen_well_id = FindDeviceByRole(deployment_id, ["Kitchen"])
+                most_present_device_id, location_ot, most_present_well_id = FindDeviceByRole(deployment_id, []) #this will find most_present (as defined in the "other" field of the device record)
+
+                if isinstance(location_ot, int):
+                    other_location = location_names[location_ot]
+                else:
+                    other_location = location_ot
+
+                #weekly
+                week_dates = get_week_days_and_dates(7, timezone_str)
+                month_dates = get_week_days_and_dates(30, timezone_str)
+                six_months_dates = get_week_days_and_dates(180, timezone_str)
+
+                other_color = Loc2Color[other_location][0]
+                rgb_string = f"rgb({other_color[0]}, {other_color[1]}, {other_color[2]})"
+
+                rooms_reports = [("Bathroom", "blue", bathroom_device_id, bathroom_well_id), ("Bedroom", "green", bedroom_device_id, bedroom_well_id), ("Kitchen", "red", kitchen_device_id, kitchen_well_id), (other_location, rgb_string, most_present_device_id, most_present_well_id)]
+
+                six_months_report = []
+                for room_details in rooms_reports:
+                    device_id = room_details[2]
+                    if device_id > 0:
+
+                        well_id = room_details[3]
+                        radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+                        room = {"name": room_details[0], "color": room_details[1]}
+                        data = []
+
+                        for day_activity in six_months_dates:
+                            datee = day_activity[0]
+                            hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+                            if hours > 18:
+                                print("Too long 6m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+                            data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
+                            data.append(data_record)
+
+                        room["data"] = data
+                        six_months_report.append(room)
+
+                weekly_report = []
+                for room_details in rooms_reports:
+                    device_id = room_details[2]
+                    if device_id > 0:
+                        well_id = room_details[3]
+                        radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+                        room = {"name": room_details[0], "color": room_details[1]}
+                        data = []
+
+                        for day_activity in week_dates:
+                            datee = day_activity[0]
+                            hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+                            data_record = { "title": day_activity[1], "events": events_count, "hours": hours}
+                            data.append(data_record)
+
+                        room["data"] = data
+                        weekly_report.append(room)
+
+                monthly_report = []
+                for room_details in rooms_reports:
+                    device_id = room_details[2]
+                    if device_id > 0:
+                        well_id = room_details[3]
+                        radar_threshold_group_st = {device[1]: device[5] for device in devices_list}[device_id]
+                        room = {"name": room_details[0], "color": room_details[1]}
+                        data = []
+
+                        for day_activity in month_dates:
+                            datee = day_activity[0]
+                            hours, events_count = GetActivities(device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+                            #if datee == "2025-05-20" and device_id == 572:
+                            #    print(hours)
+                            if hours > 18:
+                                print("Too long m!!!", device_id, well_id, datee, filterr, refresh, timezone_str, radar_threshold_group_st)
+
+                            data_record = { "title": str(day_activity[2]), "events": events_count, "hours": hours}
+                            data.append(data_record)
+
+                        room["data"] = data
+                        monthly_report.append(room)
+
                 result_dictionary = {
                     "alert_text": "No alert",
                     "alert_color": "bg-green-100 text-green-700",
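Each report entry built by the loops above has the same shape, which the chart_data assignments further down splice into the response (values here are illustrative):

    room = {
        "name": "Bathroom",
        "color": "blue",
        "data": [
            {"title": "Wednesday", "events": 12, "hours": 1.25},  # weekly uses weekday names
            # monthly and six-month reports use str(day_of_month) as "title"
        ],
    }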
@@ -16515,9 +17095,9 @@ class WellApi:
                     "rooms": [
                         {
                             "name": "Bathroom",
-                            "color": "purple",
+                            "color": "blue",
                             "data": [
-                                { "title": "Monday", "events": 186, "hours": 80 },
+                                { "title": "Monday", "events": 186, "hours": 80.56 },
                                 { "title": "Tuesday", "events": 305, "hours": 200 },
                                 { "title": "Wednesday", "events": 237, "hours": 120 },
                                 { "title": "Thursday", "events": 73, "hours": 190 },
@@ -16528,7 +17108,7 @@ class WellApi:
                         },
                         {
                             "name": "Bedroom",
-                            "color": "#3b82f6",
+                            "color": "green",
                             "data": [
                                 { "title": "Monday", "events": 186, "hours": 80 },
                                 { "title": "Tuesday", "events": 305, "hours": 200 },
@@ -16541,7 +17121,7 @@ class WellApi:
                         },
                         {
                             "name": "Kitchen",
-                            "color": "orange",
+                            "color": "red",
                             "data": [
                                 { "title": "Monday", "events": 186, "hours": 80 },
                                 { "title": "Tuesday", "events": 305, "hours": 200 },
@@ -16554,7 +17134,7 @@ class WellApi:
                         },
                         {
                             "name": "Other",
-                            "color": "hotpink",
+                            "color": "yellow",
                             "data": [
                                 { "title": "Monday", "events": 186, "hours": 80 },
                                 { "title": "Tuesday", "events": 305, "hours": 200 },
@@ -16756,6 +17336,12 @@ class WellApi:
                     ]
                 }
+
+                result_dictionary["chart_data"][0]["rooms"] = weekly_report
+                result_dictionary["chart_data"][1]["rooms"] = monthly_report
+                result_dictionary["chart_data"][2]["rooms"] = six_months_report
+
+
                 payload = result_dictionary #{'result_dictionary': result_dictionary}
                 resp.media = package_response(payload)
                 resp.status = falcon.HTTP_200
@@ -16849,10 +17435,7 @@ class WellApi:
                 details["location_list"] = location_list
                 settings = {"wellness_score": False, "last_seen": False, "sleep_report": True, "activity_report": True, "temperature": True, "humidity": True, "air_pressure": True, "light": True, "air_quality": True, "radar": True, "other_activities": False}
                 details["settings"] = settings
-
-
-
                 result_list.append(details)

             payload = {'result_list': result_list}
             resp.media = package_response(payload)
             resp.status = falcon.HTTP_200