Added get_sensor_matrix function

This commit is contained in:
NucBox_EVO-X2\robert 2026-01-08 17:39:29 -08:00
parent 556c64f311
commit 5a72cc9dfb
2 changed files with 700 additions and 8 deletions

View File

@ -15,3 +15,4 @@ pandas
matplotlib matplotlib
redis redis
requests requests
msgpack

View File

@ -45,6 +45,7 @@ import uuid
import csv import csv
import random import random
import urllib.parse import urllib.parse
import msgpack
base_url = "http://192.168.68.70:5050" base_url = "http://192.168.68.70:5050"
@ -13014,30 +13015,51 @@ def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, ra
AND time <= '{time_to_str}' AND time <= '{time_to_str}'
ORDER BY time ASC; ORDER BY time ASC;
""" """
elif sensor[0] == "s": elif sensor[0] == "s" and sensor[1:].isdigit():
# For sensors whose name starts with "s" (for example, smell sensors) # For sensors s0 through s79
sensor_num = int(sensor[1:])
# Determine the mtype condition based on sensor number
if 0 <= sensor_num <= 9:
mtype_condition = "(mtype = 0 OR mtype = 100)"
elif 10 <= sensor_num <= 79:
# s10-s19 -> 110, s20-s29 -> 120, ..., s70-s79 -> 170
mtype_value = 100 + (sensor_num // 10) * 10
mtype_condition = f"mtype = {mtype_value}"
else:
# For sensors outside s0-s79, no mtype filter (or handle as error)
mtype_condition = "TRUE"
# For s10+, the actual column is s0-s9 (sensor_num mod 10)
if sensor_num >= 10:
actual_column = f"s{sensor_num % 10}"
else:
actual_column = sensor
if use_bucket: if use_bucket:
sqlr = f""" sqlr = f"""
SELECT time_bucket('{bucket_interval}', time) AS time, SELECT time_bucket('{bucket_interval}', time) AS time,
{avgmax}({sensor}) AS smell {avgmax}({actual_column}) AS smell
FROM sensor_readings FROM sensor_readings
WHERE device_id = {device_id} WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}' AND {actual_column} >= '{legal_min}'
AND {sensor} <= '{legal_max}' AND {actual_column} <= '{legal_max}'
AND time >= '{time_from_str}' AND time >= '{time_from_str}'
AND time <= '{time_to_str}' AND time <= '{time_to_str}'
AND {mtype_condition}
GROUP BY time_bucket('{bucket_interval}', time) GROUP BY time_bucket('{bucket_interval}', time)
ORDER BY time ASC; ORDER BY time ASC;
""" """
else: else:
sqlr = f""" sqlr = f"""
SELECT time, {sensor} AS smell SELECT time, {actual_column} AS smell
FROM sensor_readings FROM sensor_readings
WHERE device_id = {device_id} WHERE device_id = {device_id}
AND {sensor} >= '{legal_min}' AND {actual_column} >= '{legal_min}'
AND {sensor} <= '{legal_max}' AND {actual_column} <= '{legal_max}'
AND time >= '{time_from_str}' AND time >= '{time_from_str}'
AND time <= '{time_to_str}' AND time <= '{time_to_str}'
AND {mtype_condition}
ORDER BY time ASC; ORDER BY time ASC;
""" """
elif sensor == "co2": elif sensor == "co2":
@ -13201,6 +13223,243 @@ def ReadSensor3(device_id, sensor, time_from_epoch, time_to_epoch, data_type, ra
return result return result
def ReadSensorBatch(device_ids, sensor, time_from_epoch, time_to_epoch, data_type, radar_part, bucket_size="no"):
    """
    Fetch sensor data for ALL devices in a single query.

    Batched counterpart of ReadSensor3: one SELECT with ``device_id IN (...)``
    instead of one query per device, with the rows grouped into per-device
    lists afterwards.

    Args:
        device_ids: iterable of integer device ids; an empty iterable returns {}.
        sensor: "radar", "s0".."s79", "humidity", "temperature", "light", or any
            other sensor_readings column (handled by the generic fallback branch).
        time_from_epoch, time_to_epoch: UTC epoch seconds; swapped when reversed.
        data_type: unused here; kept for signature parity with ReadSensor3.
        radar_part: radar only - "s28" averages columns s2..s8; any other truthy
            value is used verbatim as the column expression; falsy falls back to "s2".
        bucket_size: "no" for raw rows, otherwise a key of the mapping below
            (unknown keys pass through verbatim as the time_bucket interval).

    Returns:
        dict of {device_id: [(timestamp, value), ...]}, each list time-ascending.

    NOTE(review): sensor, radar_part and bucket_size are interpolated directly
    into the SQL text, so callers must pass only validated values (see
    _parse_sensor_list) - consider parameterized queries.
    """
    from collections import defaultdict  # hoisted from the result-grouping section below

    if not device_ids:
        return {}
    if time_to_epoch < time_from_epoch:
        time_to_epoch, time_from_epoch = time_from_epoch, time_to_epoch

    # Convert epoch seconds to UTC timestamp strings for the SQL range predicates.
    time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
    time_from_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")
    time_utc = datetime.datetime.fromtimestamp(float(time_to_epoch), tz=timezone.utc)
    time_to_str = time_utc.strftime("%Y-%m-%d %H:%M:%S%z")

    # Per-sensor plausibility bounds; a KeyError here means an unknown sensor name.
    legal_min, legal_max, window = sensor_legal_values[sensor]

    # Build device_ids list for SQL IN clause.
    device_ids_str = ",".join(str(d) for d in device_ids)

    use_bucket = bucket_size != "no"
    if use_bucket:
        mapping = {
            "10s": "10 seconds",
            "1m": "1 minute",
            "5m": "5 minutes",
            "10m": "10 minutes",
            "15m": "15 minutes",
            "30m": "30 minutes",
            "1h": "1 hour"
        }
        bucket_interval = mapping.get(bucket_size, bucket_size)
    avgmax = "AVG"

    # Build SQL based on sensor type (same shapes as ReadSensor3, plus device_id IN (...)).
    if sensor == "radar":
        avgmax = "MAX"
        radar_expr = "(s2+s3+s4+s5+s6+s7+s8)/7" if radar_part == "s28" else (radar_part or "s2")
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       {avgmax}({radar_expr}) AS value
                FROM radar_readings
                WHERE device_id IN ({device_ids_str})
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, {radar_expr} AS value
                FROM radar_readings
                WHERE device_id IN ({device_ids_str})
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                ORDER BY device_id, time ASC;
            """
    elif sensor[0] == "s" and sensor[1:].isdigit():
        # Smell sensors s0..s79: s10+ map onto the physical columns s0..s9, with
        # an mtype discriminator selecting the logical bank (s1X -> 110, ..., s7X -> 170).
        sensor_num = int(sensor[1:])
        if 0 <= sensor_num <= 9:
            mtype_condition = "(mtype = 0 OR mtype = 100)"
        elif 10 <= sensor_num <= 79:
            mtype_value = 100 + (sensor_num // 10) * 10
            mtype_condition = f"mtype = {mtype_value}"
        else:
            # Outside s0-s79: no mtype filter.
            mtype_condition = "TRUE"
        actual_column = f"s{sensor_num % 10}" if sensor_num >= 10 else sensor
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       {avgmax}({actual_column}) AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND {actual_column} >= {legal_min}
                  AND {actual_column} <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND {mtype_condition}
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, {actual_column} AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND {actual_column} >= {legal_min}
                  AND {actual_column} <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND {mtype_condition}
                ORDER BY device_id, time ASC;
            """
    elif sensor == "humidity":
        # NOTE(review): humidity is offset by +5.0 in SQL - presumably a fixed
        # calibration correction; confirm before relying on raw values.
        # (Was a placeholder-less f-string; plain literal is equivalent.)
        col_expr = "1 * humidity + 5.0"
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       {avgmax}({col_expr}) AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND humidity >= {legal_min}
                  AND humidity <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, {col_expr} AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND humidity >= {legal_min}
                  AND humidity <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                ORDER BY device_id, time ASC;
            """
    elif sensor == "temperature":
        # mtype > 4 presumably restricts to message types that carry
        # temperature - TODO confirm against the ingestion schema.
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       AVG(temperature) AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND temperature >= {legal_min}
                  AND temperature <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND mtype > 4
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, temperature AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND temperature >= {legal_min}
                  AND temperature <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND mtype > 4
                ORDER BY device_id, time ASC;
            """
    elif sensor == "light":
        # Light uses MAX per bucket; mtype > 4 as for temperature.
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       MAX(light) AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND light >= {legal_min}
                  AND light <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND mtype > 4
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, light AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND light >= {legal_min}
                  AND light <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                  AND mtype > 4
                ORDER BY device_id, time ASC;
            """
    # Add more sensor types as needed (co2, voc, etc.)
    else:
        # Generic fallback: treat the sensor name as a sensor_readings column.
        if use_bucket:
            sqlr = f"""
                SELECT device_id,
                       time_bucket('{bucket_interval}', time) AS time,
                       AVG({sensor}) AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND {sensor} >= {legal_min}
                  AND {sensor} <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                GROUP BY device_id, time_bucket('{bucket_interval}', time)
                ORDER BY device_id, time ASC;
            """
        else:
            sqlr = f"""
                SELECT device_id, time, {sensor} AS value
                FROM sensor_readings
                WHERE device_id IN ({device_ids_str})
                  AND {sensor} >= {legal_min}
                  AND {sensor} <= {legal_max}
                  AND time >= '{time_from_str}'
                  AND time <= '{time_to_str}'
                ORDER BY device_id, time ASC;
            """

    logger.debug(f"sqlr = {sqlr}")
    with get_db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(sqlr)
            results = cur.fetchall()

    # Group rows by device_id; the query orders by (device_id, time), so each
    # per-device list is already time-ascending.
    device_data = defaultdict(list)
    for row in results:
        device_id, timestamp, value = row
        device_data[device_id].append((timestamp, value))
    return dict(device_data)
def ReadRadarDetail(device_id, sensor, time_from_epoch, time_to_epoch, alt_key_state): def ReadRadarDetail(device_id, sensor, time_from_epoch, time_to_epoch, alt_key_state):
time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc) time_utc = datetime.datetime.fromtimestamp(float(time_from_epoch), tz=timezone.utc)
@ -13553,6 +13812,99 @@ def clean_data_pd(line_part_t, window=5, percentile=99):
# Return filtered data # Return filtered data
return list(zip(x[good_points], y[good_points])) return list(zip(x[good_points], y[good_points]))
def clean_data_batch(device_data_dict, device_ids, window=5, percentile=99):
    """
    Remove outliers from every device's time series.

    For each device, a centered rolling median of its values is computed and
    any sample whose absolute deviation from that median exceeds the given
    percentile of the device's own deviations is dropped.

    Args:
        device_data_dict: {device_id: [(timestamp, value), ...], ...}
        device_ids: device IDs to process (missing/empty ones yield []).
        window: rolling-median window size.
        percentile: deviation percentile used as the outlier cutoff.

    Returns:
        {device_id: [(timestamp, value), ...], ...} - cleaned data.
    """
    cleaned = {device_id: [] for device_id in device_ids}
    for device_id in device_ids:
        samples = device_data_dict.get(device_id, [])
        if not samples:
            continue
        frame = pd.DataFrame(samples, columns=['timestamp', 'value'])
        # Centered rolling median; min_periods=1 keeps the series edges defined.
        rolling_median = frame['value'].rolling(
            window=window, center=True, min_periods=1
        ).median()
        deviation = (frame['value'] - rolling_median).abs()
        # Per-device percentile threshold on the deviations.
        cutoff = np.percentile(deviation, percentile)
        keepers = frame[deviation <= cutoff]
        cleaned[device_id] = list(zip(keepers['timestamp'], keepers['value']))
    return cleaned
def process_sensor_fast(device_data, device_ids, sensor, minute_start, minute_end,
temp_calib, window=5, percentile=99):
"""Process sensor data directly to matrix - no timestamp handling needed."""
minute_count = minute_end - minute_start + 1
room_count = len(device_ids)
device_idx_map = {did: i for i, did in enumerate(device_ids)}
# Pre-allocate output matrix
matrix = np.full((room_count, minute_count), np.nan, dtype=np.float64)
for device_id in device_ids:
raw_data = device_data.get(device_id, [])
if not raw_data:
continue
# Extract ONLY values - ignore timestamps completely
values = np.array([row[1] for row in raw_data], dtype=np.float64)
# Quick median filter using scipy (C implementation)
if len(values) >= window:
from scipy.ndimage import median_filter
medians = median_filter(values, size=window, mode='nearest')
deviations = np.abs(values - medians)
threshold = np.percentile(deviations, percentile)
mask = deviations <= threshold
values = values[mask]
# Temperature calibration (vectorized)
if sensor == "temperature":
calib_str = temp_calib.get(device_id, "")
calib = float(calib_str.split(",")[2]) if "," in calib_str else -10.0
values += calib
# Write directly to matrix - position IS the minute index
row_idx = device_idx_map[device_id]
n_values = min(len(values), minute_count)
matrix[row_idx, :n_values] = values[:n_values]
# Convert NaN to 0 and cast to uint16
matrix = np.nan_to_num(matrix, nan=0.0)
if sensor == "light":
return matrix.astype(np.uint16)
else:
return matrix.astype(np.float32)
def CombineStripes(result_filename, stripes_files): def CombineStripes(result_filename, stripes_files):
try: try:
# Open the first image to get the width and initialize the height # Open the first image to get the width and initialize the height
@ -19355,6 +19707,81 @@ def get_job_sensor_bucketed_data(form_data):
finally: finally:
if conn: conn.close() if conn: conn.close()
def _parse_sensor_list(sensors):
    """
    Split a comma-separated sensor string into validated sensor names.

    Falls back to "light,humidity,radar" when *sensors* is empty or None.
    Blank entries are dropped; every remaining name must be in s_table_80.

    Raises:
        ValueError: for the first name not present in s_table_80.
    """
    requested = (sensors or "light,humidity,radar").strip()
    names = [piece.strip() for piece in requested.split(",") if piece.strip()]
    unsupported = next((name for name in names if name not in s_table_80), None)
    if unsupported is not None:
        raise ValueError(f"Unsupported sensor: {unsupported}")
    return names
def _rows_to_uint16_matrix(room_ids, minute_start, minute_end, sensor_rows):
room_count = len(room_ids)
minute_count = minute_end - minute_start + 1
room_id_to_index = {room_id: index for index, room_id in enumerate(room_ids)}
matrix_values = np.zeros((room_count, minute_count), dtype=np.uint16)
for room_id, minute_of_day, value in sensor_rows:
room_index = room_id_to_index.get(int(room_id))
if room_index is None:
continue
minute_index = int(minute_of_day) - minute_start
if minute_index < 0 or minute_index >= minute_count:
continue
numeric_value = int(value)
if numeric_value < 0:
numeric_value = 0
if numeric_value > 65535:
numeric_value = 65535
matrix_values[room_index, minute_index] = np.uint16(numeric_value)
return matrix_values
def _rows_to_right_format_matrix(room_ids, minute_start, minute_end, sensor_rows, sensor):
room_count = len(room_ids)
minute_count = minute_end - minute_start + 1
room_id_to_index = {room_id: index for index, room_id in enumerate(room_ids)}
if sensor == "light":
matrix_values = np.zeros((room_count, minute_count), dtype=np.uint16)
else:
matrix_values = np.zeros((room_count, minute_count), dtype=np.float32)
#lets format data with appropriate format depending on sensor
#temperature = np.float32
#humidity = np.float32
#light = np.int16
#pressure = np.float32
#radar = np.float32
#smell_components = np.float32
for room_id, minute_of_day, value in sensor_rows:
room_index = room_id_to_index.get(int(room_id))
if room_index is None:
continue
minute_index = int(minute_of_day) - minute_start
if minute_index < 0 or minute_index >= minute_count:
continue
if sensor == "light":
numeric_value = int(value)
if numeric_value < 0:
numeric_value = 0
if numeric_value > 65535:
numeric_value = 65535
matrix_values[room_index, minute_index] = np.uint16(numeric_value)
else:
matrix_values[room_index, minute_index] = np.float32(value)
return matrix_values
#=== End WellDry API functions ===# #=== End WellDry API functions ===#
#==================================== ADD FUNCTIONS BEFORE ============================================ #==================================== ADD FUNCTIONS BEFORE ============================================
@ -21282,6 +21709,270 @@ class WellApi:
resp.status = falcon.HTTP_200 resp.status = falcon.HTTP_200
#AddToLog(payload) #AddToLog(payload)
#return #return
elif function == "get_sensor_matrix_slow":
# Inputs:
# user_name and token
# deployment_id - from which report gets deployment set (all rooms and devices)
# date - one day in a format YYYY-MM-DD
# minute_start - starting minute in a day
# minute_end - ending minute in a day
# sensors - sensor1,sensor2 ... temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
# Output: structure with the following info
# chart_data with rooms : [list]
deployment_id = form_data.get('deployment_id')
ddate = form_data.get('date')
minute_start = int(form_data.get('minute_start'))
minute_end = int(form_data.get('minute_end'))
sensors = form_data.get('sensors')
radar_part = form_data.get('radar_part')
bucket_size = form_data.get('bucket_size')
radar_part = form_data.get('radar_part')
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
bucket_size = "1m"
ddate = ddate.replace("_","-")
to_date = form_data.get('to_date')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
if to_date == None:
to_date = ddate
else:
to_date = to_date.replace("_","-")
ddate, to_date = ensure_date_order(ddate, to_date)
minute_start, minute_end = ensure_date_order(minute_start, minute_end)
d1 = datetime.datetime.strptime(ddate, '%Y-%m-%d')
d2 = datetime.datetime.strptime(to_date, '%Y-%m-%d')
days = (d2 - d1).days + 1 # +1 for inclusive count
sensor_names = _parse_sensor_list(sensors)
radar_part = ""
sensor_data = {}
#days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
#sensor = form_data.get('sensor') # one sensor
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
data_type = form_data.get('data_type')
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(ddate, time_zone_s, days) #>= #<
epoch_to_utc = epoch_from_utc + (1 + minute_end) * 60
epoch_from_utc += minute_start * 60
# obtain devices_list for deployment_id
#timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
room_count = len(device_ids)
minute_count = minute_end - minute_start + 1
sensor_data = {}
sensors_payload = {}
latest_as_of_unix_ts = None
computed_ts = time.time()
# see https://www.w3schools.com/cssref/css_colors.php
st = time.time()
for sensor in sensor_names:
sensor_rows = []
for device_id in device_ids:
temp_calib, humid_calib = GetCalibMaps([device_id])
print("@1", time.time()-st)
line_part = ReadSensor3(device_id, sensor, epoch_from_utc, epoch_to_utc, data_type, radar_part, bucket_size)
print("@2", time.time()-st)
#sensor_rows = _fetch_sensor_rows_timescale(
# deployment_id=deployment_id,
# sensor_name=sensor_name,
# date_str=date_str,
# minute_start=minute_start,
# minute_end=minute_end,
# room_ids=room_ids,
#)
window = sensor_legal_values[sensor][2]
line_part_t = []
line_part_t = [(x[0].timestamp(), x[1]) for x in line_part]
st = time.time()
cleaned_values_t = clean_data_pd(line_part_t, window=window, percentile=99)
cleaned_values = cleaned_values_t
if sensor == "temperature":
if "," in temp_calib[device_id]:
temperature_calib = float(temp_calib[device_id].split(",")[2])
else:
temperature_calib = -10.0
cleaned_values = [(tim, value + temperature_calib) for tim, value in cleaned_values]
#leave only values
#cleaned_values = [value for timestamp, value in cleaned_values_t]
minute_index = 0
for timestamp, value in cleaned_values:
minute_of_day = minute_start + minute_index
minute_index += 1
sensor_rows.append((device_id, minute_of_day, value))
print("@3", time.time()-st)
sensor_matrix = _rows_to_right_format_matrix(
room_ids=device_ids,
minute_start=minute_start,
minute_end=minute_end,
sensor_rows=sensor_rows,
sensor=sensor
)
sensors_payload[sensor] = {
"data": sensor_matrix.tobytes(order="C")
}
# If you can compute "as_of" during query, set it here
latest_as_of_unix_ts = max(latest_as_of_unix_ts or 0, computed_ts)
response_payload = {
"ok": True,
"deployment_id": deployment_id,
"date": ddate,
"minute_start": minute_start,
"minute_end": minute_end,
"room_ids": device_ids,
"dtype": "uint16",
"shape": [room_count, minute_count],
"sensors": sensors_payload,
"as_of_unix_ts": latest_as_of_unix_ts or time.time(),
}
packed_bytes = msgpack.packb(response_payload, use_bin_type=True)
resp.status = falcon.HTTP_200
resp.content_type = "application/msgpack"
resp.data = packed_bytes
return
elif function == "get_sensor_matrix":
# Inputs:
# user_name and token
# deployment_id - from which report gets deployment set (all rooms and devices)
# date - one day in a format YYYY-MM-DD
# minute_start - starting minute in a day
# minute_end - ending minute in a day
# sensors - sensor1,sensor2 ... temperature/radar/etc.. see full list (tells what sensor data to be retrieved)
# radar_part - optional and applies only to radar (tells which segment of radar to be retrieved)
# bucket_size - ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
# Output: structure with the following info
# chart_data with rooms : [list]
deployment_id = form_data.get('deployment_id')
ddate = form_data.get('date')
minute_start = int(form_data.get('minute_start'))
minute_end = int(form_data.get('minute_end'))
sensors = form_data.get('sensors')
radar_part = form_data.get('radar_part')
bucket_size = form_data.get('bucket_size')
radar_part = form_data.get('radar_part')
buckets = ['no', '10s', '1m', '5m', '10m', '15m', '30m', '1h']
bucket_size = "no" if (result := form_data.get('bucket_size')) in (None, "") else (result.strip() if result.strip() in buckets else "no")
bucket_size = "1m"
ddate = ddate.replace("_","-")
to_date = form_data.get('to_date')
time_zone_s = GetTimeZoneOfDeployment(deployment_id)
if to_date == None:
to_date = ddate
else:
to_date = to_date.replace("_","-")
ddate, to_date = ensure_date_order(ddate, to_date)
minute_start, minute_end = ensure_date_order(minute_start, minute_end)
d1 = datetime.datetime.strptime(ddate, '%Y-%m-%d')
d2 = datetime.datetime.strptime(to_date, '%Y-%m-%d')
days = (d2 - d1).days + 1 # +1 for inclusive count
sensor_names = _parse_sensor_list(sensors)
radar_part = ""
sensor_data = {}
#days = (epoch_to_utc - epoch_from_utc) / (60 * 1440)
#sensor = form_data.get('sensor') # one sensor
#bucket_size = res2 if (res := form_data.get('bucket_size')) is not None and (res2 := str(res).strip()) and res2 in {'no', '10s', '1m', '5m', '10m', '15m', '30m', '1h'} else 'no'
data_type = form_data.get('data_type')
epoch_from_utc, epoch_to_utc = GetLocalTimeEpochsForDate(ddate, time_zone_s, days) #>= #<
epoch_to_utc = epoch_from_utc + (1 + minute_end) * 60
epoch_from_utc += minute_start * 60
# obtain devices_list for deployment_id
#timee = LocalDateToUTCEpoch(ddate, time_zone_s)+5
devices_list, device_ids = GetProximityList(deployment_id, epoch_from_utc)
room_count = len(device_ids)
minute_count = minute_end - minute_start + 1
sensor_data = {}
sensors_payload = {}
latest_as_of_unix_ts = None
computed_ts = time.time()
# see https://www.w3schools.com/cssref/css_colors.php
st = time.time()
temp_calib, humid_calib = GetCalibMaps(device_ids)
for sensor in sensor_names:
window = sensor_legal_values[sensor][2]
st = time.time()
device_data = ReadSensorBatch(
device_ids, sensor, epoch_from_utc, epoch_to_utc,
data_type, radar_part, bucket_size
)
print(f"ReadSensorBatch: {time.time()-st:.3f}")
st2 = time.time()
sensor_matrix = process_sensor_fast(
device_data, device_ids, sensor, minute_start, minute_end,
temp_calib, window, percentile=99
)
print(f"Processing: {time.time()-st2:.3f}")
sensors_payload[sensor] = {"data": sensor_matrix.tobytes(order="C")}
print(f"Total for {sensor}: {time.time()-st:.3f}\n")
# If you can compute "as_of" during query, set it here
latest_as_of_unix_ts = max(latest_as_of_unix_ts or 0, computed_ts)
response_payload = {
"ok": True,
"deployment_id": deployment_id,
"date": ddate,
"minute_start": minute_start,
"minute_end": minute_end,
"room_ids": device_ids,
"dtype": "uint16",
"shape": [room_count, minute_count],
"sensors": sensors_payload,
"as_of_unix_ts": latest_as_of_unix_ts or time.time(),
}
packed_bytes = msgpack.packb(response_payload, use_bin_type=True)
resp.status = falcon.HTTP_200
resp.content_type = "application/msgpack"
resp.data = packed_bytes
return
elif function == "request_device_slice": elif function == "request_device_slice":
deployment_id = form_data.get('deployment_id') deployment_id = form_data.get('deployment_id')
time_zone_s = GetTimeZoneOfDeployment(deployment_id) time_zone_s = GetTimeZoneOfDeployment(deployment_id)