Initial push
This commit is contained in:
parent
8dec350d2d
commit
1dbb91f8ba
16
.env
Normal file
16
.env
Normal file
@ -0,0 +1,16 @@
|
||||
# Database settings
|
||||
DB_NAME=wellnuo
|
||||
DB_USER=well_app
|
||||
DB_PASSWORD=well_app_2024
|
||||
DB_HOST=192.168.68.70
|
||||
DB_PORT=5432
|
||||
|
||||
# RabbitMQ settings
|
||||
RABBITMQ_URL=amqp://well_pipe:wellnuo_2024@192.168.68.70:5672//
|
||||
RABBITMQ_QUEUE=evgrid-dev-demo
|
||||
|
||||
# Processing settings
|
||||
PROCESSING_PERIOD=300
|
||||
QUEUE_LENGTH_THRESHOLD=1000
|
||||
BATCH_SIZE=100
|
||||
|
||||
20
Dockerfile
Normal file
20
Dockerfile
Normal file
@ -0,0 +1,20 @@
|
||||
FROM python:3.9-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install build dependencies for psycopg2
|
||||
RUN apk add --no-cache postgresql-dev gcc python3-dev musl-dev
|
||||
|
||||
# Install Python dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Create src directory and copy application code
|
||||
RUN mkdir -p /app/src
|
||||
COPY src/app.py /app/src/
|
||||
COPY src/new-ca.crt /app/src/new-ca.crt
|
||||
|
||||
# Expose port for health checks
|
||||
EXPOSE 8086
|
||||
|
||||
CMD ["python", "src/app.py"]
|
||||
37
queue-muncher.yml
Normal file
37
queue-muncher.yml
Normal file
@ -0,0 +1,37 @@
|
||||
version: 1.0
|
||||
provider:
|
||||
name: openfaas
|
||||
gateway: http://192.168.68.70:8084
|
||||
functions:
|
||||
queue-muncher:
|
||||
image: repo.eluxnetworks.net/queue-muncher:latest
|
||||
labels:
|
||||
com.openfaas.scale.zero: false
|
||||
network: "host" # host networking so the function can reach RabbitMQ/Postgres on 192.168.68.70
|
||||
environment:
|
||||
read_timeout: "1h"
|
||||
write_timeout: "1h"
|
||||
exec_timeout: "1h"
|
||||
|
||||
# RabbitMQ configuration
|
||||
RABBITMQ_URL: "amqp://well_pipe:wellnuo_2024@192.168.68.70:5672//"
|
||||
RABBITMQ_QUEUE: "evgrid-dev-demo"
|
||||
REDIS_HOST: "192.168.68.70"
|
||||
# Database configuration
|
||||
DB_NAME: "wellnuo"
|
||||
DB_USER: "well_app"
|
||||
DB_PASSWORD: "well_app_2024"
|
||||
DB_HOST: "192.168.68.70"
|
||||
DB_PORT: "5432"
|
||||
MQTTSERVER: "192.168.68.70"
|
||||
MQTT_PORT: "1883"
|
||||
# Processing settings
|
||||
PROCESSING_PERIOD: "300"
|
||||
QUEUE_LENGTH_THRESHOLD: "1000"
|
||||
BATCH_SIZE: "100"
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 256Mi
|
||||
requests:
|
||||
memory: 128Mi
|
||||
233
redis_test.py
Normal file
233
redis_test.py
Normal file
@ -0,0 +1,233 @@
|
||||
import ast
import os
import re
import time

import psycopg2
import redis
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
# PostgreSQL connection settings, read from the environment (populated
# from .env by load_dotenv() above).
DB_NAME = os.getenv('DB_NAME')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')
DB_PORT = os.getenv('DB_PORT')

# Connect to Redis (assuming it's running on localhost with default port).
# Shared module-level client used by all helpers below.
r = redis.Redis(host='localhost', port=6379, db=0)
|
||||
|
||||
def GetRedisString(key_name):
    """Fetch a Redis key and return its value decoded as a UTF-8 string.

    Parameters:
        key_name (str): Redis key to read.

    Returns:
        str | None: Decoded value, or None when the key is missing or
        the Redis call fails.
    """
    try:
        # r.get() returns None for a missing key, so .decode() raises
        # AttributeError in that case; RedisError covers connection/server
        # failures. (Was a bare except, which also swallowed SystemExit.)
        result = r.get(key_name).decode('utf-8')
    except (AttributeError, redis.RedisError):
        result = None
    return result
|
||||
|
||||
def GetRedisInt(key_name):
    """Fetch a Redis key and return its value parsed as an int.

    Parameters:
        key_name (str): Redis key to read.

    Returns:
        int | None: Parsed value, or None when the key is missing, the
        stored value is not an integer, or the Redis call fails.
    """
    try:
        # Missing key -> r.get() is None -> AttributeError on .decode();
        # non-numeric payload -> ValueError from int(); RedisError covers
        # connection failures. (Was a bare except.)
        result = int(r.get(key_name).decode('utf-8'))
    except (AttributeError, ValueError, redis.RedisError):
        result = None
    return result
|
||||
|
||||
def GetRedisFloat(key_name):
    """Fetch a Redis key and return its value parsed as a float.

    Parameters:
        key_name (str): Redis key to read.

    Returns:
        float | None: Parsed value, or None when the key is missing, the
        stored value is not numeric, or the Redis call fails.
    """
    try:
        # Missing key -> r.get() is None -> AttributeError on .decode();
        # non-numeric payload -> ValueError from float(); RedisError covers
        # connection failures. (Was a bare except.)
        result = float(r.get(key_name).decode('utf-8'))
    except (AttributeError, ValueError, redis.RedisError):
        result = None

    return result
|
||||
|
||||
# Manual smoke test: read an existing lastseen key, then write and read
# back a second one through the float helper.
value = GetRedisFloat('lastseen_501')
print(value)
r.set('lastseen_510', 1.444)

value = GetRedisFloat('lastseen_510')
print(value)
|
||||
|
||||
def get_db_connection():
    """Open and return a new PostgreSQL connection using the DB_* settings
    loaded from the environment."""
    return psycopg2.connect(
        dbname=DB_NAME,
        user=DB_USER,
        password=DB_PASSWORD,
        host=DB_HOST,
        port=DB_PORT,
    )
|
||||
|
||||
def GetLastPointQuery(device_id, field_name, threshold_value):
    """
    Generate a SQL query to find the last point in radar_readings where the
    specified field meets the threshold condition.

    Parameters:
        device_id (int | str): The device ID to filter by (must be numeric;
            coerced to int).
        field_name (str): The field to check, e.g. "s2_max", "s28_min".
            The bases "s28" and "m08" expand to averaged column expressions.
        threshold_value (float | str): The threshold value to compare
            against (must be numeric; coerced to float).

    Returns:
        str: SQL query string.

    Raises:
        ValueError: if device_id or threshold_value is not numeric, or if
            the field base is not a plain identifier.
    """
    # The query is assembled by f-string, not with bound parameters, so
    # coerce the interpolated values to numbers up front — a malicious
    # string can then never be spliced into the SQL text.
    device_id = int(device_id)
    threshold_value = float(threshold_value)

    # Parse the field name to get base field and operation.
    parts = field_name.split('_')
    base_field = parts[0]
    operation = parts[1] if len(parts) > 1 else None

    # Only a plain identifier may be interpolated as a column name.
    if not re.fullmatch(r'[A-Za-z][A-Za-z0-9]*', base_field):
        raise ValueError(f"invalid field name: {field_name!r}")

    # Composite bases expand to averaged column expressions.
    if base_field == 's28':
        field_expr = "(s2+s3+s4+s5+s6+s7+s8)/7"
    elif base_field == 'm08':
        field_expr = "(m0+m1+m2+m3+m4+m5+m6+m7+m8)/9"
    else:
        field_expr = base_field

    # "max" thresholds look for readings above the value; anything else
    # (e.g. "min") looks for readings below it.
    operator = ">" if operation == "max" else "<"

    # Generate the SQL query.
    query = f"""
    SELECT "time" AS point_time, {field_expr} AS point_value FROM radar_readings WHERE device_id = {device_id}
    AND {field_expr} {operator} {threshold_value} ORDER BY "time" DESC LIMIT 1;
    """

    return query
|
||||
|
||||
|
||||
def GetLastDetected(device_id, field_name, threshold_value):
    """Return the Unix timestamp of the last radar_readings row where
    *field_name* crossed *threshold_value* for the given device.

    Parameters:
        device_id (int | str): Device to query.
        field_name (str): Field/operation spec, e.g. "s3_max".
        threshold_value (float): Threshold to compare against.

    Returns:
        float | None: Timestamp of the last matching reading, or None when
        no reading ever crossed the threshold.
    """
    query = GetLastPointQuery(device_id, field_name, threshold_value)
    print(query)
    last_detected = None
    with get_db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(query)
            row = cur.fetchone()
            # fetchone() returns None when no row matched; the original
            # code indexed it unconditionally and raised TypeError.
            if row is not None:
                last_detected = row[0].timestamp()

    return last_detected
|
||||
|
||||
def UpdateLastSeen(new_message_dict):
    """Update the 'lastseen_<device_id>' Redis key from one radar message.

    Resolves the device's detection threshold (Redis cache first, falling
    back to the `devices` table and re-caching), derives the current radar
    value from the message, and records when the device last saw presence.

    Parameters:
        new_message_dict (dict): must contain 'device_id', 'time' (unix
            float) and 'radar' (a pair of numeric lists; exact layout is
            producer-defined — assumed to match src/app.py, confirm).
    """
    print(new_message_dict)

    device_id_s = str(new_message_dict['device_id'])

    # Fallback defaults; matches code in well-api.
    radar_threshold_signal = "s3_max"
    radar_threshold_value = 10

    # Cached value is a stringified [signal, threshold] list.
    threshold_details = GetRedisString('radar_threshold'+device_id_s)
    try:
        radar_threshold_list = ast.literal_eval(threshold_details)
        radar_threshold_signal = radar_threshold_list[0]
        radar_threshold_value = radar_threshold_list[1]
    except:
        # Key not found so read from DB, and store to key.
        # NOTE(review): this bare except also swallows DB/parse errors
        # raised inside the fallback below — confirm intended.
        sql = f"""
        SELECT "radar_threshold" AS threshold FROM devices WHERE device_id = {device_id_s};
        """

        with get_db_connection() as conn:
            with conn.cursor() as cur:
                cur.execute(sql)
                threshold_details = cur.fetchone()[0] #cur.fetchall()#
                print(threshold_details)
                radar_threshold_signal = "s3_max"
                radar_threshold_value = 10

                if threshold_details != None:

                    threshold_details_list = ast.literal_eval(threshold_details)
                    radar_threshold_signal = threshold_details_list[0]
                    radar_threshold_value = threshold_details_list[1]

                # Cache for next time, stringified the same way the
                # Redis-hit path above expects.
                r.set('radar_threshold'+device_id_s, str([radar_threshold_signal, radar_threshold_value]))



    # Let's determine if presence is detected.
    component = radar_threshold_signal.split("_")[0]
    radar_val = 0
    if component == "s28":
        # NOTE(review): [3:9] sums 6 elements, while the s28 expression in
        # GetLastPointQuery averages 7 columns (s2..s8); the sum is also
        # divided by radar[1][0] rather than the element count — confirm.
        radar_val = sum(new_message_dict['radar'][1][3:9]) / new_message_dict['radar'][1][0]
    elif component[0] == "s":
        # component[1] takes a single character, so only s0..s9 work
        # ("s12" would be read as index 1) — confirm intended.
        component_index = ast.literal_eval(component[1])
        radar_val = new_message_dict['radar'][1][1 + component_index] / new_message_dict['radar'][1][0]
    elif component[0] == "m":
        component_index = ast.literal_eval(component[1])
        radar_val = new_message_dict['radar'][0][5 + component_index] / new_message_dict['radar'][0][0]

    if radar_val > radar_threshold_value: #seen now
        r.set('lastseen_'+device_id_s, new_message_dict['time'])

    else: #not seen now, but lets determine when he was and add the key
        last_seen = GetRedisFloat('lastseen_'+device_id_s)
        if last_seen == None:
            # NOTE(review): GetLastDetected can return None when nothing
            # ever crossed the threshold; r.set(..., None) would raise.
            last_seen = GetLastDetected(device_id_s, radar_threshold_signal, radar_threshold_value)
            r.set('lastseen_'+device_id_s, last_seen)
|
||||
|
||||
|
||||
# One-shot smoke test: process a canned radar message and print how long
# a single UpdateLastSeen call takes (Redis plus a possible Postgres hit).
new_message_dict = {'time': 1743554317.579559, 'device_id': 499, 'radar': [[100, 100, 0, 0, 0, 2226, 2654, 425, 523, 445, 340, 436, 339, 548], [100, 0, 0, 706, 657, 547, 570, 553, 509, 499]]}

st = time.time()
UpdateLastSeen(new_message_dict)
print(time.time()-st)
|
||||
|
||||
# Disabled scratchpad of redis-py usage examples. Never executes (guarded
# by `if False:`); kept for reference only.
if False:
    # Simple key-value

    value = r.get('simple_key')
    print(f"Simple key value: {value.decode('utf-8')}")

    # WRITING DATA TO REDIS

    # Simple key-value
    r.set('simple_key', 'Hello, Redis!')

    # Setting expiration (TTL) in seconds
    r.setex('expiring_key', 60, 'This will expire in 60 seconds')

    # Working with numbers
    r.set('counter', 0)
    r.incr('counter') # Increment by 1
    r.incrby('counter', 5) # Increment by 5

    # Lists
    r.rpush('my_list', 'item1')
    r.rpush('my_list', 'item2', 'item3') # Add multiple items

    # Sets
    r.sadd('my_set', 'member1', 'member2', 'member3')

    # Hashes (dictionaries)
    r.hset('user:1000', mapping={
        'username': 'john_doe',
        'email': 'john@example.com',
        'visits': 10
    })

    # READING DATA FROM REDIS

    # Simple key-value
    value = r.get('simple_key')
    print(f"Simple key value: {value.decode('utf-8')}")

    # Check current counter value
    counter = r.get('counter')
    print(f"Counter value: {counter.decode('utf-8')}")

    # Lists
    all_items = r.lrange('my_list', 0, -1) # Get all items
    print("List items:", [item.decode('utf-8') for item in all_items])

    # Sets
    all_members = r.smembers('my_set')
    print("Set members:", [member.decode('utf-8') for member in all_members])

    # Check if a member exists in a set
    is_member = r.sismember('my_set', 'member1')
    print(f"Is 'member1' in set? {is_member}")

    # Hashes
    username = r.hget('user:1000', 'username')
    print(f"Username: {username.decode('utf-8')}")

    # Get all hash fields
    user_data = r.hgetall('user:1000')
    print("User data:", {k.decode('utf-8'): v.decode('utf-8') for k, v in user_data.items()})

    # Check remaining TTL for expiring key (in seconds)
    ttl = r.ttl('expiring_key')
    print(f"Expiring key TTL: {ttl} seconds")
|
||||
6
requirements.txt
Normal file
6
requirements.txt
Normal file
@ -0,0 +1,6 @@
|
||||
paho-mqtt
|
||||
kombu>=5.0.0
|
||||
psycopg2-binary>=2.8.6
|
||||
python-dotenv>=0.15.0
|
||||
prometheus-client>=0.11.0
|
||||
redis
|
||||
19
secrets/mqtt-ca-cert
Normal file
19
secrets/mqtt-ca-cert
Normal file
@ -0,0 +1,19 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDHTCCAgWgAwIBAgIUak6HXnrheUs5SnAXYa+jUU1l28YwDQYJKoZIhvcNAQEL
|
||||
BQAwHjEcMBoGA1UEAwwTZWx1eG5ldHdvcmtzLm5ldCBDQTAeFw0yNTAyMjYwNTA4
|
||||
MDFaFw0zNTAyMjQwNTA4MDFaMB4xHDAaBgNVBAMME2VsdXhuZXR3b3Jrcy5uZXQg
|
||||
Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0ooycKCciiC9CY6mP
|
||||
ph+WS1I42kf3Io7kzZ1/gMFb1EJxabMlAga94NmWO9uQkwiaFQOWrH2cvLyLL9kD
|
||||
kz7ZmQUij4C2sHU2CQkqG0mo8xeBxaFYmSXwAd0jYd/6GHABCF63/USWIrfUPkNt
|
||||
f7HaoM6yPZ61w3Ne0G5Kfd5/HsTiRiGbpCXHUpp6NMeuG59j1Ma+eEDXPKMimKti
|
||||
R3bCMI5tOsCOey6yjEP+DituitqUZZYKPmk+7cvi1tK50OGMT330P+mPZPJQxauK
|
||||
dew3mhTv5iKiGYhdN5ZFUy1KVJHf3y3rmNjEWesU0X8483v4tuhhcjNIA+D8/Tcn
|
||||
qKQ5AgMBAAGjUzBRMB0GA1UdDgQWBBTd6ubEStLdE60De4Re5IQENKn/aTAfBgNV
|
||||
HSMEGDAWgBTd6ubEStLdE60De4Re5IQENKn/aTAPBgNVHRMBAf8EBTADAQH/MA0G
|
||||
CSqGSIb3DQEBCwUAA4IBAQAMkrP0zdt1uAOI1B77nV7+EZSzJxahrubo8opjrkvd
|
||||
4/stCVG6OfDutxARvCdC4OoJiRPGXBiA22bIi7ZMl7DTpA+CeFEICfF3MKcf8xIT
|
||||
V5sCm25dX+KHwACWNccNazpIlIAVGxMmcSs70SlMIfPksCW2FRibsVnzESQCgQcP
|
||||
e6owfVvsnQNN4UADki4JMZJ1RQ/nUNM3aNJSf/SFVJYjiUXHLNJY65FfiYV4MGRi
|
||||
Yq+NDPs3D0KLkwQ03FFcw56TdPnCAYuihDHkITuQDtS1DawLhuwFDDPcys5Mnigc
|
||||
1Y1o5V4Z6L6xkttwgD9zP3DQJscDZ/Gki1NqYV3TvJPr
|
||||
-----END CERTIFICATE-----
|
||||
1307
src/app.py
Normal file
1307
src/app.py
Normal file
File diff suppressed because it is too large
Load Diff
BIN
src/data_backups/data_radar.pkl
Normal file
BIN
src/data_backups/data_radar.pkl
Normal file
Binary file not shown.
BIN
src/data_backups/data_sensors.pkl
Normal file
BIN
src/data_backups/data_sensors.pkl
Normal file
Binary file not shown.
BIN
src/data_backups_100/data_radar.pkl
Normal file
BIN
src/data_backups_100/data_radar.pkl
Normal file
Binary file not shown.
19
src/new-ca.crt
Normal file
19
src/new-ca.crt
Normal file
@ -0,0 +1,19 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDHTCCAgWgAwIBAgIUak6HXnrheUs5SnAXYa+jUU1l28YwDQYJKoZIhvcNAQEL
|
||||
BQAwHjEcMBoGA1UEAwwTZWx1eG5ldHdvcmtzLm5ldCBDQTAeFw0yNTAyMjYwNTA4
|
||||
MDFaFw0zNTAyMjQwNTA4MDFaMB4xHDAaBgNVBAMME2VsdXhuZXR3b3Jrcy5uZXQg
|
||||
Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0ooycKCciiC9CY6mP
|
||||
ph+WS1I42kf3Io7kzZ1/gMFb1EJxabMlAga94NmWO9uQkwiaFQOWrH2cvLyLL9kD
|
||||
kz7ZmQUij4C2sHU2CQkqG0mo8xeBxaFYmSXwAd0jYd/6GHABCF63/USWIrfUPkNt
|
||||
f7HaoM6yPZ61w3Ne0G5Kfd5/HsTiRiGbpCXHUpp6NMeuG59j1Ma+eEDXPKMimKti
|
||||
R3bCMI5tOsCOey6yjEP+DituitqUZZYKPmk+7cvi1tK50OGMT330P+mPZPJQxauK
|
||||
dew3mhTv5iKiGYhdN5ZFUy1KVJHf3y3rmNjEWesU0X8483v4tuhhcjNIA+D8/Tcn
|
||||
qKQ5AgMBAAGjUzBRMB0GA1UdDgQWBBTd6ubEStLdE60De4Re5IQENKn/aTAfBgNV
|
||||
HSMEGDAWgBTd6ubEStLdE60De4Re5IQENKn/aTAPBgNVHRMBAf8EBTADAQH/MA0G
|
||||
CSqGSIb3DQEBCwUAA4IBAQAMkrP0zdt1uAOI1B77nV7+EZSzJxahrubo8opjrkvd
|
||||
4/stCVG6OfDutxARvCdC4OoJiRPGXBiA22bIi7ZMl7DTpA+CeFEICfF3MKcf8xIT
|
||||
V5sCm25dX+KHwACWNccNazpIlIAVGxMmcSs70SlMIfPksCW2FRibsVnzESQCgQcP
|
||||
e6owfVvsnQNN4UADki4JMZJ1RQ/nUNM3aNJSf/SFVJYjiUXHLNJY65FfiYV4MGRi
|
||||
Yq+NDPs3D0KLkwQ03FFcw56TdPnCAYuihDHkITuQDtS1DawLhuwFDDPcys5Mnigc
|
||||
1Y1o5V4Z6L6xkttwgD9zP3DQJscDZ/Gki1NqYV3TvJPr
|
||||
-----END CERTIFICATE-----
|
||||
0
timescaledb_backup.dump
Normal file
0
timescaledb_backup.dump
Normal file
Loading…
x
Reference in New Issue
Block a user