Skip to content

Commit

Permalink
v0.1614 - detection aggregation improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
FlyingFathead committed Nov 29, 2024
1 parent c55ea9c commit 4b0b89e
Show file tree
Hide file tree
Showing 6 changed files with 62 additions and 11 deletions.
2 changes: 1 addition & 1 deletion .catgitinclude
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
yolov8_live_rtmp_stream_detection.py
# config.ini
config.ini
# remote_sync.py
web_server.py
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,8 @@ Use `utils/batch_humdet_yolo8_opencv2.py` to run YOLOv8 batch detection on direc
- Add hooks for sending detections to web servers or APIs

## Changelog
- **v0.1614**
  - More responsive log updates in the web UI
- **v0.1613**
- Even more load balancing fixes; aggregation parsing improvements
- **v0.1612**
Expand Down
9 changes: 9 additions & 0 deletions config.ini
Original file line number Diff line number Diff line change
Expand Up @@ -105,9 +105,18 @@ webserver_max_fps = 10
interval_checks = true
# check interval in seconds for active connections
check_interval = 15

[aggregation]
# setting the option below to `true` saves aggregated detections across sessions
enable_persistent_aggregated_detections = true
aggregated_detections_file = ./logs/aggregated_detections.json
# log rotation (work in progress; options below are not yet active)
# Max size in bytes (e.g., 1MB)
# max_aggregated_detections_size = 1048576
# keep_old_aggregations = true
# max_old_aggregations = 5
# maximum number of aggregated entries served to the web UI (very large lists can slow the interface down)
webui_max_aggregation_entries = 100

[webui]
# Web UI Configuration
Expand Down
2 changes: 1 addition & 1 deletion version.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
# version.py

version_number = "0.1613 (Nov 28 2024)"
version_number = "0.1614 (Nov 29 2024)"
50 changes: 43 additions & 7 deletions web_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import sys
import os
import signal
import shutil

from collections import deque
from datetime import datetime
Expand All @@ -30,7 +31,7 @@
logger.setLevel(logging.INFO)
# Prevent messages from propagating to the root logger
logger.propagate = False

# init flask
app = Flask(__name__)

# Flask proxy fix
Expand Down Expand Up @@ -97,8 +98,8 @@ def load_config(config_file='config.ini'):
interval_checks = config.getboolean('webserver', 'interval_checks', fallback=True)
check_interval = config.getint('webserver', 'check_interval', fallback=10)
# Persistent aggregated detections
ENABLE_PERSISTENT_AGGREGATED_DETECTIONS = config.getboolean('webserver', 'enable_persistent_aggregated_detections', fallback=False)
AGGREGATED_DETECTIONS_FILE = config.get('webserver', 'aggregated_detections_file', fallback='./logs/aggregated_detections.json')
ENABLE_PERSISTENT_AGGREGATED_DETECTIONS = config.getboolean('aggregation', 'enable_persistent_aggregated_detections', fallback=False)
AGGREGATED_DETECTIONS_FILE = config.get('aggregation', 'aggregated_detections_file', fallback='./logs/aggregated_detections.json')

if ENABLE_PERSISTENT_AGGREGATED_DETECTIONS:
logger.info(f"Persistent aggregated detections enabled. Logging to file: {AGGREGATED_DETECTIONS_FILE}")
Expand Down Expand Up @@ -131,15 +132,42 @@ def load_config(config_file='config.ini'):
# Initialize the lock for connected_clients
connected_clients_lock = threading.Lock()

# save aggregated detections if enabled
# rotate aggregated files
def rotate_aggregated_files():
    """Rotates the aggregated detections files when they exceed the max size.

    Keeps up to ``max_old_aggregations`` numbered backups
    (``<file>.1`` … ``<file>.N``), dropping the oldest on each rotation.
    When ``keep_old_aggregations`` is false, the current file is deleted
    instead of rotated.
    """
    base_file = AGGREGATED_DETECTIONS_FILE
    keep_old = config.getboolean('aggregation', 'keep_old_aggregations', fallback=True)
    max_old = config.getint('aggregation', 'max_old_aggregations', fallback=5)

    if not keep_old:
        # Guard the delete so rotation never raises FileNotFoundError
        # when there is nothing to remove yet.
        if os.path.exists(base_file):
            os.remove(base_file)
            logger.info("Old aggregated detections file removed.")
        return

    # Shift existing backups up by one index (highest first so nothing
    # is overwritten); the oldest backup is discarded.
    for i in range(max_old, 0, -1):
        old_file = f"{base_file}.{i}"
        if os.path.exists(old_file):
            if i == max_old:
                os.remove(old_file)
            else:
                new_file = f"{base_file}.{i+1}"
                os.rename(old_file, new_file)
    # Rename the current file to .1
    if os.path.exists(base_file):
        os.rename(base_file, f"{base_file}.1")
        logger.info("Aggregated detections file rotated.")

# Save aggregated detections if enabled
def save_aggregated_detections():
    """Saves the aggregated detections to a JSON file.

    No-op unless persistent aggregated detections are enabled. The snapshot
    is taken under ``aggregated_lock``; the file is written to a temp path
    and atomically swapped in with ``os.replace`` so a crash mid-write
    cannot leave a truncated/corrupt JSON file behind.
    """
    if ENABLE_PERSISTENT_AGGREGATED_DETECTIONS:
        # Copy under the lock so concurrent detection updates can't race us.
        with aggregated_lock:
            data = list(aggregated_detections_list)
        try:
            tmp_file = f"{AGGREGATED_DETECTIONS_FILE}.tmp"
            # Write the data to a temp file first, then atomically replace
            with open(tmp_file, 'w') as f:
                json.dump(data, f, default=str)  # default=str handles non-serializable values (e.g. datetimes)
            os.replace(tmp_file, AGGREGATED_DETECTIONS_FILE)
            logger.info("Aggregated detections saved to persistent storage.")
        except Exception as e:
            logger.error(f"Error saving aggregated detections to file: {e}")
Expand Down Expand Up @@ -217,6 +245,8 @@ def start_web_server(host='0.0.0.0', port=5000, detection_log_path=None,
logs_lock=None, config=None, save_dir_base=None):
"""Starts the Flask web server."""

app.config['config'] = config # Store config in Flask's config if needed

# Initialize SAVE_DIR_BASE within the web server process
SAVE_DIR_BASE = get_base_save_dir(config)
app.config['SAVE_DIR_BASE'] = SAVE_DIR_BASE
Expand Down Expand Up @@ -541,9 +571,15 @@ def video_feed():
# get aggregated detections rather than flood the webui
@app.route('/api/detections')
def get_detections():
    """Returns aggregated detections as JSON, capped for the web UI.

    The entry count is limited by ``webui_max_aggregation_entries`` in
    config.ini (fallback 100) so very long detection histories don't
    flood the browser, and caching is disabled so polling clients
    always see fresh data.
    """
    # Read max_entries from config.ini with a fallback to 100
    max_entries = config.getint('aggregation', 'webui_max_aggregation_entries', fallback=100)
    with aggregated_lock:
        # Snapshot + slice under the lock so concurrent updates can't race us.
        aggregated_detections = list(aggregated_detections_list)[:max_entries]
    response = jsonify(aggregated_detections)
    # Explicit no-cache headers: the UI polls this endpoint for live updates.
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    response.headers['Pragma'] = 'no-cache'
    response.headers['Expires'] = '0'
    return response

# // (old method)
# @app.route('/api/detections')
Expand Down
8 changes: 6 additions & 2 deletions yolov8_live_rtmp_stream_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,8 +131,12 @@ def load_config(config_path=None):
WEBSERVER_PORT = config.getint('webserver', 'webserver_port', fallback=5000)

# Read AGGREGATED_DETECTIONS_FILE from config for remote sync
ENABLE_PERSISTENT_AGGREGATED_DETECTIONS = config.getboolean('webserver', 'enable_persistent_aggregated_detections', fallback=False)
AGGREGATED_DETECTIONS_FILE = config.get('webserver', 'aggregated_detections_file', fallback='./logs/aggregated_detections.json')
ENABLE_PERSISTENT_AGGREGATED_DETECTIONS = config.getboolean('aggregation', 'enable_persistent_aggregated_detections', fallback=False)
AGGREGATED_DETECTIONS_FILE = config.get('aggregation', 'aggregated_detections_file', fallback='./logs/aggregated_detections.json')


webui_max_aggregation_entries = 100


# Initialize the image save queue and stop event
# image_save_queue = Queue()
Expand Down

0 comments on commit 4b0b89e

Please sign in to comment.