diff --git a/.gitignore b/.gitignore
index c1b142f..967e380 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,8 @@ __pycache__/
 
 # Ignore logs and temporary files
+access.log
+detections.log
 *.log
 *.tmp
 
diff --git a/README.md b/README.md
index c9f168c..a172809 100644
--- a/README.md
+++ b/README.md
@@ -266,6 +266,8 @@ Use `utils/batch_humdet_yolo8_opencv2.py` to run YOLOv8 batch detection on direc
 - Add hooks for sending detections to web servers or APIs
 
 ## Changelog
+- **v0.160** (Oct-13-2024) WebUI access logging added
+  - Can be enabled/disabled and configured in `config.ini`
 - **v0.159** (Oct-12-2024) Fixes to the detection saving logic
 - **v0.158** (Oct-11-2024) **Even more webUI updates**
   - Human detections get aggregated in the webUI within a cooldown period
diff --git a/config.ini b/config.ini
index 0cdb978..6dfcb54 100644
--- a/config.ini
+++ b/config.ini
@@ -64,6 +64,7 @@ enable_detection_logging_to_file = True
 log_directory = ./logs
 log_file = logging.log
 detection_log_file = detections.log
+access_log_file = access.log
 
 [webserver]
 enable_webserver = true
@@ -75,6 +76,10 @@ webserver_port = 5000
 # Note that especially on higher resolutions, high FPS's may introduce
 # computational overhead where the stream starts to lag.
 webserver_max_fps = 10
+# Interval checking for webUI connections (true/false)
+interval_checks = true
+# Check interval in seconds for active connections
+check_interval = 15
 
 [webui]
 # Web UI Configuration
diff --git a/web_server.py b/web_server.py
index fc372fb..53925c0 100644
--- a/web_server.py
+++ b/web_server.py
@@ -5,6 +5,7 @@
 # Web server module for real-time YOLOv8 detection
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+import os
 from collections import deque
 from datetime import datetime
 import threading
@@ -48,6 +49,27 @@ def load_config(config_file='config.ini'):
 # Load configurations
 config = load_config()
 
+# Extract logging directory and files from the config
+log_directory = config.get('logging', 'log_directory', fallback='./logs')
+if not os.path.exists(log_directory):
+    os.makedirs(log_directory)  # Ensure log directory exists
+
+log_file = os.path.join(log_directory, config.get('logging', 'log_file', fallback='logging.log'))
+detection_log_file = os.path.join(log_directory, config.get('logging', 'detection_log_file', fallback='detections.log'))
+access_log_file = os.path.join(log_directory, config.get('logging', 'access_log_file', fallback='access.log'))
+
+# Configure the access logger
+access_log_handler = logging.FileHandler(access_log_file)
+access_log_formatter = logging.Formatter('%(asctime)s - %(message)s')
+access_log_handler.setFormatter(access_log_formatter)
+access_logger = logging.getLogger('access_logger')
+access_logger.addHandler(access_log_handler)
+access_logger.setLevel(logging.INFO)
+
+# Dictionary to store timestamps of recent requests for each IP
+last_logged_time = {}
+log_interval = 10  # Interval in seconds to aggregate logs for frequent requests
+
 # Extract configurations with fallbacks
 ENABLE_WEBSERVER = config.getboolean('webserver', 'enable_webserver', fallback=True)
 WEBSERVER_HOST = config.get('webserver', 'webserver_host', fallback='0.0.0.0')
@@ -55,17 +77,39 @@ def load_config(config_file='config.ini'):
 WEBSERVER_MAX_FPS = config.getint('webserver', 'webserver_max_fps', fallback=10)
 WEBUI_COOLDOWN_AGGREGATION = config.getint('webui', 'webui_cooldown_aggregation', fallback=30)
 WEBUI_BOLD_THRESHOLD = config.getint('webui', 'webui_bold_threshold', fallback=10)
+# Read check_interval from config.ini with a fallback to 10
+interval_checks = config.getboolean('webserver', 'interval_checks', fallback=True)
+check_interval = config.getint('webserver', 'check_interval', fallback=10)
+
+# // client tracking
+# Keep track of connected clients
+connected_clients = {}
 
 # Configure logging for the web server
 logger = logging.getLogger('web_server')
 logger.setLevel(logging.INFO)
 
-# # Console handler
-# console_handler = logging.StreamHandler()
-# console_handler.setLevel(logging.INFO)
-# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
-# console_handler.setFormatter(formatter)
-# logger.addHandler(console_handler)
+# Initialize the lock for connected_clients
+connected_clients_lock = threading.Lock()
+
+# Periodically check and log active client connections.
+def log_active_connections():
+    """Periodically log active client connections."""
+    while True:
+        time.sleep(check_interval)
+        current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+        with connected_clients_lock:
+            if connected_clients:
+                logger.info(f"Active connections at {current_time}: {connected_clients}")
+            else:
+                logger.info(f"No active web UI connections at {current_time}, check interval is {check_interval} seconds.")
+
+# Conditionally start the background thread for logging active connections
+if interval_checks:
+    threading.Thread(target=log_active_connections, daemon=True).start()
+    logger.info("Active connections logging is enabled.")
+else:
+    logger.info("Active connections logging is disabled.")
 
 # Log the active configurations on startup
 logger.info("======================================================")
@@ -74,6 +118,7 @@ def load_config(config_file='config.ini'):
 logger.info(f"Web Server Host: {WEBSERVER_HOST}")
 logger.info(f"Web Server Port: {WEBSERVER_PORT}")
 logger.info(f"Web Server Max FPS: {WEBSERVER_MAX_FPS}")
+logger.info(f"Check Interval: {check_interval} seconds")
 logger.info(f"Web UI Cooldown Aggregation: {WEBUI_COOLDOWN_AGGREGATION} seconds")
 logger.info(f"Web UI Bold Threshold: {WEBUI_BOLD_THRESHOLD}")
 logger.info("======================================================")
@@ -230,10 +275,36 @@ def aggregation_thread_function(cooldown=30, bold_threshold=10):
 
 @app.before_request
 def log_request_info():
-    logging.info(f"Request URL: {request.url}")
+    client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
+    current_time = time.time()
+
+    # Log request details into access log
+    # access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")
+
+    # List of endpoints to ignore logging (e.g., video feed spam)
+    # excluded_routes = ['/video_feed', '/static/', '/favicon.ico']
+    excluded_routes = []
+
+    # Optional: Add this if you need more detailed logs in the main log
+    logging.info("⚠️ User connected to the webUI:")
     logging.info(f"Request path: {request.path}")
     logging.info(f"Request headers: {request.headers}")
 
+    # Track when clients hit an endpoint
+    if request.path not in excluded_routes:
+        with connected_clients_lock:
+            connected_clients[client_ip] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+
+    # Log requests for non-excluded routes immediately
+    if request.path not in excluded_routes:
+        access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Request Path: {request.path} - Request Headers: {request.headers} - Method: {request.method} - User Agent: {request.user_agent}")
+    else:
+        # Check if enough time has passed to log this IP again
+        last_time = last_logged_time.get(client_ip, 0)
+        if current_time - last_time > log_interval:
+            access_logger.info(f"Aggregated log - Client IP: {client_ip} - Request Path: {request.path} - Request Headers: {request.headers} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")
+            last_logged_time[client_ip] = current_time
+
 @app.route('/api/current_time')
 def get_current_time():
     """API endpoint to return the current host time."""
diff --git a/yolov8_live_rtmp_stream_detection.py b/yolov8_live_rtmp_stream_detection.py
index 68b6417..0fcf6c2 100755
--- a/yolov8_live_rtmp_stream_detection.py
+++ b/yolov8_live_rtmp_stream_detection.py
@@ -5,7 +5,7 @@
 # https://github.com/FlyingFathead/dvr-yolov8-detection
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 # Version number
-version_number = 0.159
+version_number = 0.160
 
 import cv2
 import torch