v0.1601 - access logs for webUI

FlyingFathead committed Oct 13, 2024
1 parent 9dd55b2 commit f067ff3
Showing 4 changed files with 129 additions and 43 deletions.
3 changes: 3 additions & 0 deletions README.md
@@ -266,6 +266,9 @@ Use `utils/batch_humdet_yolo8_opencv2.py` to run YOLOv8 batch detection on direc
- Add hooks for sending detections to web servers or APIs

## Changelog
- **v0.1601** (Oct-13-2024) Active access logging for webUI connections; improved connection tracking (inactive clients are now dropped)
  - Access via the webUI is logged by default to `logs/access.log`
  - See `config.ini` for more options
- **v0.160** (Oct-13-2024) WebUI access logging added
- Can be enabled/disabled and defined in `config.ini`
- **v0.159** (Oct-12-2024) Fixes to the detection saving logic
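The changelog points to `config.ini` for the access-logging options, but the config file itself is not part of this commit. As a hedged sketch only, reading such options with `configparser` might look like the following; the section and key names here are assumptions, not the project's actual keys:

```python
# Illustrative sketch -- section/key names are assumptions, not this project's real config.ini keys.
import configparser

config = configparser.ConfigParser()
config.read('config.ini')

# Hypothetical access-logging options with safe fallbacks.
enable_access_logging = config.getboolean('logging', 'enable_access_logging', fallback=True)
access_log_path = config.get('logging', 'access_log_path', fallback='logs/access.log')
check_interval = config.getint('logging', 'check_interval', fallback=10)  # seconds between connection checks
log_interval = config.getint('logging', 'log_interval', fallback=30)      # per-IP throttle for access-log entries
```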
9 changes: 9 additions & 0 deletions utils.py
@@ -0,0 +1,9 @@
import shutil
import sys

# print term width horizontal line
def hz_line(character='-'):
terminal_width = shutil.get_terminal_size().columns
line = character * terminal_width
print(line)
sys.stdout.flush() # Flush the output to the terminal immediately
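For reference, a minimal usage sketch of the new helper, mirroring how the startup banner in `yolov8_live_rtmp_stream_detection.py` (shown further down) uses it; the banner text here is illustrative:

```python
# Illustrative usage of utils.hz_line() as a terminal-wide separator.
from utils import hz_line

hz_line()                      # default '-' line across the terminal width
print("dvr-yolov8-detection starting up...")
hz_line('=')                   # any single character can be passed instead
```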
150 changes: 110 additions & 40 deletions web_server.py
@@ -19,9 +19,8 @@
# Configure logging for the web server
logger = logging.getLogger('web_server')
logger.setLevel(logging.INFO)

# Prevent messages from propagating to the root logger
logger.propagate = False # Add this line
logger.propagate = False

app = Flask(__name__)
# Flask proxy fix
@@ -85,24 +84,69 @@ def load_config(config_file='config.ini'):
# Keep track of connected clients
connected_clients = {}

# Configure logging for the web server
logger = logging.getLogger('web_server')
logger.setLevel(logging.INFO)

# Initialize the lock for connected_clients
connected_clients_lock = threading.Lock()

# Periodically check and log active client connections.
def log_active_connections():
"""Periodically log active client connections."""
while True:
time.sleep(check_interval)
current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
with connected_clients_lock:
if connected_clients:
logger.info(f"Active connections at {current_time}: {connected_clients}")
else:
logger.info(f"No active web UI connections at {current_time}, check interval is {check_interval} seconds.")
"""Periodically log active client connections and remove inactive ones."""
try:
logger.info("Starting active connections logging thread.") # Confirmation log
previous_clients = set()
timeout = 60 # seconds
while True:
time.sleep(check_interval)
current_time = datetime.now()
current_time_str = current_time.strftime('%Y-%m-%d %H:%M:%S')

with connected_clients_lock:
active_clients = set(connected_clients.keys())

# Identify inactive clients
inactive_ips = []
for ip, last_seen in connected_clients.items():
if (current_time - last_seen).total_seconds() > timeout:
inactive_ips.append(ip)

# Remove inactive clients
for ip in inactive_ips:
del connected_clients[ip]
logger.info(f"Removed inactive client: {ip}")

# Update active_clients after removals
active_clients = set(connected_clients.keys())

if active_clients != previous_clients: # Log only when there's a change
if active_clients:
logger.info(f"Active connections at {current_time_str}: {', '.join(active_clients)}")
else:
logger.info(f"No active web UI connections at {current_time_str}, check interval is {check_interval} seconds.")

previous_clients = active_clients.copy() # Update to avoid redundant logging
else:
logger.debug(f"No change in active connections at {current_time_str}.")
except Exception as e:
logger.error(f"Error in log_active_connections thread: {e}")

# # Periodically check and log active client connections.
# def log_active_connections():
# """Periodically log active client connections."""
# logger.info("Starting active connections logging thread.") # Add this log to confirm thread starts
# previous_clients = set()
# while True:
# time.sleep(check_interval)
# current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

# with connected_clients_lock:
# active_clients = set(connected_clients.keys())

# if active_clients != previous_clients: # Only log when there's a change in active connections
# if active_clients:
# logger.info(f"Active connections at {current_time}: {connected_clients}")
# else:
# logger.info(f"No active web UI connections at {current_time}, check interval is {check_interval} seconds.")

# previous_clients = active_clients.copy() # Update previous clients to avoid redundant logging

# Conditionally start the background thread for logging active connections
if interval_checks:
@@ -278,32 +322,58 @@ def log_request_info():
client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
current_time = time.time()

# Log request details into access log
# access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")

# List of endpoints to ignore logging (e.g., video feed spam)
# excluded_routes = ['/video_feed', '/static/', '/favicon.ico']
excluded_routes = ""

# Optional: Add this if you need more detailed logs in the main log
logging.info("⚠️ User connected to the webUI:")
logging.info(f"Request path: {request.path}")
logging.info(f"Request headers: {request.headers}")

# Track when clients hit an endpoint
if request.path not in excluded_routes:
with connected_clients_lock:
connected_clients[client_ip] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

# Log requests for non-excluded routes immediately
if request.path not in excluded_routes:
# List of endpoints to completely ignore logging
excluded_routes = ['/api/current_time', '/api/detections', '/api/logs', '/video_feed', '/static/', '/favicon.ico']

# Track IP addresses for active connections, regardless of the route
with connected_clients_lock:
# connected_clients[client_ip] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
connected_clients[client_ip] = datetime.now()

# If the current request path is in the excluded routes, skip logging
if any(request.path.startswith(route) for route in excluded_routes):
return

# Get the last logged time for this client
last_time = last_logged_time.get(client_ip, 0)

# Log requests only if enough time has passed
if current_time - last_time > log_interval:
access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Request Path: {request.path} - Request Headers: {request.headers} - Method: {request.method} - User Agent: {request.user_agent}")
else:
# Check if enough time has passed to log this IP again
last_time = last_logged_time.get(client_ip, 0)
if current_time - last_time > log_interval:
access_logger.info(f"Aggregated log - Client IP: {client_ip} - Request Path: {request.path} - Request Headers: {request.headers} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")
last_logged_time[client_ip] = current_time
last_logged_time[client_ip] = current_time
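`access_logger` is what writes these entries to disk; its setup sits outside this hunk, but the README notes that access is logged to `logs/access.log` by default. A hedged sketch of a logger configured along those lines (handler choice, format, and rotation sizes are assumptions):

```python
# Illustrative sketch -- the project's actual access_logger setup is not shown
# in this commit; only the logs/access.log default path comes from the README.
import logging
import os
from logging.handlers import RotatingFileHandler

os.makedirs('logs', exist_ok=True)

access_logger = logging.getLogger('access_log')
access_logger.setLevel(logging.INFO)
access_logger.propagate = False  # keep access entries out of the main log

handler = RotatingFileHandler('logs/access.log', maxBytes=5_000_000, backupCount=3)
handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
access_logger.addHandler(handler)
```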

# @app.before_request
# def log_request_info():
# client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
# current_time = time.time()

# # Log request details into access log
# # access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")

# # List of endpoints to ignore logging (e.g., video feed spam)
# # excluded_routes = ['/video_feed', '/static/', '/favicon.ico']
# # excluded_routes = "/api/current_time"
# excluded_routes = ['/api/current_time', '/api/detections']

# # Optional: Add this if you need more detailed logs in the main log
# # logging.info("⚠️ User connected to the webUI:")
# # logging.info(f"Request path: {request.path}")
# # logging.info(f"Request headers: {request.headers}")

# # Track when clients hit an endpoint
# if request.path not in excluded_routes:
# with connected_clients_lock:
# connected_clients[client_ip] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

# # Log requests for non-excluded routes immediately
# if request.path not in excluded_routes:
# access_logger.info(f"Client IP: {client_ip} - Request URL: {request.url} - Request Path: {request.path} - Request Headers: {request.headers} - Method: {request.method} - User Agent: {request.user_agent}")
# else:
# # Check if enough time has passed to log this IP again
# last_time = last_logged_time.get(client_ip, 0)
# if current_time - last_time > log_interval:
# access_logger.info(f"Aggregated log - Client IP: {client_ip} - Request Path: {request.path} - Request Headers: {request.headers} - Request URL: {request.url} - Method: {request.method} - User Agent: {request.user_agent}")
# last_logged_time[client_ip] = current_time

@app.route('/api/current_time')
def get_current_time():
10 changes: 7 additions & 3 deletions yolov8_live_rtmp_stream_detection.py
@@ -1,16 +1,16 @@
#
# yolov8_live_rtmp_stream_detection.py
# (Updated Oct 10, 2024)
# (Updated Oct 13, 2024)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# https://github.com/FlyingFathead/dvr-yolov8-detection
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Version number
version_number = 0.160
version_number = 0.161

import cv2
import torch
import logging
import numpy as np
from utils import hz_line

# Time and timezone related
import time
@@ -32,6 +32,10 @@
# Import web server functions
from web_server import start_web_server, set_output_frame

hz_line()
print(f"::: dvr-yolov8-detection v{version_number} | https://github.com/FlyingFathead/dvr-yolov8-detection/")
hz_line()

# Shared data structures
detections_list = deque(maxlen=100) # Store up to 100 latest detections on web UI
logs_list = deque(maxlen=100) # Store up to 100 latest logs on web UI
