first commit
1
layers/presentation/__init__.py
Normal file
@@ -0,0 +1 @@
# Empty file to make this a Python package
BIN
layers/presentation/__pycache__/__init__.cpython-39.pyc
Normal file
Binary file not shown.
BIN
layers/presentation/__pycache__/api_routes.cpython-39.pyc
Normal file
Binary file not shown.
BIN
layers/presentation/__pycache__/redis_subscriber.cpython-39.pyc
Normal file
Binary file not shown.
BIN
layers/presentation/__pycache__/websocket_handler.cpython-39.pyc
Normal file
Binary file not shown.
404
layers/presentation/api_routes.py
Normal file
@@ -0,0 +1,404 @@
"""
API routes for the energy monitoring system
Presentation Layer - handles HTTP endpoints and request/response formatting
"""
from fastapi import APIRouter, HTTPException, Query, Depends
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import time
import logging

from models import (
    DataQuery, DataResponse, SensorType, SensorStatus, HealthCheck
)
from ..business.sensor_service import SensorService
from ..business.room_service import RoomService
from ..business.analytics_service import AnalyticsService
from ..infrastructure.database_connection import database_connection
from ..infrastructure.redis_connection import redis_connection

logger = logging.getLogger(__name__)
router = APIRouter()

# Initialize services
sensor_service = SensorService()
room_service = RoomService()
analytics_service = AnalyticsService()

# Dependency to check database connection
async def check_database():
    """Dependency to ensure database is connected"""
    try:
        db = await database_connection.get_database()
        return db
    except Exception as e:
        logger.error(f"Database connection failed: {e}")
        raise HTTPException(status_code=503, detail="Database unavailable")

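For orientation, a minimal sketch of how this router could be mounted in an application entry point. The main module, app object, and /api prefix are assumptions and are not part of this commit:

    from fastapi import FastAPI
    from layers.presentation.api_routes import router

    app = FastAPI()
    app.include_router(router, prefix="/api")
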
@router.get("/sensors", summary="Get all sensors")
|
||||
async def get_sensors(
|
||||
room: Optional[str] = Query(None, description="Filter by room"),
|
||||
sensor_type: Optional[SensorType] = Query(None, description="Filter by sensor type"),
|
||||
status: Optional[SensorStatus] = Query(None, description="Filter by status"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get list of all registered sensors with optional filtering"""
|
||||
try:
|
||||
# Build filters
|
||||
filters = {}
|
||||
if room:
|
||||
filters["room"] = room
|
||||
if sensor_type:
|
||||
filters["sensor_type"] = sensor_type.value
|
||||
if status:
|
||||
filters["status"] = status.value
|
||||
|
||||
result = await sensor_service.get_all_sensors(filters)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting sensors: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
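For illustration, a client call against this filtered listing might look like the following sketch. The host, port, /api prefix, and concrete filter values are assumptions:

    import requests

    # List sensors, filtered by room and status
    resp = requests.get(
        "http://localhost:8000/api/sensors",
        params={"room": "kitchen", "status": "active"},  # illustrative values
    )
    resp.raise_for_status()
    print(resp.json())
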
@router.get("/sensors/{sensor_id}", summary="Get sensor details")
|
||||
async def get_sensor(sensor_id: str, db=Depends(check_database)):
|
||||
"""Get detailed information about a specific sensor"""
|
||||
try:
|
||||
result = await sensor_service.get_sensor_details(sensor_id)
|
||||
|
||||
if not result:
|
||||
raise HTTPException(status_code=404, detail="Sensor not found")
|
||||
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting sensor {sensor_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/sensors/{sensor_id}/data", summary="Get sensor historical data")
|
||||
async def get_sensor_data(
|
||||
sensor_id: str,
|
||||
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
|
||||
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
|
||||
limit: int = Query(100, description="Maximum records to return"),
|
||||
offset: int = Query(0, description="Records to skip"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get historical data for a specific sensor"""
|
||||
try:
|
||||
start_query_time = time.time()
|
||||
|
||||
# Build query
|
||||
query = {"sensor_id": sensor_id}
|
||||
|
||||
if start_time or end_time:
|
||||
time_query = {}
|
||||
if start_time:
|
||||
time_query["$gte"] = datetime.fromtimestamp(start_time)
|
||||
if end_time:
|
||||
time_query["$lte"] = datetime.fromtimestamp(end_time)
|
||||
query["created_at"] = time_query
|
||||
|
||||
# Get total count and readings through service layer
|
||||
from ..infrastructure.repositories import SensorReadingRepository
|
||||
repo = SensorReadingRepository()
|
||||
|
||||
total_count = await repo.count_by_query(query)
|
||||
readings = await repo.get_by_query(
|
||||
query=query,
|
||||
sort_by="timestamp",
|
||||
sort_order="desc",
|
||||
limit=limit,
|
||||
offset=offset
|
||||
)
|
||||
|
||||
execution_time = (time.time() - start_query_time) * 1000
|
||||
|
||||
return DataResponse(
|
||||
data=readings,
|
||||
total_count=total_count,
|
||||
query=DataQuery(
|
||||
sensor_ids=[sensor_id],
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
limit=limit,
|
||||
offset=offset
|
||||
),
|
||||
execution_time_ms=execution_time
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting sensor data for {sensor_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
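To make the time-range handling concrete: a request with start_time=1700000000 and end_time=1700003600 (illustrative values) produces a MongoDB filter shaped like this. Note that datetime.fromtimestamp() yields naive local-time datetimes:

    from datetime import datetime

    query = {
        "sensor_id": "sensor-1",  # from the path parameter
        "created_at": {
            "$gte": datetime.fromtimestamp(1700000000),
            "$lte": datetime.fromtimestamp(1700003600),
        },
    }
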
@router.get("/rooms", summary="Get all rooms")
|
||||
async def get_rooms(db=Depends(check_database)):
|
||||
"""Get list of all rooms with sensor counts and latest metrics"""
|
||||
try:
|
||||
result = await room_service.get_all_rooms()
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting rooms: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/rooms/{room_name}/data", summary="Get room historical data")
|
||||
async def get_room_data(
|
||||
room_name: str,
|
||||
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
|
||||
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
|
||||
limit: int = Query(100, description="Maximum records to return"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get historical data for a specific room"""
|
||||
try:
|
||||
start_query_time = time.time()
|
||||
|
||||
result = await room_service.get_room_data(
|
||||
room_name=room_name,
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
execution_time = (time.time() - start_query_time) * 1000
|
||||
result["execution_time_ms"] = execution_time
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting room data for {room_name}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.post("/data/query", summary="Advanced data query", response_model=DataResponse)
|
||||
async def query_data(query_params: DataQuery, db=Depends(check_database)):
|
||||
"""Advanced data querying with multiple filters and aggregations"""
|
||||
try:
|
||||
start_query_time = time.time()
|
||||
|
||||
# Build MongoDB query
|
||||
mongo_query = {}
|
||||
|
||||
# Sensor filters
|
||||
if query_params.sensor_ids:
|
||||
mongo_query["sensor_id"] = {"$in": query_params.sensor_ids}
|
||||
|
||||
if query_params.rooms:
|
||||
mongo_query["room"] = {"$in": query_params.rooms}
|
||||
|
||||
if query_params.sensor_types:
|
||||
mongo_query["sensor_type"] = {"$in": [st.value for st in query_params.sensor_types]}
|
||||
|
||||
# Time range
|
||||
if query_params.start_time or query_params.end_time:
|
||||
time_query = {}
|
||||
if query_params.start_time:
|
||||
time_query["$gte"] = datetime.fromtimestamp(query_params.start_time)
|
||||
if query_params.end_time:
|
||||
time_query["$lte"] = datetime.fromtimestamp(query_params.end_time)
|
||||
mongo_query["created_at"] = time_query
|
||||
|
||||
# Execute query through repository
|
||||
from ..infrastructure.repositories import SensorReadingRepository
|
||||
repo = SensorReadingRepository()
|
||||
|
||||
total_count = await repo.count_by_query(mongo_query)
|
||||
readings = await repo.get_by_query(
|
||||
query=mongo_query,
|
||||
sort_by=query_params.sort_by,
|
||||
sort_order=query_params.sort_order,
|
||||
limit=query_params.limit,
|
||||
offset=query_params.offset
|
||||
)
|
||||
|
||||
execution_time = (time.time() - start_query_time) * 1000
|
||||
|
||||
return DataResponse(
|
||||
data=readings,
|
||||
total_count=total_count,
|
||||
query=query_params,
|
||||
execution_time_ms=execution_time
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error executing data query: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
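A request against this endpoint might carry a body like the following sketch. The field names follow the DataQuery usage above; the concrete values and base URL are assumptions:

    import requests

    payload = {
        "sensor_ids": ["sensor-1", "sensor-2"],
        "rooms": ["kitchen"],
        "start_time": 1700000000,  # Unix seconds
        "end_time": 1700086400,
        "sort_by": "timestamp",
        "sort_order": "desc",
        "limit": 500,
        "offset": 0,
    }
    resp = requests.post("http://localhost:8000/api/data/query", json=payload)
    print(resp.json()["total_count"])
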
@router.get("/analytics/summary", summary="Get analytics summary")
|
||||
async def get_analytics_summary(
|
||||
hours: int = Query(24, description="Hours of data to analyze"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get analytics summary for the specified time period"""
|
||||
try:
|
||||
result = await analytics_service.get_analytics_summary(hours)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting analytics summary: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/analytics/trends", summary="Get energy trends")
|
||||
async def get_energy_trends(
|
||||
hours: int = Query(168, description="Hours of data to analyze (default: 1 week)"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get energy consumption trends"""
|
||||
try:
|
||||
result = await analytics_service.get_energy_trends(hours)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting energy trends: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/analytics/rooms", summary="Get room comparison analytics")
|
||||
async def get_room_comparison(
|
||||
hours: int = Query(24, description="Hours of data to analyze"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get room-by-room comparison analytics"""
|
||||
try:
|
||||
result = await analytics_service.get_room_comparison(hours)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting room comparison: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/events", summary="Get system events")
|
||||
async def get_events(
|
||||
severity: Optional[str] = Query(None, description="Filter by severity"),
|
||||
event_type: Optional[str] = Query(None, description="Filter by event type"),
|
||||
hours: int = Query(24, description="Hours of events to retrieve"),
|
||||
limit: int = Query(50, description="Maximum events to return"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Get recent system events and alerts"""
|
||||
try:
|
||||
# Build filters
|
||||
filters = {}
|
||||
if severity:
|
||||
filters["severity"] = severity
|
||||
if event_type:
|
||||
filters["event_type"] = event_type
|
||||
|
||||
from ..infrastructure.repositories import SystemEventRepository
|
||||
repo = SystemEventRepository()
|
||||
|
||||
events = await repo.get_recent(
|
||||
hours=hours,
|
||||
limit=limit,
|
||||
filters=filters
|
||||
)
|
||||
|
||||
return {
|
||||
"events": events,
|
||||
"count": len(events),
|
||||
"period_hours": hours
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting events: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.put("/sensors/{sensor_id}/metadata", summary="Update sensor metadata")
|
||||
async def update_sensor_metadata(
|
||||
sensor_id: str,
|
||||
metadata: dict,
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Update sensor metadata"""
|
||||
try:
|
||||
success = await sensor_service.update_sensor_metadata(sensor_id, metadata)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Sensor not found")
|
||||
|
||||
return {"message": "Sensor metadata updated successfully"}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating sensor metadata: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.delete("/sensors/{sensor_id}", summary="Delete sensor and all its data")
|
||||
async def delete_sensor(sensor_id: str, db=Depends(check_database)):
|
||||
"""Delete a sensor and all its associated data"""
|
||||
try:
|
||||
result = await sensor_service.delete_sensor(sensor_id)
|
||||
|
||||
if result["readings_deleted"] == 0 and not result.get("metadata_deleted"):
|
||||
raise HTTPException(status_code=404, detail="Sensor not found")
|
||||
|
||||
return {
|
||||
"message": "Sensor deleted successfully",
|
||||
**result
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting sensor {sensor_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
@router.get("/export", summary="Export data")
|
||||
async def export_data(
|
||||
start_time: int = Query(..., description="Start timestamp (Unix)"),
|
||||
end_time: int = Query(..., description="End timestamp (Unix)"),
|
||||
sensor_ids: Optional[str] = Query(None, description="Comma-separated sensor IDs"),
|
||||
format: str = Query("json", description="Export format (json, csv)"),
|
||||
db=Depends(check_database)
|
||||
):
|
||||
"""Export sensor data for the specified time range"""
|
||||
try:
|
||||
# Build query
|
||||
query = {
|
||||
"created_at": {
|
||||
"$gte": datetime.fromtimestamp(start_time),
|
||||
"$lte": datetime.fromtimestamp(end_time)
|
||||
}
|
||||
}
|
||||
|
||||
if sensor_ids:
|
||||
sensor_list = [sid.strip() for sid in sensor_ids.split(",")]
|
||||
query["sensor_id"] = {"$in": sensor_list}
|
||||
|
||||
# Get data through repository
|
||||
from ..infrastructure.repositories import SensorReadingRepository
|
||||
repo = SensorReadingRepository()
|
||||
|
||||
readings = await repo.get_by_query(
|
||||
query=query,
|
||||
sort_by="timestamp",
|
||||
sort_order="asc",
|
||||
limit=10000 # Large limit for export
|
||||
)
|
||||
|
||||
# Convert datetime fields for JSON serialization
|
||||
for reading in readings:
|
||||
if "created_at" in reading and hasattr(reading["created_at"], "isoformat"):
|
||||
reading["created_at"] = reading["created_at"].isoformat()
|
||||
|
||||
if format.lower() == "csv":
|
||||
raise HTTPException(status_code=501, detail="CSV export not yet implemented")
|
||||
|
||||
return {
|
||||
"data": readings,
|
||||
"count": len(readings),
|
||||
"export_params": {
|
||||
"start_time": start_time,
|
||||
"end_time": end_time,
|
||||
"sensor_ids": sensor_ids.split(",") if sensor_ids else None,
|
||||
"format": format
|
||||
}
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error exporting data: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
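The CSV branch currently returns 501. A minimal sketch of how it could be implemented with the standard library and a streaming response; the column list is an assumption about the reading schema, not something this commit defines:

    import csv
    import io
    from fastapi.responses import StreamingResponse

    def readings_to_csv(readings: list) -> StreamingResponse:
        """Serialize readings to CSV and stream them back (sketch only)."""
        buf = io.StringIO()
        fieldnames = ["sensor_id", "room", "sensor_type", "value", "created_at"]  # assumed schema
        writer = csv.DictWriter(buf, fieldnames=fieldnames, extrasaction="ignore")
        writer.writeheader()
        writer.writerows(readings)
        buf.seek(0)
        return StreamingResponse(
            iter([buf.getvalue()]),
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=export.csv"},
        )
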
128
layers/presentation/redis_subscriber.py
Normal file
@@ -0,0 +1,128 @@
"""
Redis subscriber for real-time data processing
Presentation Layer - handles Redis pub/sub and WebSocket broadcasting
"""
import asyncio
import json
import logging

from ..infrastructure.redis_connection import redis_connection
from ..business.sensor_service import SensorService
from ..business.room_service import RoomService
from .websocket_handler import websocket_manager

logger = logging.getLogger(__name__)

class RedisSubscriber:
    """Manages Redis subscription and data broadcasting"""

    def __init__(self):
        self.sensor_service = SensorService()
        self.room_service = RoomService()
        self.is_running = False
        self.subscription_task = None

    async def start_subscription(self, channel: str = "energy_data") -> None:
        """Start Redis subscription in a background task"""
        if self.is_running:
            logger.warning("Redis subscriber is already running")
            return

        self.is_running = True
        self.subscription_task = asyncio.create_task(self._subscribe_loop(channel))
        logger.info(f"Started Redis subscriber for channel: {channel}")

    async def stop_subscription(self) -> None:
        """Stop Redis subscription"""
        self.is_running = False
        if self.subscription_task:
            self.subscription_task.cancel()
            try:
                await self.subscription_task
            except asyncio.CancelledError:
                pass
        logger.info("Redis subscriber stopped")

    async def _subscribe_loop(self, channel: str) -> None:
        """Main subscription loop"""
        logger.info("Starting Redis subscriber...")
        pubsub = None

        try:
            # Ensure the Redis connection is up, then create a pubsub object
            redis_client = await redis_connection.get_client()
            pubsub = await redis_connection.create_pubsub()

            # Subscribe to channel
            await pubsub.subscribe(channel)
            logger.info(f"Subscribed to Redis channel: '{channel}'")

            while self.is_running:
                try:
                    # Get message with timeout
                    message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)

                    if message and message.get('data'):
                        await self._process_message(message['data'])

                except Exception as e:
                    logger.error(f"Error in Redis subscriber loop: {e}")
                    # Add delay to prevent rapid-fire errors
                    await asyncio.sleep(5)

        except Exception as e:
            logger.error(f"Could not connect to Redis for subscription: {e}")
        finally:
            # Clean up the pubsub connection, but only if it was created
            if pubsub is not None:
                try:
                    await pubsub.unsubscribe(channel)
                    await pubsub.close()
                except Exception as e:
                    logger.error(f"Error closing pubsub connection: {e}")

    async def _process_message(self, message_data: str) -> None:
        """Process incoming Redis message"""
        try:
            logger.debug(f"Received from Redis: {message_data}")

            # Process sensor data through business layer
            processing_success = await self.sensor_service.process_sensor_message(message_data)

            if processing_success:
                # Extract room from message for room metrics update
                try:
                    data = json.loads(message_data)
                    room = data.get('room')
                    if room:
                        # Update room metrics asynchronously
                        asyncio.create_task(self.room_service.update_room_metrics(room))
                except json.JSONDecodeError:
                    logger.warning("Could not parse message for room extraction")

                # Broadcast to WebSocket clients
                await websocket_manager.broadcast(message_data)
            else:
                logger.warning("Sensor data processing failed, skipping broadcast")

        except Exception as e:
            logger.error(f"Error processing Redis message: {e}")

    def is_subscriber_running(self) -> bool:
        """Check if subscriber is currently running"""
        return self.is_running and (
            self.subscription_task is not None and
            not self.subscription_task.done()
        )

    async def get_subscriber_status(self) -> dict:
        """Get subscriber status information"""
        return {
            "is_running": self.is_running,
            "task_status": (
                "running" if self.subscription_task and not self.subscription_task.done()
                else "stopped"
            ),
            "active_websocket_connections": websocket_manager.get_connection_count()
        }

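For reference, a producer publishing to the energy_data channel could look like the following sketch, using redis-py's asyncio client (5.x assumed). The payload fields mirror what _process_message expects, namely valid JSON with a "room" key; all concrete values are illustrative:

    import asyncio
    import json
    import redis.asyncio as redis

    async def publish_sample_reading() -> None:
        # Connect to a local Redis instance (host/port are assumptions)
        client = redis.Redis(host="localhost", port=6379)
        reading = {
            "sensor_id": "sensor-1",
            "room": "kitchen",
            "sensor_type": "power",
            "value": 42.5,
            "timestamp": 1700000000,
        }
        # Publish a JSON-encoded reading on the channel the subscriber listens to
        await client.publish("energy_data", json.dumps(reading))
        await client.aclose()

    asyncio.run(publish_sample_reading())
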
# Global Redis subscriber instance
redis_subscriber = RedisSubscriber()

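A sketch of how this global instance might be started and stopped with the application, using FastAPI's lifespan hook. The app module and channel name are assumptions, not part of this commit:

    from contextlib import asynccontextmanager

    from fastapi import FastAPI
    from layers.presentation.redis_subscriber import redis_subscriber

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # Start the background subscription when the app boots...
        await redis_subscriber.start_subscription("energy_data")
        yield
        # ...and cancel it cleanly on shutdown
        await redis_subscriber.stop_subscription()

    app = FastAPI(lifespan=lifespan)
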
97
layers/presentation/websocket_handler.py
Normal file
@@ -0,0 +1,97 @@
"""
WebSocket connection handler
Presentation Layer - manages WebSocket connections and real-time communication
"""
import asyncio
from typing import List
from fastapi import WebSocket, WebSocketDisconnect
import logging

logger = logging.getLogger(__name__)

class WebSocketManager:
    """Manages WebSocket connections and broadcasting"""

    def __init__(self):
        self.active_connections: List[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        """Accept and store new WebSocket connection"""
        await websocket.accept()
        self.active_connections.append(websocket)
        logger.info(f"New client connected. Total clients: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket) -> None:
        """Remove WebSocket connection"""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
            logger.info(f"Client disconnected. Total clients: {len(self.active_connections)}")

    async def send_personal_message(self, message: str, websocket: WebSocket) -> None:
        """Send message to specific WebSocket connection"""
        try:
            await websocket.send_text(message)
        except Exception as e:
            logger.error(f"Error sending personal message: {e}")
            self.disconnect(websocket)

    async def broadcast(self, message: str) -> None:
        """Broadcast message to all connected clients"""
        if not self.active_connections:
            return

        try:
            # Snapshot the connection list so the result indices below stay
            # aligned even if connections are added or removed mid-broadcast
            connections = self.active_connections.copy()

            # Send to all connections concurrently
            tasks = [
                self._safe_send_message(connection, message)
                for connection in connections
            ]

            # Execute all sends concurrently and collect exceptions
            results = await asyncio.gather(*tasks, return_exceptions=True)

            # Remove failed connections (index into the snapshot, not the
            # live list, which may have changed in the meantime)
            failed_connections = [
                connections[i]
                for i, result in enumerate(results)
                if isinstance(result, Exception)
            ]

            for connection in failed_connections:
                self.disconnect(connection)

        except Exception as e:
            logger.error(f"Error in broadcast: {e}")

    async def _safe_send_message(self, websocket: WebSocket, message: str) -> None:
        """Safely send message to WebSocket with error handling"""
        try:
            await websocket.send_text(message)
        except WebSocketDisconnect:
            # Connection was closed
            raise
        except Exception as e:
            logger.error(f"Error sending message to client: {e}")
            raise

    def get_connection_count(self) -> int:
        """Get number of active connections"""
        return len(self.active_connections)

    async def ping_all_connections(self) -> int:
        """Ping all connections to check health, return number of healthy connections"""
        if not self.active_connections:
            return 0

        healthy_connections = []
        for connection in self.active_connections.copy():
            try:
                # Starlette's WebSocket exposes no ping() method, so send an
                # application-level ping message as a lightweight health check
                await connection.send_text('{"type": "ping"}')
                healthy_connections.append(connection)
            except Exception:
                logger.debug("Removing unhealthy connection")

        self.active_connections = healthy_connections
        return len(healthy_connections)

# Global WebSocket manager instance
websocket_manager = WebSocketManager()
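
For completeness, a sketch of a WebSocket endpoint that ties the manager into the FastAPI app. The /ws path and the surrounding app module are assumptions; no such endpoint appears in this commit:

    from fastapi import FastAPI, WebSocket, WebSocketDisconnect
    from layers.presentation.websocket_handler import websocket_manager

    app = FastAPI()

    @app.websocket("/ws")
    async def websocket_endpoint(websocket: WebSocket):
        await websocket_manager.connect(websocket)
        try:
            while True:
                # Keep the connection open; incoming client messages are
                # ignored, data flows outward via websocket_manager.broadcast()
                await websocket.receive_text()
        except WebSocketDisconnect:
            websocket_manager.disconnect(websocket)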