"""
|
|
API routes for the energy monitoring system
|
|
Presentation Layer - handles HTTP endpoints and request/response formatting
|
|
"""
|
|
from fastapi import APIRouter, HTTPException, Query, Depends
|
|
from typing import List, Optional, Dict, Any
|
|
from datetime import datetime, timedelta
|
|
import time
|
|
import logging
|
|
|
|
from models import (
|
|
DataQuery, DataResponse, SensorType, SensorStatus, HealthCheck
|
|
)
|
|
from ..business.sensor_service import SensorService
|
|
from ..business.room_service import RoomService
|
|
from ..business.analytics_service import AnalyticsService
|
|
from ..infrastructure.database_connection import database_connection
|
|
from ..infrastructure.redis_connection import redis_connection
|
|
|
|
logger = logging.getLogger(__name__)
|
|
router = APIRouter()
|
|
|
|
# Initialize services
|
|
sensor_service = SensorService()
|
|
room_service = RoomService()
|
|
analytics_service = AnalyticsService()
|
|
|
|
# Dependency to check database connection
|
|
async def check_database():
|
|
"""Dependency to ensure database is connected"""
|
|
try:
|
|
db = await database_connection.get_database()
|
|
return db
|
|
except Exception as e:
|
|
logger.error(f"Database connection failed: {e}")
|
|
raise HTTPException(status_code=503, detail="Database unavailable")
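
# check_database is wired into every route below via Depends(); the returned
# handle is not used by most handlers, but the dependency still rejects a
# request with a 503 whenever a database connection cannot be obtained.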

@router.get("/sensors", summary="Get all sensors")
async def get_sensors(
    room: Optional[str] = Query(None, description="Filter by room"),
    sensor_type: Optional[SensorType] = Query(None, description="Filter by sensor type"),
    status: Optional[SensorStatus] = Query(None, description="Filter by status"),
    db=Depends(check_database)
):
    """Get list of all registered sensors with optional filtering"""
    try:
        # Build filters
        filters = {}
        if room:
            filters["room"] = room
        if sensor_type:
            filters["sensor_type"] = sensor_type.value
        if status:
            filters["status"] = status.value

        result = await sensor_service.get_all_sensors(filters)
        return result

    except Exception as e:
        logger.error(f"Error getting sensors: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
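
# Illustrative request (placeholder values; the route prefix depends on how this
# router is mounted in the application):
#   GET /sensors?room=kitchen&sensor_type=temperature&status=active
# sensor_type and status must match members of SensorType / SensorStatus in models.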

@router.get("/sensors/{sensor_id}", summary="Get sensor details")
async def get_sensor(sensor_id: str, db=Depends(check_database)):
    """Get detailed information about a specific sensor"""
    try:
        result = await sensor_service.get_sensor_details(sensor_id)

        if not result:
            raise HTTPException(status_code=404, detail="Sensor not found")

        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting sensor {sensor_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
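
# The bare `except HTTPException: raise` keeps the 404 above from being swallowed
# and re-reported as a 500 by the generic handler; the same pattern is repeated in
# the other endpoints that raise HTTPException inside their try block.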

@router.get("/sensors/{sensor_id}/data", summary="Get sensor historical data")
async def get_sensor_data(
    sensor_id: str,
    start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
    end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
    limit: int = Query(100, description="Maximum records to return"),
    offset: int = Query(0, description="Records to skip"),
    db=Depends(check_database)
):
    """Get historical data for a specific sensor"""
    try:
        start_query_time = time.time()

        # Build query
        query = {"sensor_id": sensor_id}

        if start_time or end_time:
            time_query = {}
            if start_time:
                time_query["$gte"] = datetime.fromtimestamp(start_time)
            if end_time:
                time_query["$lte"] = datetime.fromtimestamp(end_time)
            query["created_at"] = time_query

        # Get total count and readings from the reading repository
        from ..infrastructure.repositories import SensorReadingRepository
        repo = SensorReadingRepository()

        total_count = await repo.count_by_query(query)
        readings = await repo.get_by_query(
            query=query,
            sort_by="timestamp",
            sort_order="desc",
            limit=limit,
            offset=offset
        )

        execution_time = (time.time() - start_query_time) * 1000

        return DataResponse(
            data=readings,
            total_count=total_count,
            query=DataQuery(
                sensor_ids=[sensor_id],
                start_time=start_time,
                end_time=end_time,
                limit=limit,
                offset=offset
            ),
            execution_time_ms=execution_time
        )

    except Exception as e:
        logger.error(f"Error getting sensor data for {sensor_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
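
# Note: the optional time filter above targets `created_at` while results are
# sorted by `timestamp`, so ingestion time is filtered but the reading's own
# timestamp drives ordering. Unix timestamps are converted with
# datetime.fromtimestamp() and are therefore interpreted in the server's local
# timezone. Illustrative request (sensor id and epoch values are placeholders):
#   GET /sensors/sensor-001/data?start_time=1700000000&end_time=1700086400&limit=50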

@router.get("/rooms", summary="Get all rooms")
async def get_rooms(db=Depends(check_database)):
    """Get list of all rooms with sensor counts and latest metrics"""
    try:
        result = await room_service.get_all_rooms()
        return result

    except Exception as e:
        logger.error(f"Error getting rooms: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.get("/rooms/{room_name}/data", summary="Get room historical data")
async def get_room_data(
    room_name: str,
    start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
    end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
    limit: int = Query(100, description="Maximum records to return"),
    db=Depends(check_database)
):
    """Get historical data for a specific room"""
    try:
        start_query_time = time.time()

        result = await room_service.get_room_data(
            room_name=room_name,
            start_time=start_time,
            end_time=end_time,
            limit=limit
        )

        execution_time = (time.time() - start_query_time) * 1000
        result["execution_time_ms"] = execution_time

        return result

    except Exception as e:
        logger.error(f"Error getting room data for {room_name}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.post("/data/query", summary="Advanced data query", response_model=DataResponse)
async def query_data(query_params: DataQuery, db=Depends(check_database)):
    """Advanced data querying with multiple filters and aggregations"""
    try:
        start_query_time = time.time()

        # Build MongoDB query
        mongo_query = {}

        # Sensor filters
        if query_params.sensor_ids:
            mongo_query["sensor_id"] = {"$in": query_params.sensor_ids}

        if query_params.rooms:
            mongo_query["room"] = {"$in": query_params.rooms}

        if query_params.sensor_types:
            mongo_query["sensor_type"] = {"$in": [st.value for st in query_params.sensor_types]}

        # Time range
        if query_params.start_time or query_params.end_time:
            time_query = {}
            if query_params.start_time:
                time_query["$gte"] = datetime.fromtimestamp(query_params.start_time)
            if query_params.end_time:
                time_query["$lte"] = datetime.fromtimestamp(query_params.end_time)
            mongo_query["created_at"] = time_query

        # Execute query through repository
        from ..infrastructure.repositories import SensorReadingRepository
        repo = SensorReadingRepository()

        total_count = await repo.count_by_query(mongo_query)
        readings = await repo.get_by_query(
            query=mongo_query,
            sort_by=query_params.sort_by,
            sort_order=query_params.sort_order,
            limit=query_params.limit,
            offset=query_params.offset
        )

        execution_time = (time.time() - start_query_time) * 1000

        return DataResponse(
            data=readings,
            total_count=total_count,
            query=query_params,
            execution_time_ms=execution_time
        )

    except Exception as e:
        logger.error(f"Error executing data query: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
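
# Illustrative POST /data/query body (field names taken from the DataQuery usage
# above; the authoritative schema lives in models.DataQuery, and sensor_types
# values must match SensorType members):
# {
#   "rooms": ["kitchen"],
#   "sensor_types": ["temperature"],
#   "start_time": 1700000000,
#   "end_time": 1700086400,
#   "sort_by": "timestamp",
#   "sort_order": "desc",
#   "limit": 100,
#   "offset": 0
# }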

@router.get("/analytics/summary", summary="Get analytics summary")
async def get_analytics_summary(
    hours: int = Query(24, description="Hours of data to analyze"),
    db=Depends(check_database)
):
    """Get analytics summary for the specified time period"""
    try:
        result = await analytics_service.get_analytics_summary(hours)
        return result

    except Exception as e:
        logger.error(f"Error getting analytics summary: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.get("/analytics/trends", summary="Get energy trends")
async def get_energy_trends(
    hours: int = Query(168, description="Hours of data to analyze (default: 1 week)"),
    db=Depends(check_database)
):
    """Get energy consumption trends"""
    try:
        result = await analytics_service.get_energy_trends(hours)
        return result

    except Exception as e:
        logger.error(f"Error getting energy trends: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.get("/analytics/rooms", summary="Get room comparison analytics")
async def get_room_comparison(
    hours: int = Query(24, description="Hours of data to analyze"),
    db=Depends(check_database)
):
    """Get room-by-room comparison analytics"""
    try:
        result = await analytics_service.get_room_comparison(hours)
        return result

    except Exception as e:
        logger.error(f"Error getting room comparison: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.get("/events", summary="Get system events")
async def get_events(
    severity: Optional[str] = Query(None, description="Filter by severity"),
    event_type: Optional[str] = Query(None, description="Filter by event type"),
    hours: int = Query(24, description="Hours of events to retrieve"),
    limit: int = Query(50, description="Maximum events to return"),
    db=Depends(check_database)
):
    """Get recent system events and alerts"""
    try:
        # Build filters
        filters = {}
        if severity:
            filters["severity"] = severity
        if event_type:
            filters["event_type"] = event_type

        from ..infrastructure.repositories import SystemEventRepository
        repo = SystemEventRepository()

        events = await repo.get_recent(
            hours=hours,
            limit=limit,
            filters=filters
        )

        return {
            "events": events,
            "count": len(events),
            "period_hours": hours
        }

    except Exception as e:
        logger.error(f"Error getting events: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
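
# Illustrative request (severity/event_type are plain strings and are not
# validated against an enum here; accepted values are whatever the
# SystemEventRepository stores):
#   GET /events?severity=warning&hours=48&limit=20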

@router.put("/sensors/{sensor_id}/metadata", summary="Update sensor metadata")
async def update_sensor_metadata(
    sensor_id: str,
    metadata: dict,
    db=Depends(check_database)
):
    """Update sensor metadata"""
    try:
        success = await sensor_service.update_sensor_metadata(sensor_id, metadata)

        if not success:
            raise HTTPException(status_code=404, detail="Sensor not found")

        return {"message": "Sensor metadata updated successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating sensor metadata: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")

@router.delete("/sensors/{sensor_id}", summary="Delete sensor and all its data")
async def delete_sensor(sensor_id: str, db=Depends(check_database)):
    """Delete a sensor and all its associated data"""
    try:
        result = await sensor_service.delete_sensor(sensor_id)

        if result["readings_deleted"] == 0 and not result.get("metadata_deleted"):
            raise HTTPException(status_code=404, detail="Sensor not found")

        return {
            "message": "Sensor deleted successfully",
            **result
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting sensor {sensor_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
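
# delete_sensor reports 404 when nothing was deleted; a registered sensor that
# happens to have zero readings and no metadata document is therefore
# indistinguishable from an unknown id.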

@router.get("/export", summary="Export data")
async def export_data(
    start_time: int = Query(..., description="Start timestamp (Unix)"),
    end_time: int = Query(..., description="End timestamp (Unix)"),
    sensor_ids: Optional[str] = Query(None, description="Comma-separated sensor IDs"),
    format: str = Query("json", description="Export format (json, csv)"),
    db=Depends(check_database)
):
    """Export sensor data for the specified time range"""
    try:
        # Build query
        query = {
            "created_at": {
                "$gte": datetime.fromtimestamp(start_time),
                "$lte": datetime.fromtimestamp(end_time)
            }
        }

        if sensor_ids:
            sensor_list = [sid.strip() for sid in sensor_ids.split(",")]
            query["sensor_id"] = {"$in": sensor_list}

        # Get data through repository
        from ..infrastructure.repositories import SensorReadingRepository
        repo = SensorReadingRepository()

        readings = await repo.get_by_query(
            query=query,
            sort_by="timestamp",
            sort_order="asc",
            limit=10000  # Large limit for export
        )

        # Convert datetime fields for JSON serialization
        for reading in readings:
            if "created_at" in reading and hasattr(reading["created_at"], "isoformat"):
                reading["created_at"] = reading["created_at"].isoformat()

        if format.lower() == "csv":
            raise HTTPException(status_code=501, detail="CSV export not yet implemented")

        return {
            "data": readings,
            "count": len(readings),
            "export_params": {
                "start_time": start_time,
                "end_time": end_time,
                "sensor_ids": sensor_ids.split(",") if sensor_ids else None,
                "format": format
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error exporting data: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
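
# Export is capped at 10,000 readings per request and only JSON is implemented;
# format=csv currently returns 501 only after the data has already been fetched,
# so CSV requests still pay the full query cost.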