# sac4cps-backend/api.py

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import time
import logging

from pymongo import ASCENDING, DESCENDING

from database import get_database, redis_manager
from models import (
    DataQuery, DataResponse, SensorReading, SensorMetadata,
    RoomMetrics, SystemEvent, SensorType, SensorStatus
)
from persistence import persistence_service
from services.token_service import TokenService

logger = logging.getLogger(__name__)
router = APIRouter()

# Dependency to get database
async def get_db():
    return await get_database()
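
# Illustrative only: how this router might be mounted in the application.
# The module layout and URL prefix below are assumptions, not confirmed by
# this file:
#
#   from fastapi import FastAPI
#   from api import router
#
#   app = FastAPI()
#   app.include_router(router, prefix="/api/v1")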
@router.get("/sensors", summary="Get all sensors")
async def get_sensors(
room: Optional[str] = Query(None, description="Filter by room"),
sensor_type: Optional[SensorType] = Query(None, description="Filter by sensor type"),
status: Optional[SensorStatus] = Query(None, description="Filter by status"),
db=Depends(get_db)
):
"""Get list of all registered sensors with optional filtering"""
try:
# Build query
query = {}
if room:
query["room"] = room
if sensor_type:
query["sensor_type"] = sensor_type.value
if status:
query["status"] = status.value
# Execute query
cursor = db.sensor_metadata.find(query).sort("created_at", DESCENDING)
sensors = await cursor.to_list(length=None)
# Convert ObjectId to string
for sensor in sensors:
sensor["_id"] = str(sensor["_id"])
return {
"sensors": sensors,
"count": len(sensors),
"query": query
}
except Exception as e:
logger.error(f"Error getting sensors: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sensors/{sensor_id}", summary="Get sensor details")
async def get_sensor(sensor_id: str, db=Depends(get_db)):
"""Get detailed information about a specific sensor"""
try:
# Get sensor metadata
sensor = await db.sensor_metadata.find_one({"sensor_id": sensor_id})
if not sensor:
raise HTTPException(status_code=404, detail="Sensor not found")
sensor["_id"] = str(sensor["_id"])
# Get recent readings (last 24 hours)
recent_readings = await persistence_service.get_recent_readings(
sensor_id=sensor_id,
limit=100,
minutes=1440 # 24 hours
)
# Get latest reading from Redis
latest_reading = await redis_manager.get_sensor_data(sensor_id)
return {
"sensor": sensor,
"latest_reading": latest_reading,
"recent_readings_count": len(recent_readings),
"recent_readings": recent_readings[:10] # Return only 10 most recent
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting sensor {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sensors/{sensor_id}/data", summary="Get sensor historical data")
async def get_sensor_data(
sensor_id: str,
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
limit: int = Query(100, description="Maximum records to return"),
offset: int = Query(0, description="Records to skip"),
db=Depends(get_db)
):
"""Get historical data for a specific sensor"""
try:
start_query_time = time.time()
# Build time range query
query = {"sensor_id": sensor_id}
if start_time or end_time:
time_query = {}
if start_time:
time_query["$gte"] = datetime.fromtimestamp(start_time)
if end_time:
time_query["$lte"] = datetime.fromtimestamp(end_time)
query["created_at"] = time_query
# Get total count
total_count = await db.sensor_readings.count_documents(query)
# Execute query with pagination
cursor = db.sensor_readings.find(query).sort("timestamp", DESCENDING).skip(offset).limit(limit)
readings = await cursor.to_list(length=limit)
# Convert ObjectId to string
for reading in readings:
reading["_id"] = str(reading["_id"])
execution_time = (time.time() - start_query_time) * 1000 # Convert to milliseconds
return DataResponse(
data=readings,
total_count=total_count,
query=DataQuery(
sensor_ids=[sensor_id],
start_time=start_time,
end_time=end_time,
limit=limit,
offset=offset
),
execution_time_ms=execution_time
)
except Exception as e:
logger.error(f"Error getting sensor data for {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
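
# Example request (illustrative; host, port, sensor ID, and any router prefix
# depend on how the app mounts this router):
#   curl "http://localhost:8000/sensors/sensor-001/data?start_time=1757376000&end_time=1757462400&limit=50"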
@router.get("/rooms", summary="Get all rooms")
async def get_rooms(db=Depends(get_db)):
"""Get list of all rooms with sensor counts"""
try:
# Get distinct rooms from sensor readings
rooms = await db.sensor_readings.distinct("room", {"room": {"$ne": None}})
room_data = []
for room in rooms:
# Get sensor count for each room
sensor_count = len(await db.sensor_readings.distinct("sensor_id", {"room": room}))
# Get latest room metrics from Redis
room_metrics = await redis_manager.get_room_metrics(room)
room_data.append({
"room": room,
"sensor_count": sensor_count,
"latest_metrics": room_metrics
})
return {
"rooms": room_data,
"count": len(room_data)
}
except Exception as e:
logger.error(f"Error getting rooms: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
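
# Note: get_rooms issues one distinct() query per room (an N+1 pattern). If
# that becomes a bottleneck, a single aggregation can compute all the counts
# in one round trip; a hedged sketch over the same collection and fields:
#
#   pipeline = [
#       {"$match": {"room": {"$ne": None}}},
#       {"$group": {"_id": "$room", "sensors": {"$addToSet": "$sensor_id"}}},
#       {"$project": {"room": "$_id", "sensor_count": {"$size": "$sensors"}}},
#   ]
#   room_counts = await db.sensor_readings.aggregate(pipeline).to_list(length=None)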
@router.get("/rooms/{room_name}/data", summary="Get room historical data")
async def get_room_data(
room_name: str,
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
limit: int = Query(100, description="Maximum records to return"),
db=Depends(get_db)
):
"""Get historical data for a specific room"""
try:
start_query_time = time.time()
# Build query for room metrics
query = {"room": room_name}
if start_time or end_time:
time_query = {}
if start_time:
time_query["$gte"] = datetime.fromtimestamp(start_time)
if end_time:
time_query["$lte"] = datetime.fromtimestamp(end_time)
query["created_at"] = time_query
# Get room metrics
cursor = db.room_metrics.find(query).sort("timestamp", DESCENDING).limit(limit)
room_metrics = await cursor.to_list(length=limit)
# Also get sensor readings for the room
sensor_query = {"room": room_name}
if "created_at" in query:
sensor_query["created_at"] = query["created_at"]
sensor_cursor = db.sensor_readings.find(sensor_query).sort("timestamp", DESCENDING).limit(limit)
sensor_readings = await sensor_cursor.to_list(length=limit)
# Convert ObjectId to string
for item in room_metrics + sensor_readings:
item["_id"] = str(item["_id"])
execution_time = (time.time() - start_query_time) * 1000
return {
"room": room_name,
"room_metrics": room_metrics,
"sensor_readings": sensor_readings,
"execution_time_ms": execution_time
}
except Exception as e:
logger.error(f"Error getting room data for {room_name}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/data/query", summary="Advanced data query", response_model=DataResponse)
async def query_data(query_params: DataQuery, db=Depends(get_db)):
"""Advanced data querying with multiple filters and aggregations"""
try:
start_query_time = time.time()
# Build MongoDB query
mongo_query = {}
# Sensor filters
if query_params.sensor_ids:
mongo_query["sensor_id"] = {"$in": query_params.sensor_ids}
if query_params.rooms:
mongo_query["room"] = {"$in": query_params.rooms}
if query_params.sensor_types:
mongo_query["sensor_type"] = {"$in": [st.value for st in query_params.sensor_types]}
# Time range
if query_params.start_time or query_params.end_time:
time_query = {}
if query_params.start_time:
time_query["$gte"] = datetime.fromtimestamp(query_params.start_time)
if query_params.end_time:
time_query["$lte"] = datetime.fromtimestamp(query_params.end_time)
mongo_query["created_at"] = time_query
# Get total count
total_count = await db.sensor_readings.count_documents(mongo_query)
# Execute query with pagination and sorting
sort_direction = DESCENDING if query_params.sort_order == "desc" else ASCENDING
cursor = db.sensor_readings.find(mongo_query).sort(
query_params.sort_by, sort_direction
).skip(query_params.offset).limit(query_params.limit)
readings = await cursor.to_list(length=query_params.limit)
# Convert ObjectId to string
for reading in readings:
reading["_id"] = str(reading["_id"])
execution_time = (time.time() - start_query_time) * 1000
return DataResponse(
data=readings,
total_count=total_count,
query=query_params,
execution_time_ms=execution_time
)
except Exception as e:
logger.error(f"Error executing data query: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
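
# Example POST body for /data/query (illustrative values; field names follow
# the DataQuery fields referenced above):
#   {
#     "sensor_ids": ["sensor-001", "sensor-002"],
#     "rooms": ["lab"],
#     "start_time": 1757376000,
#     "end_time": 1757462400,
#     "limit": 100,
#     "offset": 0,
#     "sort_by": "timestamp",
#     "sort_order": "desc"
#   }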
@router.get("/analytics/summary", summary="Get analytics summary")
async def get_analytics_summary(
hours: int = Query(24, description="Hours of data to analyze"),
db=Depends(get_db)
):
"""Get analytics summary for the specified time period"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Aggregation pipeline for analytics
pipeline = [
{"$match": {"created_at": {"$gte": start_time}}},
{"$group": {
"_id": {
"sensor_id": "$sensor_id",
"room": "$room",
"sensor_type": "$sensor_type"
},
"reading_count": {"$sum": 1},
"avg_energy": {"$avg": "$energy.value"},
"total_energy": {"$sum": "$energy.value"},
"avg_co2": {"$avg": "$co2.value"},
"max_co2": {"$max": "$co2.value"},
"avg_temperature": {"$avg": "$temperature.value"},
"latest_timestamp": {"$max": "$timestamp"}
}},
{"$sort": {"total_energy": -1}}
]
cursor = db.sensor_readings.aggregate(pipeline)
analytics = await cursor.to_list(length=None)
# Room-level summary
room_pipeline = [
{"$match": {"created_at": {"$gte": start_time}, "room": {"$ne": None}}},
{"$group": {
"_id": "$room",
"sensor_count": {"$addToSet": "$sensor_id"},
"total_energy": {"$sum": "$energy.value"},
"avg_co2": {"$avg": "$co2.value"},
"max_co2": {"$max": "$co2.value"},
"reading_count": {"$sum": 1}
}},
{"$project": {
"room": "$_id",
"sensor_count": {"$size": "$sensor_count"},
"total_energy": 1,
"avg_co2": 1,
"max_co2": 1,
"reading_count": 1
}},
{"$sort": {"total_energy": -1}}
]
room_cursor = db.sensor_readings.aggregate(room_pipeline)
room_analytics = await room_cursor.to_list(length=None)
return {
"period_hours": hours,
"start_time": start_time.isoformat(),
"sensor_analytics": analytics,
"room_analytics": room_analytics,
"summary": {
"total_sensors_analyzed": len(analytics),
"total_rooms_analyzed": len(room_analytics),
"total_readings": sum(item["reading_count"] for item in analytics)
}
}
except Exception as e:
logger.error(f"Error getting analytics summary: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
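
# Illustrative shape of one sensor_analytics entry from the pipeline above
# (values invented for the example, keys as produced by the $group stage):
#   {"_id": {"sensor_id": "sensor-001", "room": "lab", "sensor_type": "energy"},
#    "reading_count": 1440, "avg_energy": 0.42, "total_energy": 604.8,
#    "avg_co2": 512.0, "max_co2": 890.0, "avg_temperature": 21.3,
#    "latest_timestamp": 1757462400}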
@router.get("/events", summary="Get system events")
async def get_events(
severity: Optional[str] = Query(None, description="Filter by severity"),
event_type: Optional[str] = Query(None, description="Filter by event type"),
hours: int = Query(24, description="Hours of events to retrieve"),
limit: int = Query(50, description="Maximum events to return"),
db=Depends(get_db)
):
"""Get recent system events and alerts"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Build query
query = {"created_at": {"$gte": start_time}}
if severity:
query["severity"] = severity
if event_type:
query["event_type"] = event_type
# Execute query
cursor = db.system_events.find(query).sort("timestamp", DESCENDING).limit(limit)
events = await cursor.to_list(length=limit)
# Convert ObjectId to string
for event in events:
event["_id"] = str(event["_id"])
return {
"events": events,
"count": len(events),
"period_hours": hours
}
except Exception as e:
logger.error(f"Error getting events: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.put("/sensors/{sensor_id}/metadata", summary="Update sensor metadata")
async def update_sensor_metadata(
sensor_id: str,
metadata: dict,
db=Depends(get_db)
):
"""Update sensor metadata"""
try:
# Update timestamp
metadata["updated_at"] = datetime.utcnow()
result = await db.sensor_metadata.update_one(
{"sensor_id": sensor_id},
{"$set": metadata}
)
if result.matched_count == 0:
raise HTTPException(status_code=404, detail="Sensor not found")
return {"message": "Sensor metadata updated successfully", "modified": result.modified_count > 0}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error updating sensor metadata: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

@router.delete("/sensors/{sensor_id}", summary="Delete sensor and all its data")
async def delete_sensor(sensor_id: str, db=Depends(get_db)):
    """Delete a sensor and all its associated data"""
    try:
        # Delete sensor readings
        readings_result = await db.sensor_readings.delete_many({"sensor_id": sensor_id})

        # Delete sensor metadata
        metadata_result = await db.sensor_metadata.delete_one({"sensor_id": sensor_id})

        # Delete from Redis cache
        await redis_manager.redis_client.delete(f"sensor:latest:{sensor_id}")
        await redis_manager.redis_client.delete(f"sensor:status:{sensor_id}")

        if metadata_result.deleted_count == 0:
            raise HTTPException(status_code=404, detail="Sensor not found")

        return {
            "message": "Sensor deleted successfully",
            "sensor_id": sensor_id,
            "readings_deleted": readings_result.deleted_count,
            "metadata_deleted": metadata_result.deleted_count
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting sensor {sensor_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/export", summary="Export data")
async def export_data(
start_time: int = Query(..., description="Start timestamp (Unix)"),
end_time: int = Query(..., description="End timestamp (Unix)"),
sensor_ids: Optional[str] = Query(None, description="Comma-separated sensor IDs"),
format: str = Query("json", description="Export format (json, csv)"),
db=Depends(get_db)
):
"""Export sensor data for the specified time range"""
try:
# Build query
query = {
"created_at": {
"$gte": datetime.fromtimestamp(start_time),
"$lte": datetime.fromtimestamp(end_time)
}
}
if sensor_ids:
sensor_list = [sid.strip() for sid in sensor_ids.split(",")]
query["sensor_id"] = {"$in": sensor_list}
# Get data
cursor = db.sensor_readings.find(query).sort("timestamp", ASCENDING)
readings = await cursor.to_list(length=None)
# Convert ObjectId to string
for reading in readings:
reading["_id"] = str(reading["_id"])
# Convert datetime to ISO string for JSON serialization
if "created_at" in reading:
reading["created_at"] = reading["created_at"].isoformat()
if format.lower() == "csv":
# TODO: Implement CSV export
raise HTTPException(status_code=501, detail="CSV export not yet implemented")
return {
"data": readings,
"count": len(readings),
"export_params": {
"start_time": start_time,
"end_time": end_time,
"sensor_ids": sensor_ids.split(",") if sensor_ids else None,
"format": format
}
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error exporting data: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
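
# Hedged sketch for the CSV TODO above: flatten each reading into a row and
# serialize it with the standard csv module. This helper is not part of the
# original implementation; the column list is an assumption based on fields
# referenced elsewhere in this file, and nested measurements (energy, co2,
# temperature) would need their own flattening step.
def _readings_to_csv(readings: List[dict]) -> str:
    """Serialize reading documents to a CSV string (illustrative helper)."""
    import csv
    import io

    output = io.StringIO()
    fieldnames = ["_id", "sensor_id", "room", "sensor_type", "timestamp", "created_at"]
    writer = csv.DictWriter(output, fieldnames=fieldnames, extrasaction="ignore")
    writer.writeheader()
    for reading in readings:
        writer.writerow(reading)
    return output.getvalue()
# The export endpoint could then return it with, e.g.:
#   from fastapi.responses import Response
#   return Response(content=_readings_to_csv(readings), media_type="text/csv")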

# Token Management Endpoints

@router.get("/tokens", summary="Get all tokens")
async def get_tokens(db=Depends(get_db)):
    """Get list of all tokens"""
    try:
        token_service = TokenService(db)
        tokens = await token_service.get_tokens()
        return {"tokens": tokens}
    except Exception as e:
        logger.error(f"Error getting tokens: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/tokens/generate", summary="Generate new token")
async def generate_token(
name: str,
list_of_resources: List[str],
data_aggregation: bool = False,
time_aggregation: bool = False,
embargo: int = 0,
exp_hours: int = 24,
db=Depends(get_db)
):
"""Generate a new JWT token with specified permissions"""
try:
token_service = TokenService(db)
token = token_service.generate_token(
name=name,
list_of_resources=list_of_resources,
data_aggregation=data_aggregation,
time_aggregation=time_aggregation,
embargo=embargo,
exp_hours=exp_hours
)
return {"token": token}
except Exception as e:
logger.error(f"Error generating token: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/tokens/check", summary="Validate token")
async def check_token(token: str, db=Depends(get_db)):
"""Check token validity and decode payload"""
try:
token_service = TokenService(db)
decoded = token_service.decode_token(token)
return decoded
except Exception as e:
logger.error(f"Error checking token: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/tokens/save", summary="Save token to database")
async def save_token(token: str, db=Depends(get_db)):
"""Save a valid token to the database"""
try:
token_service = TokenService(db)
result = await token_service.insert_token(token)
return result
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error saving token: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/tokens/revoke", summary="Revoke token")
async def revoke_token(token: str, db=Depends(get_db)):
"""Revoke a token by marking it as inactive"""
try:
token_service = TokenService(db)
result = await token_service.revoke_token(token)
return result
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error revoking token: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
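
# Illustrative token lifecycle against these endpoints (paths relative to
# wherever this router is mounted; per the signatures above, simple values
# arrive as query parameters and list_of_resources as the JSON body):
#   1. POST /tokens/generate?name=dashboard&exp_hours=24   body: ["sensor-001"]
#   2. POST /tokens/save?token=<jwt>     -> persists the token
#   3. POST /tokens/check?token=<jwt>    -> returns the decoded payload
#   4. POST /tokens/revoke?token=<jwt>   -> marks it inactive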