first commit
362
layers/infrastructure/repositories.py
Normal file
@@ -0,0 +1,362 @@
"""
Repository classes for data access.

Infrastructure layer: handles MongoDB and Redis operations and queries.
"""
import json
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from pymongo import ASCENDING, DESCENDING
from pymongo.errors import DuplicateKeyError
import logging

from .database_connection import database_connection
from .redis_connection import redis_connection
from models import SensorReading, SensorMetadata, RoomMetrics, SystemEvent

logger = logging.getLogger(__name__)

class SensorReadingRepository:
    """Repository for sensor reading data operations"""

    async def create(self, reading: SensorReading) -> bool:
        """Store sensor reading in MongoDB"""
        try:
            db = await database_connection.get_database()
            reading_dict = reading.dict()

            # Add document ID for deduplication
            reading_dict["_id"] = f"{reading.sensor_id}_{reading.timestamp}"

            await db.sensor_readings.insert_one(reading_dict)
            logger.debug(f"Stored sensor reading for {reading.sensor_id}")
            return True

        except DuplicateKeyError:
            logger.debug(f"Duplicate reading ignored for {reading.sensor_id} at {reading.timestamp}")
            return True
        except Exception as e:
            logger.error(f"Error storing sensor reading: {e}")
            return False

    async def get_recent_by_sensor(self, sensor_id: str, limit: int = 100, minutes: int = 60) -> List[Dict]:
        """Get recent readings for a specific sensor"""
        try:
            db = await database_connection.get_database()
            query = {
                "sensor_id": sensor_id,
                "created_at": {"$gte": datetime.utcnow() - timedelta(minutes=minutes)}
            }

            cursor = db.sensor_readings.find(query).sort("created_at", DESCENDING).limit(limit)
            readings = await cursor.to_list(length=limit)

            # Convert ObjectId to string
            for reading in readings:
                reading["_id"] = str(reading["_id"])

            return readings

        except Exception as e:
            logger.error(f"Error getting recent readings for {sensor_id}: {e}")
            return []

    async def get_recent_by_room(self, room: str, minutes: int = 5) -> List[Dict]:
        """Get recent readings for a specific room"""
        try:
            db = await database_connection.get_database()
            recent_time = datetime.utcnow() - timedelta(minutes=minutes)

            cursor = db.sensor_readings.find({
                "room": room,
                "created_at": {"$gte": recent_time}
            })

            readings = await cursor.to_list(length=None)
            return readings

        except Exception as e:
            logger.error(f"Error getting recent readings for room {room}: {e}")
            return []

    async def get_by_query(self, query: Dict[str, Any], sort_by: str = "timestamp",
                           sort_order: str = "desc", limit: int = 100, offset: int = 0) -> List[Dict]:
        """Get readings by complex query"""
        try:
            db = await database_connection.get_database()

            sort_direction = DESCENDING if sort_order == "desc" else ASCENDING
            cursor = db.sensor_readings.find(query).sort(sort_by, sort_direction).skip(offset).limit(limit)

            readings = await cursor.to_list(length=limit)

            # Convert ObjectId to string
            for reading in readings:
                reading["_id"] = str(reading["_id"])

            return readings

        except Exception as e:
            logger.error(f"Error querying sensor readings: {e}")
            return []

    async def count_by_query(self, query: Dict[str, Any]) -> int:
        """Count readings matching query"""
        try:
            db = await database_connection.get_database()
            return await db.sensor_readings.count_documents(query)
        except Exception as e:
            logger.error(f"Error counting sensor readings: {e}")
            return 0

    async def get_distinct_rooms(self) -> List[str]:
        """Get list of distinct rooms"""
        try:
            db = await database_connection.get_database()
            return await db.sensor_readings.distinct("room", {"room": {"$ne": None}})
        except Exception as e:
            logger.error(f"Error getting distinct rooms: {e}")
            return []

    async def get_distinct_sensor_ids_by_room(self, room: str) -> List[str]:
        """Get distinct sensor IDs for a room"""
        try:
            db = await database_connection.get_database()
            return await db.sensor_readings.distinct("sensor_id", {"room": room})
        except Exception as e:
            logger.error(f"Error getting distinct sensor IDs for room {room}: {e}")
            return []

    async def delete_by_sensor_id(self, sensor_id: str) -> int:
        """Delete all readings for a sensor"""
        try:
            db = await database_connection.get_database()
            result = await db.sensor_readings.delete_many({"sensor_id": sensor_id})
            return result.deleted_count
        except Exception as e:
            logger.error(f"Error deleting readings for sensor {sensor_id}: {e}")
            return 0

    async def aggregate(self, pipeline: List[Dict]) -> List[Dict]:
        """Execute aggregation pipeline"""
        try:
            db = await database_connection.get_database()
            cursor = db.sensor_readings.aggregate(pipeline)
            return await cursor.to_list(length=None)
        except Exception as e:
            logger.error(f"Error executing aggregation: {e}")
            return []

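# Usage sketch (illustrative only, not part of this module's API): how an async
# service-layer caller might use SensorReadingRepository. The sensor ID and
# query values below are placeholders.
#
#     repo = SensorReadingRepository()
#     ok = await repo.create(reading)  # reading: SensorReading
#     latest = await repo.get_recent_by_sensor("temp-01", limit=10, minutes=30)
#     page = await repo.get_by_query({"room": "lab"}, sort_by="created_at",
#                                    sort_order="desc", limit=50, offset=0)
#     total = await repo.count_by_query({"room": "lab"})
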
class SensorMetadataRepository:
    """Repository for sensor metadata operations"""

    async def create(self, metadata: SensorMetadata) -> bool:
        """Create sensor metadata"""
        try:
            db = await database_connection.get_database()
            await db.sensor_metadata.insert_one(metadata.dict())
            logger.info(f"Created metadata for sensor: {metadata.sensor_id}")
            return True
        except Exception as e:
            logger.error(f"Error creating sensor metadata: {e}")
            return False

    async def update(self, sensor_id: str, updates: Dict[str, Any]) -> bool:
        """Update sensor metadata"""
        try:
            db = await database_connection.get_database()
            updates["updated_at"] = datetime.utcnow()

            result = await db.sensor_metadata.update_one(
                {"sensor_id": sensor_id},
                {"$set": updates}
            )
            return result.modified_count > 0
        except Exception as e:
            logger.error(f"Error updating sensor metadata: {e}")
            return False

    async def get_by_sensor_id(self, sensor_id: str) -> Optional[Dict]:
        """Get sensor metadata by ID"""
        try:
            db = await database_connection.get_database()
            metadata = await db.sensor_metadata.find_one({"sensor_id": sensor_id})
            if metadata:
                metadata["_id"] = str(metadata["_id"])
            return metadata
        except Exception as e:
            logger.error(f"Error getting sensor metadata: {e}")
            return None

    async def get_all(self, filters: Optional[Dict[str, Any]] = None) -> List[Dict]:
        """Get all sensor metadata with optional filters"""
        try:
            db = await database_connection.get_database()
            query = filters or {}

            cursor = db.sensor_metadata.find(query).sort("created_at", DESCENDING)
            metadata_list = await cursor.to_list(length=None)

            # Convert ObjectId to string
            for metadata in metadata_list:
                metadata["_id"] = str(metadata["_id"])

            return metadata_list
        except Exception as e:
            logger.error(f"Error getting sensor metadata: {e}")
            return []

    async def delete(self, sensor_id: str) -> bool:
        """Delete sensor metadata"""
        try:
            db = await database_connection.get_database()
            result = await db.sensor_metadata.delete_one({"sensor_id": sensor_id})
            return result.deleted_count > 0
        except Exception as e:
            logger.error(f"Error deleting sensor metadata: {e}")
            return False

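# Usage sketch (illustrative only): registering and maintaining sensor metadata.
# The field names passed to update() are placeholders, not fields guaranteed by
# the models module.
#
#     meta_repo = SensorMetadataRepository()
#     await meta_repo.create(metadata)  # metadata: SensorMetadata
#     await meta_repo.update("temp-01", {"room": "lab", "status": "active"})
#     doc = await meta_repo.get_by_sensor_id("temp-01")
#     docs = await meta_repo.get_all({"status": "active"})
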
class RoomMetricsRepository:
    """Repository for room metrics operations"""

    async def create(self, metrics: RoomMetrics) -> bool:
        """Store room metrics"""
        try:
            db = await database_connection.get_database()
            await db.room_metrics.insert_one(metrics.dict())
            logger.debug(f"Stored room metrics for {metrics.room}")
            return True
        except Exception as e:
            logger.error(f"Error storing room metrics: {e}")
            return False

    async def get_by_room(self, room: str, limit: int = 100) -> List[Dict]:
        """Get room metrics by room name"""
        try:
            db = await database_connection.get_database()
            cursor = db.room_metrics.find({"room": room}).sort("timestamp", DESCENDING).limit(limit)
            metrics = await cursor.to_list(length=limit)

            # Convert ObjectId to string
            for metric in metrics:
                metric["_id"] = str(metric["_id"])

            return metrics
        except Exception as e:
            logger.error(f"Error getting room metrics for {room}: {e}")
            return []

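# Usage sketch (illustrative only): persisting aggregated room metrics and
# reading back the most recent entries for a room; the room name is a placeholder.
#
#     room_repo = RoomMetricsRepository()
#     await room_repo.create(metrics)  # metrics: RoomMetrics
#     history = await room_repo.get_by_room("lab", limit=24)
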
class SystemEventRepository:
    """Repository for system event operations"""

    async def create(self, event: SystemEvent) -> bool:
        """Create system event"""
        try:
            db = await database_connection.get_database()
            await db.system_events.insert_one(event.dict())
            logger.info(f"System event logged: {event.event_type} - {event.title}")
            return True
        except Exception as e:
            logger.error(f"Error logging system event: {e}")
            return False

    async def get_recent(self, hours: int = 24, limit: int = 50,
                         filters: Optional[Dict[str, Any]] = None) -> List[Dict]:
        """Get recent system events"""
        try:
            db = await database_connection.get_database()
            start_time = datetime.utcnow() - timedelta(hours=hours)

            query = {"created_at": {"$gte": start_time}}
            if filters:
                query.update(filters)

            cursor = db.system_events.find(query).sort("timestamp", DESCENDING).limit(limit)
            events = await cursor.to_list(length=limit)

            # Convert ObjectId to string
            for event in events:
                event["_id"] = str(event["_id"])

            return events
        except Exception as e:
            logger.error(f"Error getting recent events: {e}")
            return []

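# Usage sketch (illustrative only): recording and querying system events. The
# "sensor_offline" event type is a placeholder value, not one defined by this module.
#
#     event_repo = SystemEventRepository()
#     await event_repo.create(event)  # event: SystemEvent
#     recent = await event_repo.get_recent(hours=6, limit=20,
#                                          filters={"event_type": "sensor_offline"})
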
class RedisRepository:
    """Repository for Redis cache operations"""

    async def set_sensor_data(self, sensor_id: str, data: Dict[str, Any], expire_seconds: int = 3600) -> bool:
        """Store latest sensor data in Redis cache"""
        try:
            key = f"sensor:latest:{sensor_id}"
            json_data = json.dumps(data)
            await redis_connection.set_with_expiry(key, json_data, expire_seconds)
            return True
        except Exception as e:
            logger.error(f"Error caching sensor data: {e}")
            return False

    async def get_sensor_data(self, sensor_id: str) -> Optional[Dict[str, Any]]:
        """Get latest sensor data from Redis cache"""
        try:
            key = f"sensor:latest:{sensor_id}"
            data = await redis_connection.get(key)
            if data:
                return json.loads(data)
            return None
        except Exception as e:
            logger.error(f"Error getting cached sensor data: {e}")
            return None

    async def set_sensor_status(self, sensor_id: str, status_data: Dict[str, Any], expire_seconds: int = 1800) -> bool:
        """Set sensor status in Redis"""
        try:
            key = f"sensor:status:{sensor_id}"
            json_data = json.dumps(status_data)
            await redis_connection.set_with_expiry(key, json_data, expire_seconds)
            return True
        except Exception as e:
            logger.error(f"Error setting sensor status: {e}")
            return False

    async def set_room_metrics(self, room: str, metrics: Dict[str, Any], expire_seconds: int = 1800) -> bool:
        """Store room metrics in Redis cache"""
        try:
            key = f"room:metrics:{room}"
            json_data = json.dumps(metrics)
            await redis_connection.set_with_expiry(key, json_data, expire_seconds)
            return True
        except Exception as e:
            logger.error(f"Error caching room metrics: {e}")
            return False

    async def get_room_metrics(self, room: str) -> Optional[Dict[str, Any]]:
        """Get room metrics from Redis cache"""
        try:
            key = f"room:metrics:{room}"
            data = await redis_connection.get(key)
            if data:
                return json.loads(data)
            return None
        except Exception as e:
            logger.error(f"Error getting cached room metrics: {e}")
            return None

    async def get_active_sensors(self) -> List[str]:
        """Get list of active sensors from Redis"""
        try:
            keys = await redis_connection.get_keys_by_pattern("sensor:latest:*")
            return [key.replace("sensor:latest:", "") for key in keys]
        except Exception as e:
            logger.error(f"Error getting active sensors: {e}")
            return []

    async def delete_sensor_cache(self, sensor_id: str) -> bool:
        """Delete all cached data for a sensor"""
        try:
            await redis_connection.delete(f"sensor:latest:{sensor_id}")
            await redis_connection.delete(f"sensor:status:{sensor_id}")
            return True
        except Exception as e:
            logger.error(f"Error deleting sensor cache: {e}")
            return False
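
# Usage sketch (illustrative only): per-sensor and per-room cache access. The
# sensor ID and payload values are placeholders; the TTLs restate the defaults above.
#
#     cache = RedisRepository()
#     await cache.set_sensor_data("temp-01", {"temperature": 21.5}, expire_seconds=3600)
#     latest = await cache.get_sensor_data("temp-01")  # None on a cache miss
#     active = await cache.get_active_sensors()
#     await cache.delete_sensor_cache("temp-01")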