first commit

rafaeldpsilva
2025-09-09 13:46:42 +01:00
commit a7a18e6295
77 changed files with 8678 additions and 0 deletions

@@ -0,0 +1 @@
# Empty file to make this a Python package

@@ -0,0 +1,95 @@
"""
Database connection management for MongoDB
Infrastructure Layer - handles low-level database connectivity
"""
import os
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from pymongo import IndexModel, ASCENDING, DESCENDING
from typing import Optional
import logging
logger = logging.getLogger(__name__)
class DatabaseConnection:
"""Manages MongoDB connection and database operations"""
def __init__(self):
self.client: Optional[AsyncIOMotorClient] = None
self.database: Optional[AsyncIOMotorDatabase] = None
self._mongodb_url = os.getenv("MONGODB_URL", "mongodb://localhost:27017")
self._database_name = os.getenv("DATABASE_NAME", "energy_monitoring")
async def connect(self) -> None:
"""Establish connection to MongoDB"""
try:
logger.info(f"Connecting to MongoDB at: {self._mongodb_url}")
self.client = AsyncIOMotorClient(self._mongodb_url)
await self.client.admin.command('ping')
self.database = self.client[self._database_name]
await self._create_indexes()
logger.info("Successfully connected to MongoDB")
except Exception as e:
logger.error(f"Error connecting to MongoDB: {e}")
raise
async def disconnect(self) -> None:
"""Close MongoDB connection"""
if self.client:
self.client.close()
logger.info("Disconnected from MongoDB")
async def get_database(self) -> AsyncIOMotorDatabase:
"""Get database instance"""
if not self.database:
await self.connect()
return self.database
async def _create_indexes(self) -> None:
"""Create database indexes for optimal performance"""
try:
# Sensor readings collection indexes
sensor_readings_indexes = [
IndexModel([("sensor_id", ASCENDING), ("timestamp", DESCENDING)]),
IndexModel([("timestamp", DESCENDING)]),
IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]),
IndexModel([("sensor_type", ASCENDING), ("timestamp", DESCENDING)]),
IndexModel([("created_at", DESCENDING)]),
]
await self.database.sensor_readings.create_indexes(sensor_readings_indexes)
# Room metrics collection indexes
room_metrics_indexes = [
IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]),
IndexModel([("timestamp", DESCENDING)]),
IndexModel([("created_at", DESCENDING)]),
]
await self.database.room_metrics.create_indexes(room_metrics_indexes)
# Sensor metadata collection indexes
sensor_metadata_indexes = [
IndexModel([("sensor_id", ASCENDING)], unique=True),
IndexModel([("room", ASCENDING)]),
IndexModel([("sensor_type", ASCENDING)]),
IndexModel([("status", ASCENDING)]),
]
await self.database.sensor_metadata.create_indexes(sensor_metadata_indexes)
# System events collection indexes
system_events_indexes = [
IndexModel([("timestamp", DESCENDING)]),
IndexModel([("event_type", ASCENDING), ("timestamp", DESCENDING)]),
IndexModel([("severity", ASCENDING), ("timestamp", DESCENDING)]),
]
await self.database.system_events.create_indexes(system_events_indexes)
logger.info("Database indexes created successfully")
except Exception as e:
logger.error(f"Error creating indexes: {e}")
# Global database connection instance
database_connection = DatabaseConnection()
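
The file above (presumably database_connection.py, judging by the relative import `from .database_connection import database_connection` in the repository file later in this commit) ends by exposing a single shared `database_connection` instance. A minimal usage sketch, assuming a plain asyncio entry point that is not part of this commit:

```python
import asyncio
import logging

# Hypothetical import path; the commit does not show the package layout.
from database_connection import database_connection

logging.basicConfig(level=logging.INFO)


async def main() -> None:
    # Connect once at startup; _create_indexes() runs as part of connect().
    await database_connection.connect()
    try:
        db = await database_connection.get_database()
        # Any collection on the shared database handle is now usable.
        total = await db.sensor_readings.count_documents({})
        print(f"sensor_readings documents: {total}")
    finally:
        # Close the Motor client on shutdown.
        await database_connection.disconnect()


if __name__ == "__main__":
    asyncio.run(main())
```

Calling connect() eagerly at startup surfaces connection and index-creation failures immediately, rather than on the first lazy get_database() call.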

@@ -0,0 +1,80 @@
"""
Redis connection management and operations
Infrastructure Layer - handles Redis connectivity and low-level operations
"""
import os
import json
from typing import Optional, Dict, Any
import logging
import redis.asyncio as redis
logger = logging.getLogger(__name__)
class RedisConnection:
"""Manages Redis connection and basic operations"""
def __init__(self):
self.redis_client: Optional[redis.Redis] = None
self._host = os.getenv("REDIS_HOST", "localhost")
self._port = int(os.getenv("REDIS_PORT", "6379"))
self._db = int(os.getenv("REDIS_DB", "0"))
async def connect(self) -> None:
"""Connect to Redis"""
try:
self.redis_client = redis.Redis(
host=self._host,
port=self._port,
db=self._db,
decode_responses=True
)
await self.redis_client.ping()
logger.info("Successfully connected to Redis")
except Exception as e:
logger.error(f"Error connecting to Redis: {e}")
raise
async def disconnect(self) -> None:
"""Disconnect from Redis"""
if self.redis_client:
await self.redis_client.close()
logger.info("Disconnected from Redis")
async def get_client(self) -> redis.Redis:
"""Get Redis client instance"""
if not self.redis_client:
await self.connect()
return self.redis_client
async def set_with_expiry(self, key: str, value: str, expire_seconds: int = 3600) -> None:
"""Set a key-value pair with expiration"""
client = await self.get_client()
await client.setex(key, expire_seconds, value)
async def get(self, key: str) -> Optional[str]:
"""Get value by key"""
client = await self.get_client()
return await client.get(key)
async def delete(self, key: str) -> None:
"""Delete a key"""
client = await self.get_client()
await client.delete(key)
async def get_keys_by_pattern(self, pattern: str) -> list:
"""Get keys matching a pattern"""
client = await self.get_client()
return await client.keys(pattern)
async def publish(self, channel: str, message: str) -> None:
"""Publish message to a channel"""
client = await self.get_client()
await client.publish(channel, message)
async def create_pubsub(self) -> redis.client.PubSub:
"""Create a pub/sub instance"""
client = await self.get_client()
return client.pubsub()
# Global Redis connection instance
redis_connection = RedisConnection()
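
As with the MongoDB module, this file (presumably redis_connection.py, matching the relative import in the repository file below) exposes a shared `redis_connection` instance. A minimal sketch of how it could be used; the sensor ID, payload, and channel name are illustrative assumptions, not part of the commit:

```python
import asyncio
import json

# Hypothetical import path; the commit does not show the package layout.
from redis_connection import redis_connection


async def main() -> None:
    # Cache a JSON payload under the same key pattern the repositories use.
    payload = json.dumps({"sensor_id": "sensor-1", "value": 21.5})  # illustrative data
    await redis_connection.set_with_expiry("sensor:latest:sensor-1", payload, expire_seconds=3600)

    # decode_responses=True in RedisConnection means get() returns str (or None), not bytes.
    cached = await redis_connection.get("sensor:latest:sensor-1")
    print(json.loads(cached) if cached else "cache miss")

    # Publish to a channel; the channel name here is an assumption for illustration.
    await redis_connection.publish("sensor_updates", payload)

    await redis_connection.disconnect()


if __name__ == "__main__":
    asyncio.run(main())
```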

@@ -0,0 +1,362 @@
"""
Repository classes for data access
Infrastructure Layer - handles database operations and queries
"""
import json
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from pymongo import ASCENDING, DESCENDING
from pymongo.errors import DuplicateKeyError
import logging
from .database_connection import database_connection
from .redis_connection import redis_connection
from models import SensorReading, SensorMetadata, RoomMetrics, SystemEvent
logger = logging.getLogger(__name__)
class SensorReadingRepository:
"""Repository for sensor reading data operations"""
async def create(self, reading: SensorReading) -> bool:
"""Store sensor reading in MongoDB"""
try:
db = await database_connection.get_database()
reading_dict = reading.dict()
# Add document ID for deduplication
reading_dict["_id"] = f"{reading.sensor_id}_{reading.timestamp}"
await db.sensor_readings.insert_one(reading_dict)
logger.debug(f"Stored sensor reading for {reading.sensor_id}")
return True
except DuplicateKeyError:
logger.debug(f"Duplicate reading ignored for {reading.sensor_id} at {reading.timestamp}")
return True
except Exception as e:
logger.error(f"Error storing sensor reading: {e}")
return False
async def get_recent_by_sensor(self, sensor_id: str, limit: int = 100, minutes: int = 60) -> List[Dict]:
"""Get recent readings for a specific sensor"""
try:
db = await database_connection.get_database()
query = {
"sensor_id": sensor_id,
"created_at": {"$gte": datetime.utcnow() - timedelta(minutes=minutes)}
}
cursor = db.sensor_readings.find(query).sort("created_at", -1).limit(limit)
readings = await cursor.to_list(length=limit)
# Convert ObjectId to string
for reading in readings:
reading["_id"] = str(reading["_id"])
return readings
except Exception as e:
logger.error(f"Error getting recent readings for {sensor_id}: {e}")
return []
async def get_recent_by_room(self, room: str, minutes: int = 5) -> List[Dict]:
"""Get recent readings for a specific room"""
try:
db = await database_connection.get_database()
recent_time = datetime.utcnow() - timedelta(minutes=minutes)
cursor = db.sensor_readings.find({
"room": room,
"created_at": {"$gte": recent_time}
})
readings = await cursor.to_list(length=None)
return readings
except Exception as e:
logger.error(f"Error getting recent readings for room {room}: {e}")
return []
async def get_by_query(self, query: Dict[str, Any], sort_by: str = "timestamp",
sort_order: str = "desc", limit: int = 100, offset: int = 0) -> List[Dict]:
"""Get readings by complex query"""
try:
db = await database_connection.get_database()
sort_direction = DESCENDING if sort_order == "desc" else ASCENDING
cursor = db.sensor_readings.find(query).sort(sort_by, sort_direction).skip(offset).limit(limit)
readings = await cursor.to_list(length=limit)
# Convert ObjectId to string
for reading in readings:
reading["_id"] = str(reading["_id"])
return readings
except Exception as e:
logger.error(f"Error querying sensor readings: {e}")
return []
async def count_by_query(self, query: Dict[str, Any]) -> int:
"""Count readings matching query"""
try:
db = await database_connection.get_database()
return await db.sensor_readings.count_documents(query)
except Exception as e:
logger.error(f"Error counting sensor readings: {e}")
return 0
async def get_distinct_rooms(self) -> List[str]:
"""Get list of distinct rooms"""
try:
db = await database_connection.get_database()
return await db.sensor_readings.distinct("room", {"room": {"$ne": None}})
except Exception as e:
logger.error(f"Error getting distinct rooms: {e}")
return []
async def get_distinct_sensor_ids_by_room(self, room: str) -> List[str]:
"""Get distinct sensor IDs for a room"""
try:
db = await database_connection.get_database()
return await db.sensor_readings.distinct("sensor_id", {"room": room})
except Exception as e:
logger.error(f"Error getting distinct sensor IDs for room {room}: {e}")
return []
async def delete_by_sensor_id(self, sensor_id: str) -> int:
"""Delete all readings for a sensor"""
try:
db = await database_connection.get_database()
result = await db.sensor_readings.delete_many({"sensor_id": sensor_id})
return result.deleted_count
except Exception as e:
logger.error(f"Error deleting readings for sensor {sensor_id}: {e}")
return 0
async def aggregate(self, pipeline: List[Dict]) -> List[Dict]:
"""Execute aggregation pipeline"""
try:
db = await database_connection.get_database()
cursor = db.sensor_readings.aggregate(pipeline)
return await cursor.to_list(length=None)
except Exception as e:
logger.error(f"Error executing aggregation: {e}")
return []
class SensorMetadataRepository:
"""Repository for sensor metadata operations"""
async def create(self, metadata: SensorMetadata) -> bool:
"""Create sensor metadata"""
try:
db = await database_connection.get_database()
await db.sensor_metadata.insert_one(metadata.dict())
logger.info(f"Created metadata for sensor: {metadata.sensor_id}")
return True
except Exception as e:
logger.error(f"Error creating sensor metadata: {e}")
return False
async def update(self, sensor_id: str, updates: Dict[str, Any]) -> bool:
"""Update sensor metadata"""
try:
db = await database_connection.get_database()
updates["updated_at"] = datetime.utcnow()
result = await db.sensor_metadata.update_one(
{"sensor_id": sensor_id},
{"$set": updates}
)
return result.modified_count > 0
except Exception as e:
logger.error(f"Error updating sensor metadata: {e}")
return False
async def get_by_sensor_id(self, sensor_id: str) -> Optional[Dict]:
"""Get sensor metadata by ID"""
try:
db = await database_connection.get_database()
metadata = await db.sensor_metadata.find_one({"sensor_id": sensor_id})
if metadata:
metadata["_id"] = str(metadata["_id"])
return metadata
except Exception as e:
logger.error(f"Error getting sensor metadata: {e}")
return None
async def get_all(self, filters: Optional[Dict[str, Any]] = None) -> List[Dict]:
"""Get all sensor metadata with optional filters"""
try:
db = await database_connection.get_database()
query = filters or {}
cursor = db.sensor_metadata.find(query).sort("created_at", DESCENDING)
metadata_list = await cursor.to_list(length=None)
# Convert ObjectId to string
for metadata in metadata_list:
metadata["_id"] = str(metadata["_id"])
return metadata_list
except Exception as e:
logger.error(f"Error getting sensor metadata: {e}")
return []
async def delete(self, sensor_id: str) -> bool:
"""Delete sensor metadata"""
try:
db = await database_connection.get_database()
result = await db.sensor_metadata.delete_one({"sensor_id": sensor_id})
return result.deleted_count > 0
except Exception as e:
logger.error(f"Error deleting sensor metadata: {e}")
return False
class RoomMetricsRepository:
"""Repository for room metrics operations"""
async def create(self, metrics: RoomMetrics) -> bool:
"""Store room metrics"""
try:
db = await database_connection.get_database()
await db.room_metrics.insert_one(metrics.dict())
logger.debug(f"Stored room metrics for {metrics.room}")
return True
except Exception as e:
logger.error(f"Error storing room metrics: {e}")
return False
async def get_by_room(self, room: str, limit: int = 100) -> List[Dict]:
"""Get room metrics by room name"""
try:
db = await database_connection.get_database()
cursor = db.room_metrics.find({"room": room}).sort("timestamp", DESCENDING).limit(limit)
metrics = await cursor.to_list(length=limit)
# Convert ObjectId to string
for metric in metrics:
metric["_id"] = str(metric["_id"])
return metrics
except Exception as e:
logger.error(f"Error getting room metrics for {room}: {e}")
return []
class SystemEventRepository:
"""Repository for system events operations"""
async def create(self, event: SystemEvent) -> bool:
"""Create system event"""
try:
db = await database_connection.get_database()
await db.system_events.insert_one(event.dict())
logger.info(f"System event logged: {event.event_type} - {event.title}")
return True
except Exception as e:
logger.error(f"Error logging system event: {e}")
return False
async def get_recent(self, hours: int = 24, limit: int = 50,
filters: Optional[Dict[str, Any]] = None) -> List[Dict]:
"""Get recent system events"""
try:
db = await database_connection.get_database()
start_time = datetime.utcnow() - timedelta(hours=hours)
query = {"created_at": {"$gte": start_time}}
if filters:
query.update(filters)
cursor = db.system_events.find(query).sort("timestamp", DESCENDING).limit(limit)
events = await cursor.to_list(length=limit)
# Convert ObjectId to string
for event in events:
event["_id"] = str(event["_id"])
return events
except Exception as e:
logger.error(f"Error getting recent events: {e}")
return []
class RedisRepository:
"""Repository for Redis cache operations"""
async def set_sensor_data(self, sensor_id: str, data: Dict[str, Any], expire_seconds: int = 3600) -> bool:
"""Store latest sensor data in Redis cache"""
try:
key = f"sensor:latest:{sensor_id}"
json_data = json.dumps(data)
await redis_connection.set_with_expiry(key, json_data, expire_seconds)
return True
except Exception as e:
logger.error(f"Error caching sensor data: {e}")
return False
async def get_sensor_data(self, sensor_id: str) -> Optional[Dict[str, Any]]:
"""Get latest sensor data from Redis cache"""
try:
key = f"sensor:latest:{sensor_id}"
data = await redis_connection.get(key)
if data:
return json.loads(data)
return None
except Exception as e:
logger.error(f"Error getting cached sensor data: {e}")
return None
async def set_sensor_status(self, sensor_id: str, status_data: Dict[str, Any], expire_seconds: int = 1800) -> bool:
"""Set sensor status in Redis"""
try:
key = f"sensor:status:{sensor_id}"
json_data = json.dumps(status_data)
await redis_connection.set_with_expiry(key, json_data, expire_seconds)
return True
except Exception as e:
logger.error(f"Error setting sensor status: {e}")
return False
async def set_room_metrics(self, room: str, metrics: Dict[str, Any], expire_seconds: int = 1800) -> bool:
"""Store room metrics in Redis cache"""
try:
key = f"room:metrics:{room}"
json_data = json.dumps(metrics)
await redis_connection.set_with_expiry(key, json_data, expire_seconds)
return True
except Exception as e:
logger.error(f"Error caching room metrics: {e}")
return False
async def get_room_metrics(self, room: str) -> Optional[Dict[str, Any]]:
"""Get room metrics from Redis cache"""
try:
key = f"room:metrics:{room}"
data = await redis_connection.get(key)
if data:
return json.loads(data)
return None
except Exception as e:
logger.error(f"Error getting cached room metrics: {e}")
return None
async def get_active_sensors(self) -> List[str]:
"""Get list of active sensors from Redis"""
try:
keys = await redis_connection.get_keys_by_pattern("sensor:latest:*")
return [key.replace("sensor:latest:", "") for key in keys]
except Exception as e:
logger.error(f"Error getting active sensors: {e}")
return []
async def delete_sensor_cache(self, sensor_id: str) -> bool:
"""Delete all cached data for a sensor"""
try:
await redis_connection.delete(f"sensor:latest:{sensor_id}")
await redis_connection.delete(f"sensor:status:{sensor_id}")
return True
except Exception as e:
logger.error(f"Error deleting sensor cache: {e}")
return False
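
The repositories above are built around a shared MongoDB handle plus a Redis cache keyed as `sensor:latest:<sensor_id>`. A sketch of a cache-aside read path that combines them; the `SensorQueryService` class, the import path, and the datetime-to-ISO conversion step are assumptions layered on top of this commit, not code it contains:

```python
from typing import Any, Dict, Optional

# Hypothetical module name for the repository file shown above.
from repositories import RedisRepository, SensorReadingRepository


class SensorQueryService:
    """Cache-aside read path: Redis first, MongoDB as the fallback."""

    def __init__(self) -> None:
        self.cache = RedisRepository()
        self.readings = SensorReadingRepository()

    async def get_latest(self, sensor_id: str) -> Optional[Dict[str, Any]]:
        # 1. Try the sensor:latest:<id> cache entry first.
        cached = await self.cache.get_sensor_data(sensor_id)
        if cached is not None:
            return cached

        # 2. Fall back to the most recent MongoDB reading for this sensor.
        recent = await self.readings.get_recent_by_sensor(sensor_id, limit=1)
        if not recent:
            return None

        # 3. Make the document JSON-safe (datetimes -> ISO strings) before re-caching,
        #    since set_sensor_data() serializes with json.dumps.
        latest = {
            key: value.isoformat() if hasattr(value, "isoformat") else value
            for key, value in recent[0].items()
        }
        await self.cache.set_sensor_data(sensor_id, latest, expire_seconds=3600)
        return latest
```

The JSON-safety step matters because RedisRepository.set_sensor_data would otherwise swallow a serialization error on raw datetime fields and silently skip re-populating the cache.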