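"""Database layer for the energy monitoring service.

Manages the MongoDB (Motor) connection and indexes, a Redis cache for the
latest sensor data and per-room metrics, and a scheduled retention cleanup.
"""
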
import asyncio
import json
import logging
import os
from datetime import datetime, timedelta, timezone
from typing import Optional

from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from pymongo import ASCENDING, DESCENDING, IndexModel

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class MongoDB:
    """Holds the shared Motor client and database handles."""

    client: Optional[AsyncIOMotorClient] = None
    database: Optional[AsyncIOMotorDatabase] = None


# Global MongoDB instance
mongodb = MongoDB()


async def connect_to_mongo():
    """Create database connection"""
    try:
        # MongoDB connection string - default to localhost for development
        mongodb_url = os.getenv("MONGODB_URL", "mongodb://localhost:27017")
        database_name = os.getenv("DATABASE_NAME", "energy_monitoring")

        logger.info(f"Connecting to MongoDB at: {mongodb_url}")

        # Create async MongoDB client
        mongodb.client = AsyncIOMotorClient(mongodb_url)

        # Test the connection
        await mongodb.client.admin.command('ping')
        logger.info("Successfully connected to MongoDB")

        # Get database
        mongodb.database = mongodb.client[database_name]

        # Create indexes for better performance
        await create_indexes()

    except Exception as e:
        logger.error(f"Error connecting to MongoDB: {e}")
        raise


async def close_mongo_connection():
    """Close database connection"""
    if mongodb.client is not None:
        mongodb.client.close()
        logger.info("Disconnected from MongoDB")
async def create_indexes():
    """Create database indexes for optimal performance.

    createIndexes is a no-op for indexes that already exist, so this is safe
    to run on every startup.
    """
    try:
        # Sensor readings collection indexes
        sensor_readings_indexes = [
            IndexModel([("sensor_id", ASCENDING), ("timestamp", DESCENDING)]),
            IndexModel([("timestamp", DESCENDING)]),
            IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]),
            IndexModel([("sensor_type", ASCENDING), ("timestamp", DESCENDING)]),
            IndexModel([("created_at", DESCENDING)]),
        ]
        await mongodb.database.sensor_readings.create_indexes(sensor_readings_indexes)

        # Room metrics collection indexes
        room_metrics_indexes = [
            IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]),
            IndexModel([("timestamp", DESCENDING)]),
            IndexModel([("created_at", DESCENDING)]),
        ]
        await mongodb.database.room_metrics.create_indexes(room_metrics_indexes)

        # Sensor metadata collection indexes
        sensor_metadata_indexes = [
            IndexModel([("sensor_id", ASCENDING)], unique=True),
            IndexModel([("room", ASCENDING)]),
            IndexModel([("sensor_type", ASCENDING)]),
            IndexModel([("status", ASCENDING)]),
        ]
        await mongodb.database.sensor_metadata.create_indexes(sensor_metadata_indexes)

        # System events collection indexes
        system_events_indexes = [
            IndexModel([("timestamp", DESCENDING)]),
            IndexModel([("event_type", ASCENDING), ("timestamp", DESCENDING)]),
            IndexModel([("severity", ASCENDING), ("timestamp", DESCENDING)]),
        ]
        await mongodb.database.system_events.create_indexes(system_events_indexes)

        logger.info("Database indexes created successfully")

    except Exception as e:
        # Logged but not re-raised: the app can still run (more slowly)
        # without indexes.
        logger.error(f"Error creating indexes: {e}")


async def get_database() -> AsyncIOMotorDatabase:
    """Get database instance, connecting on first use"""
    # Compare with None: PyMongo/Motor database objects do not support
    # truth-value testing.
    if mongodb.database is None:
        await connect_to_mongo()
    return mongodb.database


class RedisManager:
    """Redis connection and operations manager"""

    def __init__(self):
        self.redis_client = None
        self.redis_host = os.getenv("REDIS_HOST", "localhost")
        self.redis_port = int(os.getenv("REDIS_PORT", "6379"))
        self.redis_db = int(os.getenv("REDIS_DB", "0"))

    async def connect(self):
        """Connect to Redis"""
        try:
            # Imported lazily so the redis package remains an optional dependency.
            import redis.asyncio as redis

            self.redis_client = redis.Redis(
                host=self.redis_host,
                port=self.redis_port,
                db=self.redis_db,
                decode_responses=True,
            )
            await self.redis_client.ping()
            logger.info("Successfully connected to Redis")
        except Exception as e:
            logger.error(f"Error connecting to Redis: {e}")
            raise

    async def disconnect(self):
        """Disconnect from Redis"""
        if self.redis_client:
            # redis-py 5.x prefers aclose(); close() is the redis-py 4.x name.
            await self.redis_client.close()
            logger.info("Disconnected from Redis")

    async def set_sensor_data(self, sensor_id: str, data: dict, expire_time: int = 3600):
        """Store latest sensor data in Redis with expiration"""
        if not self.redis_client:
            await self.connect()

        key = f"sensor:latest:{sensor_id}"
        # json.dumps (rather than str()) so get_sensor_data can round-trip the
        # value through json.loads; default=str covers non-JSON types such as
        # datetime.
        await self.redis_client.setex(key, expire_time, json.dumps(data, default=str))

    async def get_sensor_data(self, sensor_id: str) -> Optional[dict]:
        """Get latest sensor data from Redis"""
        if not self.redis_client:
            await self.connect()

        key = f"sensor:latest:{sensor_id}"
        data = await self.redis_client.get(key)
        if data:
            return json.loads(data)
        return None

    async def set_room_metrics(self, room: str, metrics: dict, expire_time: int = 1800):
        """Store room aggregated metrics in Redis"""
        if not self.redis_client:
            await self.connect()

        key = f"room:metrics:{room}"
        await self.redis_client.setex(key, expire_time, json.dumps(metrics, default=str))

    async def get_room_metrics(self, room: str) -> Optional[dict]:
        """Get room aggregated metrics from Redis"""
        if not self.redis_client:
            await self.connect()

        key = f"room:metrics:{room}"
        data = await self.redis_client.get(key)
        if data:
            return json.loads(data)
        return None

    async def get_all_active_sensors(self) -> list:
        """Get list of all sensors with recent data in Redis"""
        if not self.redis_client:
            await self.connect()

        # SCAN iterates in small batches instead of blocking the server the
        # way KEYS can on a large keyspace.
        sensor_ids = []
        async for key in self.redis_client.scan_iter("sensor:latest:*"):
            sensor_ids.append(key.replace("sensor:latest:", ""))
        return sensor_ids


# Global Redis manager instance
redis_manager = RedisManager()
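
# Example cache-aside read (hypothetical helper, for illustration only):
# check Redis first, fall back to MongoDB, then repopulate the cache.
#
#     async def latest_reading(sensor_id: str) -> Optional[dict]:
#         cached = await redis_manager.get_sensor_data(sensor_id)
#         if cached is not None:
#             return cached
#         db = await get_database()
#         doc = await db.sensor_readings.find_one(
#             {"sensor_id": sensor_id}, sort=[("timestamp", DESCENDING)]
#         )
#         if doc is not None:
#             doc.pop("_id", None)  # ObjectId is not JSON-serializable
#             await redis_manager.set_sensor_data(sensor_id, doc)
#         return doc
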
async def cleanup_old_data():
    """Cleanup old data from MongoDB (retention policy)"""
    try:
        db = await get_database()

        # Delete sensor readings older than 90 days.
        # datetime.now(timezone.utc) replaces the deprecated datetime.utcnow().
        retention_date = datetime.now(timezone.utc) - timedelta(days=90)
        result = await db.sensor_readings.delete_many({
            "created_at": {"$lt": retention_date}
        })

        if result.deleted_count > 0:
            logger.info(f"Deleted {result.deleted_count} old sensor readings")

        # Delete room metrics older than 30 days
        retention_date = datetime.now(timezone.utc) - timedelta(days=30)
        result = await db.room_metrics.delete_many({
            "created_at": {"$lt": retention_date}
        })

        if result.deleted_count > 0:
            logger.info(f"Deleted {result.deleted_count} old room metrics")

    except Exception as e:
        logger.error(f"Error cleaning up old data: {e}")


# Scheduled cleanup task
async def schedule_cleanup():
    """Schedule periodic cleanup of old data"""
    while True:
        try:
            await cleanup_old_data()
            # Wait 24 hours before next cleanup
            await asyncio.sleep(24 * 60 * 60)
        except Exception as e:
            logger.error(f"Error in scheduled cleanup: {e}")
            # Wait 1 hour before retrying
            await asyncio.sleep(60 * 60)
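
# Minimal smoke test for local development, assuming MongoDB and Redis are
# reachable at the defaults above. The sensor id and payload are made-up
# example values; this is a sketch, not part of the service itself.
async def _demo():
    await connect_to_mongo()
    await redis_manager.connect()
    await redis_manager.set_sensor_data("sensor-001", {"power_w": 42.0})
    print(await redis_manager.get_sensor_data("sensor-001"))
    await redis_manager.disconnect()
    await close_mongo_connection()


if __name__ == "__main__":
    asyncio.run(_demo())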