first commit

rafaeldpsilva · 2025-09-09 13:46:42 +01:00 · commit a7a18e6295
77 changed files with 8678 additions and 0 deletions


@@ -0,0 +1 @@
# Empty file to make this a Python package

Binary file not shown.


@@ -0,0 +1,300 @@
"""
Analytics business logic service
Business Layer - handles analytics calculations and data aggregations
"""
from datetime import datetime, timedelta
from typing import Dict, Any, List, Optional
import logging
from ..infrastructure.repositories import SensorReadingRepository
logger = logging.getLogger(__name__)
class AnalyticsService:
"""Service for analytics and reporting operations"""
def __init__(self):
self.sensor_reading_repo = SensorReadingRepository()
async def get_analytics_summary(self, hours: int = 24) -> Dict[str, Any]:
"""Get comprehensive analytics summary for the specified time period"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Sensor-level analytics pipeline
sensor_pipeline = [
{"$match": {"created_at": {"$gte": start_time}}},
{"$group": {
"_id": {
"sensor_id": "$sensor_id",
"room": "$room",
"sensor_type": "$sensor_type"
},
"reading_count": {"$sum": 1},
"avg_energy": {"$avg": "$energy.value"},
"total_energy": {"$sum": "$energy.value"},
"avg_co2": {"$avg": "$co2.value"},
"max_co2": {"$max": "$co2.value"},
"avg_temperature": {"$avg": "$temperature.value"},
"latest_timestamp": {"$max": "$timestamp"}
}},
{"$sort": {"total_energy": -1}}
]
sensor_analytics = await self.sensor_reading_repo.aggregate(sensor_pipeline)
# Room-level analytics pipeline
room_pipeline = [
{"$match": {"created_at": {"$gte": start_time}, "room": {"$ne": None}}},
{"$group": {
"_id": "$room",
"sensor_count": {"$addToSet": "$sensor_id"},
"total_energy": {"$sum": "$energy.value"},
"avg_co2": {"$avg": "$co2.value"},
"max_co2": {"$max": "$co2.value"},
"reading_count": {"$sum": 1}
}},
{"$project": {
"room": "$_id",
"sensor_count": {"$size": "$sensor_count"},
"total_energy": 1,
"avg_co2": 1,
"max_co2": 1,
"reading_count": 1
}},
{"$sort": {"total_energy": -1}}
]
room_analytics = await self.sensor_reading_repo.aggregate(room_pipeline)
# Calculate summary statistics
summary_stats = self._calculate_summary_stats(sensor_analytics, room_analytics)
return {
"period_hours": hours,
"start_time": start_time.isoformat(),
"sensor_analytics": sensor_analytics,
"room_analytics": room_analytics,
"summary": summary_stats
}
except Exception as e:
logger.error(f"Error getting analytics summary: {e}")
return {
"period_hours": hours,
"start_time": None,
"sensor_analytics": [],
"room_analytics": [],
"summary": {}
}
def _calculate_summary_stats(self, sensor_analytics: List[Dict],
room_analytics: List[Dict]) -> Dict[str, Any]:
"""Calculate summary statistics from analytics data"""
total_readings = sum(item["reading_count"] for item in sensor_analytics)
total_energy = sum(item.get("total_energy", 0) or 0 for item in sensor_analytics)
# Energy consumption insights
energy_insights = {
"total_consumption_kwh": round(total_energy, 2),
"average_consumption_per_sensor": (
round(total_energy / len(sensor_analytics), 2)
if sensor_analytics else 0
),
"top_energy_consumer": (
sensor_analytics[0]["_id"]["sensor_id"]
if sensor_analytics else None
)
}
# CO2 insights
co2_values = [item.get("avg_co2") for item in sensor_analytics if item.get("avg_co2")]
co2_insights = {
"average_co2_level": (
round(sum(co2_values) / len(co2_values), 1)
if co2_values else 0
),
"sensors_with_high_co2": len([
co2 for co2 in co2_values if co2 and co2 > 1000
]),
"sensors_with_critical_co2": len([
co2 for co2 in co2_values if co2 and co2 > 5000
])
}
return {
"total_sensors_analyzed": len(sensor_analytics),
"total_rooms_analyzed": len(room_analytics),
"total_readings": total_readings,
"energy_insights": energy_insights,
"co2_insights": co2_insights
}
async def get_energy_trends(self, hours: int = 168) -> Dict[str, Any]:
"""Get energy consumption trends (default: last week)"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Hourly energy consumption pipeline
pipeline = [
{"$match": {
"created_at": {"$gte": start_time},
"energy.value": {"$exists": True}
}},
{"$group": {
"_id": {
"year": {"$year": "$created_at"},
"month": {"$month": "$created_at"},
"day": {"$dayOfMonth": "$created_at"},
"hour": {"$hour": "$created_at"}
},
"total_energy": {"$sum": "$energy.value"},
"sensor_count": {"$addToSet": "$sensor_id"},
"reading_count": {"$sum": 1}
}},
{"$project": {
"_id": 0,
"timestamp": {
"$dateFromParts": {
"year": "$_id.year",
"month": "$_id.month",
"day": "$_id.day",
"hour": "$_id.hour"
}
},
"total_energy": {"$round": ["$total_energy", 2]},
"sensor_count": {"$size": "$sensor_count"},
"reading_count": 1
}},
{"$sort": {"timestamp": 1}}
]
trends = await self.sensor_reading_repo.aggregate(pipeline)
# Calculate trend insights
insights = self._calculate_trend_insights(trends)
return {
"period_hours": hours,
"data_points": len(trends),
"trends": trends,
"insights": insights
}
except Exception as e:
logger.error(f"Error getting energy trends: {e}")
return {
"period_hours": hours,
"data_points": 0,
"trends": [],
"insights": {}
}
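
Aside: the `$group` on year/month/day/hour followed by `$dateFromParts` is hourly bucketing. A minimal plain-Python equivalent, with invented sample data (nothing here is part of the commit):

```python
from collections import defaultdict
from datetime import datetime

# Invented (created_at, kWh) samples spanning two different hours
readings = [
    (datetime(2025, 9, 9, 13, 5), 1.2),
    (datetime(2025, 9, 9, 13, 40), 0.8),
    (datetime(2025, 9, 9, 14, 10), 2.1),
]

buckets: dict[datetime, float] = defaultdict(float)
for created_at, kwh in readings:
    # Truncate to the hour, like grouping on year/month/day/hour
    buckets[created_at.replace(minute=0, second=0, microsecond=0)] += kwh

for hour, total in sorted(buckets.items()):  # mirrors {"$sort": {"timestamp": 1}}
    print(hour.isoformat(), round(total, 2))
```
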
def _calculate_trend_insights(self, trends: List[Dict]) -> Dict[str, Any]:
"""Calculate insights from trend data"""
if not trends:
return {}
energy_values = [item["total_energy"] for item in trends]
# Peak and low consumption
max_consumption = max(energy_values)
min_consumption = min(energy_values)
avg_consumption = sum(energy_values) / len(energy_values)
# Find peak time
peak_item = max(trends, key=lambda x: x["total_energy"])
peak_time = peak_item["timestamp"]
return {
"peak_consumption_kwh": max_consumption,
"lowest_consumption_kwh": min_consumption,
"average_consumption_kwh": round(avg_consumption, 2),
"peak_time": peak_time.isoformat() if hasattr(peak_time, 'isoformat') else str(peak_time),
"consumption_variance": round(max_consumption - min_consumption, 2)
}
async def get_room_comparison(self, hours: int = 24) -> Dict[str, Any]:
"""Get room-by-room comparison analytics"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
pipeline = [
{"$match": {
"created_at": {"$gte": start_time},
"room": {"$ne": None}
}},
{"$group": {
"_id": "$room",
"total_energy": {"$sum": "$energy.value"},
"avg_energy": {"$avg": "$energy.value"},
"avg_co2": {"$avg": "$co2.value"},
"max_co2": {"$max": "$co2.value"},
"avg_temperature": {"$avg": "$temperature.value"},
"sensor_count": {"$addToSet": "$sensor_id"},
"reading_count": {"$sum": 1}
}},
{"$project": {
"room": "$_id",
"_id": 0,
"total_energy": {"$round": [{"$ifNull": ["$total_energy", 0]}, 2]},
"avg_energy": {"$round": [{"$ifNull": ["$avg_energy", 0]}, 2]},
"avg_co2": {"$round": [{"$ifNull": ["$avg_co2", 0]}, 1]},
"max_co2": {"$round": [{"$ifNull": ["$max_co2", 0]}, 1]},
"avg_temperature": {"$round": [{"$ifNull": ["$avg_temperature", 0]}, 1]},
"sensor_count": {"$size": "$sensor_count"},
"reading_count": 1
}},
{"$sort": {"total_energy": -1}}
]
room_comparison = await self.sensor_reading_repo.aggregate(pipeline)
# Calculate comparison insights
insights = self._calculate_room_insights(room_comparison)
return {
"period_hours": hours,
"rooms_analyzed": len(room_comparison),
"comparison": room_comparison,
"insights": insights
}
except Exception as e:
logger.error(f"Error getting room comparison: {e}")
return {
"period_hours": hours,
"rooms_analyzed": 0,
"comparison": [],
"insights": {}
}
def _calculate_room_insights(self, room_data: List[Dict]) -> Dict[str, Any]:
"""Calculate insights from room comparison data"""
if not room_data:
return {}
# Energy insights
total_energy = sum(room["total_energy"] for room in room_data)
highest_consumer = room_data[0] if room_data else None
lowest_consumer = min(room_data, key=lambda x: x["total_energy"]) if room_data else None
# CO2 insights
rooms_with_high_co2 = [
room for room in room_data
if room.get("avg_co2", 0) > 1000
]
# Temperature insights
temp_values = [room.get("avg_temperature", 0) for room in room_data if room.get("avg_temperature")]
avg_building_temp = sum(temp_values) / len(temp_values) if temp_values else 0
return {
"total_building_energy_kwh": round(total_energy, 2),
"highest_energy_consumer": highest_consumer["room"] if highest_consumer else None,
"lowest_energy_consumer": lowest_consumer["room"] if lowest_consumer else None,
"rooms_with_high_co2": len(rooms_with_high_co2),
"high_co2_rooms": [room["room"] for room in rooms_with_high_co2],
"average_building_temperature": round(avg_building_temp, 1),
"total_active_sensors": sum(room["sensor_count"] for room in room_data)
}
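
As a sanity check on what the sensor-level pipeline in `get_analytics_summary` returns, here is a minimal in-memory mirror of its `$group`/`$sort` stages, assuming documents shaped like the ones matched above (sample values invented):

```python
from collections import defaultdict

# Invented sample readings shaped like the documents the pipeline matches
readings = [
    {"sensor_id": "s1", "room": "lab", "energy": {"value": 1.5}},
    {"sensor_id": "s1", "room": "lab", "energy": {"value": 2.0}},
    {"sensor_id": "s2", "room": "office", "energy": {"value": 0.7}},
]

groups: dict[tuple, list[float]] = defaultdict(list)
for r in readings:
    groups[(r["sensor_id"], r["room"])].append(r["energy"]["value"])

# Mirrors $group (count/sum/avg per sensor) and the $sort on total_energy desc
sensor_analytics = sorted(
    (
        {
            "sensor_id": sid,
            "room": room,
            "reading_count": len(vals),
            "total_energy": round(sum(vals), 2),
            "avg_energy": round(sum(vals) / len(vals), 2),
        }
        for (sid, room), vals in groups.items()
    ),
    key=lambda d: d["total_energy"],
    reverse=True,
)
print(sensor_analytics[0]["sensor_id"])  # top consumer, as used by _calculate_summary_stats
```
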


@@ -0,0 +1,234 @@
"""
Data cleanup and maintenance service
Business Layer - handles data retention policies and system maintenance
"""
import asyncio
from datetime import datetime, timedelta
from typing import Dict, Any
import logging
from ..infrastructure.database_connection import database_connection
from ..infrastructure.repositories import SensorReadingRepository
logger = logging.getLogger(__name__)
class CleanupService:
"""Service for data cleanup and maintenance operations"""
def __init__(self):
self.sensor_reading_repo = SensorReadingRepository()
self.is_running = False
self.cleanup_task = None
async def start_scheduled_cleanup(self, interval_hours: int = 24) -> None:
"""Start scheduled cleanup process"""
if self.is_running:
logger.warning("Cleanup service is already running")
return
self.is_running = True
self.cleanup_task = asyncio.create_task(self._cleanup_loop(interval_hours))
logger.info(f"Started scheduled cleanup service (interval: {interval_hours} hours)")
async def stop_scheduled_cleanup(self) -> None:
"""Stop scheduled cleanup process"""
self.is_running = False
if self.cleanup_task:
self.cleanup_task.cancel()
try:
await self.cleanup_task
except asyncio.CancelledError:
pass
logger.info("Cleanup service stopped")
async def _cleanup_loop(self, interval_hours: int) -> None:
"""Main cleanup loop"""
while self.is_running:
try:
await self.cleanup_old_data()
# Wait for next cleanup interval
await asyncio.sleep(interval_hours * 3600) # Convert hours to seconds
except Exception as e:
logger.error(f"Error in scheduled cleanup: {e}")
# Wait 1 hour before retrying on error
await asyncio.sleep(3600)
async def cleanup_old_data(self) -> Dict[str, int]:
"""Perform data cleanup based on retention policies"""
try:
cleanup_results = {}
db = await database_connection.get_database()
# Delete sensor readings older than 90 days
sensor_retention_date = datetime.utcnow() - timedelta(days=90)
sensor_result = await db.sensor_readings.delete_many({
"created_at": {"$lt": sensor_retention_date}
})
cleanup_results["sensor_readings_deleted"] = sensor_result.deleted_count
if sensor_result.deleted_count > 0:
logger.info(f"Deleted {sensor_result.deleted_count} old sensor readings")
# Delete room metrics older than 30 days
room_retention_date = datetime.utcnow() - timedelta(days=30)
room_result = await db.room_metrics.delete_many({
"created_at": {"$lt": room_retention_date}
})
cleanup_results["room_metrics_deleted"] = room_result.deleted_count
if room_result.deleted_count > 0:
logger.info(f"Deleted {room_result.deleted_count} old room metrics")
# Delete system events older than 60 days
events_retention_date = datetime.utcnow() - timedelta(days=60)
events_result = await db.system_events.delete_many({
"created_at": {"$lt": events_retention_date}
})
cleanup_results["system_events_deleted"] = events_result.deleted_count
if events_result.deleted_count > 0:
logger.info(f"Deleted {events_result.deleted_count} old system events")
# Clean up orphaned sensor metadata (sensors with no recent readings)
orphaned_retention_date = datetime.utcnow() - timedelta(days=30)
# Find sensors with no recent readings
active_sensors = await db.sensor_readings.distinct("sensor_id", {
"created_at": {"$gte": orphaned_retention_date}
})
orphaned_result = await db.sensor_metadata.delete_many({
"sensor_id": {"$nin": active_sensors},
"last_seen": {"$lt": orphaned_retention_date}
})
cleanup_results["orphaned_metadata_deleted"] = orphaned_result.deleted_count
if orphaned_result.deleted_count > 0:
logger.info(f"Deleted {orphaned_result.deleted_count} orphaned sensor metadata records")
return cleanup_results
except Exception as e:
logger.error(f"Error during data cleanup: {e}")
return {"error": str(e)}
async def get_storage_statistics(self) -> Dict[str, Any]:
"""Get storage statistics for different collections"""
try:
db = await database_connection.get_database()
stats = {}
# Sensor readings statistics
sensor_stats = await db.command("collStats", "sensor_readings")
stats["sensor_readings"] = {
"count": sensor_stats.get("count", 0),
"size_bytes": sensor_stats.get("size", 0),
"avg_obj_size": sensor_stats.get("avgObjSize", 0),
"storage_size": sensor_stats.get("storageSize", 0)
}
# Room metrics statistics
room_stats = await db.command("collStats", "room_metrics")
stats["room_metrics"] = {
"count": room_stats.get("count", 0),
"size_bytes": room_stats.get("size", 0),
"avg_obj_size": room_stats.get("avgObjSize", 0),
"storage_size": room_stats.get("storageSize", 0)
}
# System events statistics
events_stats = await db.command("collStats", "system_events")
stats["system_events"] = {
"count": events_stats.get("count", 0),
"size_bytes": events_stats.get("size", 0),
"avg_obj_size": events_stats.get("avgObjSize", 0),
"storage_size": events_stats.get("storageSize", 0)
}
# Sensor metadata statistics
metadata_stats = await db.command("collStats", "sensor_metadata")
stats["sensor_metadata"] = {
"count": metadata_stats.get("count", 0),
"size_bytes": metadata_stats.get("size", 0),
"avg_obj_size": metadata_stats.get("avgObjSize", 0),
"storage_size": metadata_stats.get("storageSize", 0)
}
# Calculate totals
total_documents = sum(collection["count"] for collection in stats.values())
total_size = sum(collection["size_bytes"] for collection in stats.values())
total_storage = sum(collection["storage_size"] for collection in stats.values())
stats["totals"] = {
"total_documents": total_documents,
"total_size_bytes": total_size,
"total_storage_bytes": total_storage,
"total_size_mb": round(total_size / (1024 * 1024), 2),
"total_storage_mb": round(total_storage / (1024 * 1024), 2)
}
return stats
except Exception as e:
logger.error(f"Error getting storage statistics: {e}")
return {"error": str(e)}
async def get_data_retention_info(self) -> Dict[str, Any]:
"""Get information about data retention policies and old data"""
try:
db = await database_connection.get_database()
# Current date references
now = datetime.utcnow()
sensor_cutoff = now - timedelta(days=90)
room_cutoff = now - timedelta(days=30)
events_cutoff = now - timedelta(days=60)
retention_info = {}
# Sensor readings retention info
old_sensor_count = await db.sensor_readings.count_documents({
"created_at": {"$lt": sensor_cutoff}
})
retention_info["sensor_readings"] = {
"retention_days": 90,
"cutoff_date": sensor_cutoff.isoformat(),
"old_records_count": old_sensor_count
}
# Room metrics retention info
old_room_count = await db.room_metrics.count_documents({
"created_at": {"$lt": room_cutoff}
})
retention_info["room_metrics"] = {
"retention_days": 30,
"cutoff_date": room_cutoff.isoformat(),
"old_records_count": old_room_count
}
# System events retention info
old_events_count = await db.system_events.count_documents({
"created_at": {"$lt": events_cutoff}
})
retention_info["system_events"] = {
"retention_days": 60,
"cutoff_date": events_cutoff.isoformat(),
"old_records_count": old_events_count
}
return retention_info
except Exception as e:
logger.error(f"Error getting retention info: {e}")
return {"error": str(e)}
def is_cleanup_running(self) -> bool:
"""Check if cleanup service is currently running"""
return self.is_running and (
self.cleanup_task is not None and
not self.cleanup_task.done()
)
# Global cleanup service instance
cleanup_service = CleanupService()
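
The start/stop plumbing in `CleanupService` is the usual cancellable-background-task pattern. A self-contained miniature, assuming nothing from this codebase (`PeriodicTask` and its printing body are invented for illustration):

```python
import asyncio

class PeriodicTask:
    def __init__(self):
        self.is_running = False
        self._task = None

    async def start(self, interval_s: float) -> None:
        if self.is_running:
            return  # already running, same guard as start_scheduled_cleanup
        self.is_running = True
        self._task = asyncio.create_task(self._loop(interval_s))

    async def stop(self) -> None:
        self.is_running = False
        if self._task:
            self._task.cancel()
            try:
                await self._task          # swallow the cancellation, like above
            except asyncio.CancelledError:
                pass

    async def _loop(self, interval_s: float) -> None:
        while self.is_running:
            print("tick")                 # stands in for cleanup_old_data()
            await asyncio.sleep(interval_s)

async def main():
    t = PeriodicTask()
    await t.start(0.1)
    await asyncio.sleep(0.35)             # let it tick a few times
    await t.stop()

asyncio.run(main())
```
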


@@ -0,0 +1,262 @@
"""
Room metrics business logic service
Business Layer - handles room-related aggregations and business operations
"""
from datetime import datetime, timedelta
from typing import Dict, Any, List, Optional
import logging
from models import RoomMetrics, CO2Status, OccupancyLevel
from ..infrastructure.repositories import (
SensorReadingRepository, RoomMetricsRepository, RedisRepository
)
logger = logging.getLogger(__name__)
class RoomService:
"""Service for room-related business operations"""
def __init__(self):
self.sensor_reading_repo = SensorReadingRepository()
self.room_metrics_repo = RoomMetricsRepository()
self.redis_repo = RedisRepository()
async def update_room_metrics(self, room: str) -> bool:
"""Calculate and store room-level metrics"""
if not room:
return False
try:
# Get recent readings for this room (last 5 minutes)
recent_readings = await self.sensor_reading_repo.get_recent_by_room(
room=room,
minutes=5
)
if not recent_readings:
return False
# Calculate aggregated metrics
metrics = await self._calculate_room_metrics(room, recent_readings)
# Store in MongoDB
stored = await self.room_metrics_repo.create(metrics)
# Cache in Redis
if stored:
await self.redis_repo.set_room_metrics(room, metrics.dict())
logger.debug(f"Updated room metrics for {room}")
return stored
except Exception as e:
logger.error(f"Error updating room metrics for {room}: {e}")
return False
async def _calculate_room_metrics(self, room: str, readings: List[Dict]) -> RoomMetrics:
"""Calculate aggregated metrics for a room based on recent readings"""
# Group readings by sensor
sensors_data = {}
for reading in readings:
sensor_id = reading["sensor_id"]
if sensor_id not in sensors_data:
sensors_data[sensor_id] = []
sensors_data[sensor_id].append(reading)
# Initialize value arrays
energy_values = []
co2_values = []
temperature_values = []
humidity_values = []
motion_detected = False
# Extract values from readings
for sensor_readings in sensors_data.values():
for reading in sensor_readings:
if reading.get("energy"):
energy_values.append(reading["energy"]["value"])
if reading.get("co2"):
co2_values.append(reading["co2"]["value"])
if reading.get("temperature"):
temperature_values.append(reading["temperature"]["value"])
if reading.get("humidity"):
humidity_values.append(reading["humidity"]["value"])
if reading.get("motion") and reading["motion"].get("value") == "Detected":
motion_detected = True
# Get sensor types present
sensor_types = list(set(
reading.get("sensor_type")
for reading in readings
if reading.get("sensor_type")
))
# Initialize metrics object
metrics = RoomMetrics(
room=room,
timestamp=int(datetime.utcnow().timestamp()),
sensor_count=len(sensors_data),
active_sensors=list(sensors_data.keys()),
sensor_types=sensor_types,
motion_detected=motion_detected
)
# Calculate energy metrics
if energy_values:
metrics.energy = self._calculate_energy_metrics(energy_values)
# Calculate CO2 metrics and occupancy
if co2_values:
metrics.co2 = self._calculate_co2_metrics(co2_values)
metrics.occupancy_estimate = self._estimate_occupancy_from_co2(
metrics.co2["average"]
)
# Calculate temperature metrics
if temperature_values:
metrics.temperature = self._calculate_temperature_metrics(temperature_values)
# Calculate humidity metrics
if humidity_values:
metrics.humidity = self._calculate_humidity_metrics(humidity_values)
# Set last activity time if motion detected
if motion_detected:
metrics.last_activity = datetime.utcnow()
return metrics
def _calculate_energy_metrics(self, values: List[float]) -> Dict[str, Any]:
"""Calculate energy consumption metrics"""
return {
"current": sum(values),
"average": sum(values) / len(values),
"total": sum(values),
"peak": max(values),
"unit": "kWh"
}
def _calculate_co2_metrics(self, values: List[float]) -> Dict[str, Any]:
"""Calculate CO2 level metrics"""
avg_co2 = sum(values) / len(values)
return {
"current": avg_co2,
"average": avg_co2,
"max": max(values),
"min": min(values),
"status": self._get_co2_status(avg_co2).value,
"unit": "ppm"
}
def _calculate_temperature_metrics(self, values: List[float]) -> Dict[str, Any]:
"""Calculate temperature metrics"""
avg_temp = sum(values) / len(values)
return {
"current": avg_temp,
"average": avg_temp,
"max": max(values),
"min": min(values),
"unit": "°C"
}
def _calculate_humidity_metrics(self, values: List[float]) -> Dict[str, Any]:
"""Calculate humidity metrics"""
avg_humidity = sum(values) / len(values)
return {
"current": avg_humidity,
"average": avg_humidity,
"max": max(values),
"min": min(values),
"unit": "%"
}
def _get_co2_status(self, co2_level: float) -> CO2Status:
"""Determine CO2 status based on level"""
if co2_level < 400:
return CO2Status.GOOD
elif co2_level < 1000:
return CO2Status.MODERATE
elif co2_level < 5000:
return CO2Status.POOR
else:
return CO2Status.CRITICAL
def _estimate_occupancy_from_co2(self, co2_level: float) -> OccupancyLevel:
"""Estimate occupancy level based on CO2 levels"""
if co2_level < 600:
return OccupancyLevel.LOW
elif co2_level < 1200:
return OccupancyLevel.MEDIUM
else:
return OccupancyLevel.HIGH
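
The two threshold ladders above are easy to spot-check in isolation. A standalone mirror, with invented function names and the cut-offs copied from `_get_co2_status` and `_estimate_occupancy_from_co2`:

```python
def co2_status(ppm: float) -> str:
    if ppm < 400:
        return "good"
    if ppm < 1000:
        return "moderate"
    if ppm < 5000:
        return "poor"
    return "critical"

def occupancy_estimate(ppm: float) -> str:
    if ppm < 600:
        return "low"
    if ppm < 1200:
        return "medium"
    return "high"

# Boundary-adjacent probes: 400, 1000, and 5000 each flip the CO2 status
for ppm in (350, 400, 999, 1000, 4999, 5000):
    print(ppm, co2_status(ppm), occupancy_estimate(ppm))
```
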
async def get_all_rooms(self) -> Dict[str, Any]:
"""Get list of all rooms with sensor counts and latest metrics"""
try:
rooms = await self.sensor_reading_repo.get_distinct_rooms()
room_data = []
for room in rooms:
# Get sensor count for each room
sensor_ids = await self.sensor_reading_repo.get_distinct_sensor_ids_by_room(room)
sensor_count = len(sensor_ids)
# Get latest room metrics from cache
room_metrics = await self.redis_repo.get_room_metrics(room)
room_data.append({
"room": room,
"sensor_count": sensor_count,
"sensor_ids": sensor_ids,
"latest_metrics": room_metrics
})
return {
"rooms": room_data,
"count": len(room_data)
}
except Exception as e:
logger.error(f"Error getting rooms: {e}")
return {"rooms": [], "count": 0}
async def get_room_data(self, room_name: str, start_time: Optional[int] = None,
end_time: Optional[int] = None, limit: int = 100) -> Dict[str, Any]:
"""Get historical data for a specific room"""
try:
# Build query for time range
query = {"room": room_name}
if start_time or end_time:
time_query = {}
                if start_time:
                    # Use naive-UTC datetimes to match created_at values written via utcnow()
                    time_query["$gte"] = datetime.utcfromtimestamp(start_time)
                if end_time:
                    time_query["$lte"] = datetime.utcfromtimestamp(end_time)
query["created_at"] = time_query
# Get room metrics
room_metrics = await self.room_metrics_repo.get_by_room(room_name, limit)
# Get sensor readings for the room
sensor_readings = await self.sensor_reading_repo.get_by_query(
query=query,
sort_by="timestamp",
sort_order="desc",
limit=limit
)
return {
"room": room_name,
"room_metrics": room_metrics,
"sensor_readings": sensor_readings
}
except Exception as e:
logger.error(f"Error getting room data for {room_name}: {e}")
return {
"room": room_name,
"room_metrics": [],
"sensor_readings": []
}
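
`get_room_data` accepts epoch seconds and builds a `created_at` window from them. A standalone sketch of just that query construction (`build_room_query` is invented; naive UTC is used to match the `utcnow()`-based `created_at` values stored elsewhere in this commit):

```python
from datetime import datetime
from typing import Any, Dict, Optional

def build_room_query(room: str, start_ts: Optional[int], end_ts: Optional[int]) -> Dict[str, Any]:
    query: Dict[str, Any] = {"room": room}
    if start_ts or end_ts:
        window: Dict[str, Any] = {}
        if start_ts:
            window["$gte"] = datetime.utcfromtimestamp(start_ts)  # naive UTC, like created_at
        if end_ts:
            window["$lte"] = datetime.utcfromtimestamp(end_ts)
        query["created_at"] = window
    return query

print(build_room_query("lab", 1757368800, 1757455200))
```
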


@@ -0,0 +1,328 @@
"""
Sensor business logic service
Business Layer - handles sensor-related business operations and rules
"""
import json
from datetime import datetime, timedelta
from typing import Dict, Any, List, Optional
import logging
import uuid
from models import (
SensorReading, LegacySensorReading, SensorMetadata,
SensorType, SensorStatus, CO2Status, OccupancyLevel
)
from ..infrastructure.repositories import (
SensorReadingRepository, SensorMetadataRepository,
SystemEventRepository, RedisRepository
)
logger = logging.getLogger(__name__)
class SensorService:
"""Service for sensor-related business operations"""
def __init__(self):
self.sensor_reading_repo = SensorReadingRepository()
self.sensor_metadata_repo = SensorMetadataRepository()
self.system_event_repo = SystemEventRepository()
self.redis_repo = RedisRepository()
async def process_sensor_message(self, message_data: str) -> bool:
"""Process incoming sensor message and handle business logic"""
try:
# Parse the message
data = json.loads(message_data)
logger.debug(f"Processing sensor message: {data}")
# Convert to standard format
sensor_reading = await self._parse_sensor_data(data)
# Validate business rules
validation_result = await self._validate_sensor_reading(sensor_reading)
if not validation_result["valid"]:
logger.warning(f"Sensor reading validation failed: {validation_result['errors']}")
return False
# Store the reading
stored = await self.sensor_reading_repo.create(sensor_reading)
if not stored:
return False
# Update caches and metadata
await self._update_caches(sensor_reading)
await self._update_sensor_metadata(sensor_reading)
# Check for alerts
await self._check_sensor_alerts(sensor_reading)
return True
except Exception as e:
logger.error(f"Error processing sensor message: {e}")
await self._log_processing_error(str(e), message_data)
return False
async def _parse_sensor_data(self, data: dict) -> SensorReading:
"""Parse and convert sensor data to standard format"""
# Check if legacy format
if self._is_legacy_format(data):
return await self._convert_legacy_data(data)
else:
return SensorReading(**data)
def _is_legacy_format(self, data: dict) -> bool:
"""Check if data is in legacy format"""
legacy_keys = {"sensorId", "timestamp", "value", "unit"}
return legacy_keys.issubset(data.keys()) and "energy" not in data
async def _convert_legacy_data(self, data: dict) -> SensorReading:
"""Convert legacy format to new sensor reading format"""
legacy_reading = LegacySensorReading(**data)
return SensorReading(
sensor_id=legacy_reading.sensor_id,
sensor_type=SensorType.ENERGY,
timestamp=legacy_reading.timestamp,
created_at=legacy_reading.created_at,
energy={
"value": legacy_reading.value,
"unit": legacy_reading.unit
}
)
async def _validate_sensor_reading(self, reading: SensorReading) -> Dict[str, Any]:
"""Validate sensor reading against business rules"""
errors = []
# Check timestamp is not too far in the future
future_threshold = datetime.utcnow().timestamp() + 3600 # 1 hour
if reading.timestamp > future_threshold:
errors.append("Timestamp is too far in the future")
# Check timestamp is not too old
past_threshold = datetime.utcnow().timestamp() - 86400 # 24 hours
if reading.timestamp < past_threshold:
errors.append("Timestamp is too old")
# Validate sensor values
if reading.energy:
energy_value = reading.energy.get("value", 0)
if energy_value < 0 or energy_value > 1000: # Reasonable energy range
errors.append("Energy value is out of acceptable range")
if reading.co2:
co2_value = reading.co2.get("value", 0)
if co2_value < 0 or co2_value > 50000: # Reasonable CO2 range
errors.append("CO2 value is out of acceptable range")
if reading.temperature:
temp_value = reading.temperature.get("value", 0)
if temp_value < -50 or temp_value > 100: # Reasonable temperature range
errors.append("Temperature value is out of acceptable range")
return {
"valid": len(errors) == 0,
"errors": errors
}
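
The acceptance window above is one hour ahead to 24 hours back from "now", plus a plausibility range per quantity. A compact standalone mirror (`validate_reading` is invented; thresholds copied from the method):

```python
from datetime import datetime, timezone
from typing import List, Optional

def validate_reading(ts: float, energy: Optional[float] = None,
                     co2: Optional[float] = None) -> List[str]:
    now = datetime.now(timezone.utc).timestamp()
    errors: List[str] = []
    if ts > now + 3600:           # at most 1 hour in the future
        errors.append("Timestamp is too far in the future")
    if ts < now - 86400:          # at most 24 hours in the past
        errors.append("Timestamp is too old")
    if energy is not None and not (0 <= energy <= 1000):
        errors.append("Energy value is out of acceptable range")
    if co2 is not None and not (0 <= co2 <= 50000):
        errors.append("CO2 value is out of acceptable range")
    return errors

now_ts = datetime.now(timezone.utc).timestamp()
print(validate_reading(now_ts, energy=2.5))          # []
print(validate_reading(now_ts - 90000, co2=60000))   # both checks fire
```
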
async def _update_caches(self, reading: SensorReading) -> None:
"""Update Redis caches with latest sensor data"""
# Cache latest sensor reading
await self.redis_repo.set_sensor_data(
reading.sensor_id,
reading.dict(),
expire_seconds=3600
)
# Update sensor status
status_data = {
"status": "online",
"last_seen": reading.timestamp,
"room": reading.room
}
await self.redis_repo.set_sensor_status(
reading.sensor_id,
status_data,
expire_seconds=1800
)
async def _update_sensor_metadata(self, reading: SensorReading) -> None:
"""Update or create sensor metadata"""
existing = await self.sensor_metadata_repo.get_by_sensor_id(reading.sensor_id)
if existing:
# Update existing metadata
updates = {
"last_seen": datetime.utcnow(),
"status": SensorStatus.ONLINE.value
}
# Add sensor type to monitoring capabilities if not present
capabilities = existing.get("monitoring_capabilities", [])
if reading.sensor_type.value not in capabilities:
capabilities.append(reading.sensor_type.value)
updates["monitoring_capabilities"] = capabilities
await self.sensor_metadata_repo.update(reading.sensor_id, updates)
else:
# Create new sensor metadata
metadata = SensorMetadata(
sensor_id=reading.sensor_id,
name=f"Sensor {reading.sensor_id}",
sensor_type=reading.sensor_type,
room=reading.room,
status=SensorStatus.ONLINE,
last_seen=datetime.utcnow(),
monitoring_capabilities=[reading.sensor_type.value]
)
await self.sensor_metadata_repo.create(metadata)
logger.info(f"Created metadata for new sensor: {reading.sensor_id}")
async def _check_sensor_alerts(self, reading: SensorReading) -> None:
"""Check for alert conditions in sensor data"""
alerts = []
# CO2 level alerts
if reading.co2:
co2_level = reading.co2.get("value", 0)
if co2_level > 5000:
alerts.append({
"event_type": "co2_critical",
"severity": "critical",
"title": "Critical CO2 Level",
"description": f"CO2 level ({co2_level} ppm) exceeds critical threshold in {reading.room or 'unknown room'}"
})
elif co2_level > 1000:
alerts.append({
"event_type": "co2_high",
"severity": "warning",
"title": "High CO2 Level",
"description": f"CO2 level ({co2_level} ppm) is above recommended levels in {reading.room or 'unknown room'}"
})
# Energy consumption alerts
if reading.energy:
energy_value = reading.energy.get("value", 0)
if energy_value > 10:
alerts.append({
"event_type": "energy_high",
"severity": "warning",
"title": "High Energy Consumption",
"description": f"Energy consumption ({energy_value} kWh) is unusually high for sensor {reading.sensor_id}"
})
# Temperature alerts
if reading.temperature:
temp_value = reading.temperature.get("value", 0)
if temp_value > 30 or temp_value < 15:
alerts.append({
"event_type": "temperature_extreme",
"severity": "warning",
"title": "Extreme Temperature",
"description": f"Temperature ({temp_value}°C) is outside normal range in {reading.room or 'unknown room'}"
})
# Log alerts as system events
for alert in alerts:
await self._log_alert_event(reading, **alert)
async def _log_alert_event(self, reading: SensorReading, event_type: str, severity: str,
title: str, description: str) -> None:
"""Log an alert as a system event"""
from models import SystemEvent
event = SystemEvent(
event_id=str(uuid.uuid4()),
event_type=event_type,
severity=severity,
timestamp=int(datetime.utcnow().timestamp()),
title=title,
description=description,
sensor_id=reading.sensor_id,
room=reading.room,
source="sensor_service",
data=reading.dict()
)
await self.system_event_repo.create(event)
async def _log_processing_error(self, error_message: str, raw_data: str) -> None:
"""Log data processing error"""
from models import SystemEvent
event = SystemEvent(
event_id=str(uuid.uuid4()),
event_type="data_processing_error",
severity="error",
timestamp=int(datetime.utcnow().timestamp()),
title="Sensor Data Processing Failed",
description=f"Failed to process sensor message: {error_message}",
source="sensor_service",
data={"raw_message": raw_data}
)
await self.system_event_repo.create(event)
async def get_sensor_details(self, sensor_id: str) -> Optional[Dict[str, Any]]:
"""Get complete sensor details including metadata and recent readings"""
# Get metadata
metadata = await self.sensor_metadata_repo.get_by_sensor_id(sensor_id)
if not metadata:
return None
# Get recent readings
recent_readings = await self.sensor_reading_repo.get_recent_by_sensor(
sensor_id=sensor_id,
limit=100,
minutes=1440 # 24 hours
)
# Get latest reading from cache
latest_reading = await self.redis_repo.get_sensor_data(sensor_id)
return {
"sensor": metadata,
"latest_reading": latest_reading,
"recent_readings_count": len(recent_readings),
"recent_readings": recent_readings[:10] # Return only 10 most recent
}
async def update_sensor_metadata(self, sensor_id: str, metadata_updates: Dict[str, Any]) -> bool:
"""Update sensor metadata with business validation"""
# Validate updates
if "sensor_id" in metadata_updates:
del metadata_updates["sensor_id"] # Cannot change sensor ID
# Update timestamp
metadata_updates["updated_at"] = datetime.utcnow()
return await self.sensor_metadata_repo.update(sensor_id, metadata_updates)
async def delete_sensor(self, sensor_id: str) -> Dict[str, Any]:
"""Delete a sensor and all its associated data"""
# Delete readings
readings_deleted = await self.sensor_reading_repo.delete_by_sensor_id(sensor_id)
# Delete metadata
metadata_deleted = await self.sensor_metadata_repo.delete(sensor_id)
# Clear cache
await self.redis_repo.delete_sensor_cache(sensor_id)
return {
"sensor_id": sensor_id,
"readings_deleted": readings_deleted,
"metadata_deleted": metadata_deleted
}
    async def get_all_sensors(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Get all sensors with optional filtering"""
sensors = await self.sensor_metadata_repo.get_all(filters)
return {
"sensors": sensors,
"count": len(sensors),
"filters": filters or {}
}
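
Finally, the legacy path: `_is_legacy_format` plus `_convert_legacy_data` amount to key-shape detection followed by renaming flat fields into the nested `energy` shape. A dependency-free miniature using plain dicts instead of the Pydantic models (which are not shown in this commit view):

```python
import json

LEGACY_KEYS = {"sensorId", "timestamp", "value", "unit"}

def is_legacy_format(data: dict) -> bool:
    # Same shape test as above: all legacy keys present, no nested energy field
    return LEGACY_KEYS.issubset(data.keys()) and "energy" not in data

def normalize(data: dict) -> dict:
    """Rename legacy flat fields into the nested energy-reading shape."""
    if not is_legacy_format(data):
        return data
    return {
        "sensor_id": data["sensorId"],
        "sensor_type": "energy",
        "timestamp": data["timestamp"],
        "energy": {"value": data["value"], "unit": data["unit"]},
    }

raw = '{"sensorId": "s1", "timestamp": 1757421602, "value": 2.5, "unit": "kWh"}'
print(normalize(json.loads(raw)))
```
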