Compare commits

...

6 Commits

Author SHA1 Message Date
rafaeldpsilva
4bedcecf5d feat: Implement HTTP Poller for IoT device data ingestion
- Added iots-right.json configuration file to define IoT devices and their sensors.
- Developed HttpPoller class to handle polling of IoT devices via HTTP.
- Created IoT configuration loader to validate and load device configurations from JSON.
- Introduced models for device status, polling metrics, and data sources.
- Implemented API routes for health checks, device status retrieval, and configuration management.
- Enhanced error handling and logging throughout the data ingestion process.
2025-12-22 16:35:22 +00:00
rafaeldpsilva
ccf5f5a5c3 modular monolythic 2025-12-20 00:57:59 +00:00
rafaeldpsilva
4779eb9ded modular monolythic 2025-12-20 00:51:04 +00:00
rafaeldpsilva
6ed61b06e8 update 2025-12-20 00:36:48 +00:00
rafaeldpsilva
ed0b4c750c update 2025-12-20 00:33:03 +00:00
rafaeldpsilva
a84d3632da update 2025-12-20 00:31:34 +00:00
34 changed files with 8177 additions and 116 deletions

2
.gitignore vendored
View File

@@ -173,4 +173,6 @@ poetry.toml
# LSP config files
pyrightconfig.json
CLAUDE.md
# End of https://www.toptal.com/developers/gitignore/api/python

View File

@@ -1,33 +1,6 @@
version: "3.8"
services:
# Database Services
mongodb:
image: mongo:5.0
container_name: mongodb
restart: unless-stopped
environment:
MONGO_INITDB_ROOT_USERNAME: admin
MONGO_INITDB_ROOT_PASSWORD: password123
ports:
- "27017:27017"
volumes:
- mongodb_data:/data/db
- ./init-mongo:/docker-entrypoint-initdb.d
networks:
- energy-network
redis:
image: redis:7-alpine
container_name: redis
restart: unless-stopped
ports:
- "6379:6379"
volumes:
- redis_data:/data
networks:
- energy-network
# API Gateway
api-gateway:
build:
@@ -48,33 +21,10 @@ services:
- FORECASTING_SERVICE_URL=http://forecasting-service:8005
- IOT_CONTROL_SERVICE_URL=http://iot-control-service:8006
- DATA_INGESTION_SERVICE_URL=http://data-ingestion-service:8008
depends_on:
- mongodb
- redis
- token-service
- sensor-service
- data-ingestion-service
- demand-response-service
# - battery-service
networks:
- energy-network
# Token Management Service
token-service:
build:
context: ./token-service
dockerfile: Dockerfile
container_name: token-service
restart: unless-stopped
ports:
- "8001:8001"
environment:
- MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_tokens?authSource=admin
- JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production
depends_on:
- mongodb
networks:
- energy-network
# Battery Management Service
# battery-service:
@@ -88,9 +38,6 @@ services:
# environment:
# - MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_batteries?authSource=admin
# - REDIS_URL=redis://redis:6379
# depends_on:
# - mongodb
# - redis
# networks:
# - energy-network
@@ -107,10 +54,6 @@ services:
- MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_demand_response?authSource=admin
- REDIS_URL=redis://redis:6379
- SENSOR_SERVICE_URL=http://sensor-service:8007
depends_on:
- mongodb
- redis
- sensor-service
networks:
- energy-network
@@ -126,9 +69,6 @@ services:
# environment:
# - MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_p2p?authSource=admin
# - REDIS_URL=redis://redis:6379
# depends_on:
# - mongodb
# - redis
# networks:
# - energy-network
@@ -144,9 +84,6 @@ services:
# environment:
# - MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_forecasting?authSource=admin
# - REDIS_URL=redis://redis:6379
# depends_on:
# - mongodb
# - redis
# networks:
# - energy-network
@@ -164,9 +101,6 @@ services:
# - REDIS_URL=redis://redis:6379
# - BATTERY_SERVICE_URL=http://battery-service:8002
# - DEMAND_RESPONSE_SERVICE_URL=http://demand-response-service:8003
# depends_on:
# - mongodb
# - redis
# networks:
# - energy-network
@@ -186,8 +120,6 @@ services:
- FTP_SA4CPS_REMOTE_PATH=/SLGs/
- FTP_CHECK_INTERVAL=21600
- FTP_SKIP_INITIAL_SCAN=true
depends_on:
- mongodb
networks:
- energy-network
@@ -204,25 +136,6 @@ services:
- MONGO_URL=mongodb://admin:password123@mongodb:27017/energy_dashboard_sensors?authSource=admin
- REDIS_URL=redis://redis:6379
# - TOKEN_SERVICE_URL=http://token-service:8001
depends_on:
- mongodb
- redis
networks:
- energy-network
# Monitoring and Management
nginx:
image: nginx:alpine
container_name: nginx
restart: unless-stopped
ports:
- "80:80"
- "443:443"
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf
- ./nginx/ssl:/etc/nginx/ssl
depends_on:
- api-gateway
networks:
- energy-network

View File

@@ -1,29 +0,0 @@
events {
worker_connections 1024;
}
http {
upstream api_gateway {
server api-gateway:8000;
}
server {
listen 80;
location / {
proxy_pass http://api_gateway;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /ws {
proxy_pass http://api_gateway;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
}
}
}

14
monolith/.env.example Normal file
View File

@@ -0,0 +1,14 @@
# MongoDB Configuration
# Update with your MongoDB connection string
MONGO_URL=mongodb://admin:password123@localhost:27017/?authSource=admin
# HTTP Poller Configuration
# IoT device polling settings
HTTP_POLL_INTERVAL=60
HTTP_TIMEOUT=10
HTTP_MAX_CONCURRENT=5
# Application Settings
DEBUG=false
HOST=0.0.0.0
PORT=8000

28
monolith/Dockerfile Normal file
View File

@@ -0,0 +1,28 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements and install dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY src/ ./src/
# Set Python path
ENV PYTHONPATH=/app
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
CMD python -c "import requests; requests.get('http://localhost:8000/health')"
# Run the application
CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "8000"]

480
monolith/MIGRATION.md Normal file
View File

@@ -0,0 +1,480 @@
# Migration Guide: Microservices to Modular Monolith
This guide explains the transformation from the microservices architecture to the modular monolithic architecture.
## Overview of Changes
### Architecture Transformation
**Before (Microservices)**:
- Multiple independent services (8+ services)
- HTTP-based inter-service communication
- Redis pub/sub for events
- API Gateway for routing
- Service discovery and health checking
- Separate Docker containers per service
**After (Modular Monolith)**:
- Single application with modular structure
- Direct function calls via dependency injection
- In-process event bus
- Integrated routing in main application
- Single Docker container
- Separate databases per module (preserved isolation)
## Key Architectural Differences
### 1. Service Communication
#### Microservices Approach
```python
# HTTP call to another service
async with aiohttp.ClientSession() as session:
    url = f"{SENSOR_SERVICE_URL}/sensors/{sensor_id}"
    async with session.get(url) as response:
        data = await response.json()
```
#### Modular Monolith Approach
```python
# Direct function call with dependency injection
from modules.sensors import SensorService
from core.dependencies import get_sensors_db
sensor_service = SensorService(db=await get_sensors_db(), redis=None)
data = await sensor_service.get_sensor_details(sensor_id)
```
### 2. Event Communication
#### Microservices Approach (Redis Pub/Sub)
```python
# Publishing
await redis.publish("energy_data", json.dumps(data))
# Subscribing
pubsub = redis.pubsub()
await pubsub.subscribe("energy_data")
message = await pubsub.get_message()
```
#### Modular Monolith Approach (Event Bus)
```python
# Publishing
from core.events import event_bus, EventTopics
await event_bus.publish(EventTopics.ENERGY_DATA, data)
# Subscribing
def handle_energy_data(data):
    # Process data
    pass
event_bus.subscribe(EventTopics.ENERGY_DATA, handle_energy_data)
```
### 3. Database Access
#### Microservices Approach
```python
# Each service has its own database connection
from database import get_database
db = await get_database() # Returns service-specific database
```
#### Modular Monolith Approach
```python
# Centralized database manager with module-specific databases
from core.database import db_manager
sensors_db = db_manager.sensors_db
demand_response_db = db_manager.demand_response_db
```
### 4. Application Structure
#### Microservices Structure
```
microservices/
├── api-gateway/
│ └── main.py (port 8000)
├── sensor-service/
│ └── main.py (port 8007)
├── demand-response-service/
│ └── main.py (port 8003)
├── data-ingestion-service/
│ └── main.py (port 8008)
└── docker-compose.yml (8+ containers)
```
#### Modular Monolith Structure
```
monolith/
├── src/
│ ├── main.py (single entry point)
│ ├── core/ (shared infrastructure)
│ └── modules/
│ ├── sensors/
│ ├── demand_response/
│ └── data_ingestion/
└── docker-compose.yml (1 container)
```
## Migration Steps
### Phase 1: Preparation
1. **Backup existing data**:
```bash
# Backup all MongoDB databases
mongodump --uri="mongodb://admin:password123@localhost:27017" --out=/backup/microservices
```
2. **Document current API endpoints**:
- List all endpoints from each microservice
- Document inter-service communication patterns
- Identify Redis pub/sub channels in use
3. **Review environment variables**:
- Consolidate environment variables
- Update connection strings for external MongoDB and Redis
### Phase 2: Deploy Modular Monolith
1. **Configure environment**:
```bash
cd /path/to/monolith
cp .env.example .env
# Edit .env with MongoDB and Redis connection strings
```
2. **Build and deploy**:
```bash
docker-compose up --build -d
```
3. **Verify health**:
```bash
curl http://localhost:8000/health
curl http://localhost:8000/api/v1/overview
```
### Phase 3: Data Migration (if needed)
The modular monolith uses the **same database structure** as the microservices, so typically no data migration is needed. However, verify:
1. **Database names match**:
- `energy_dashboard_sensors`
- `energy_dashboard_demand_response`
- `digitalmente_ingestion`
2. **Collections are accessible**:
```bash
# Connect to MongoDB
mongosh mongodb://admin:password123@mongodb-host:27017/?authSource=admin
# Check databases
show dbs
# Verify collections in each database
use energy_dashboard_sensors
show collections
```
### Phase 4: API Client Migration
Update API clients to point to the new monolith endpoint:
**Before**:
- Sensor API: `http://api-gateway:8000/api/v1/sensors/*`
- DR API: `http://api-gateway:8000/api/v1/demand-response/*`
**After**:
- All APIs: `http://monolith:8000/api/v1/*`
The API paths remain the same; only the host changes.
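For example, a client that previously called the sensors endpoint through the API Gateway only needs its base URL updated; the path itself is unchanged (hostnames below are illustrative):
```bash
# Before: through the API Gateway
curl http://api-gateway:8000/api/v1/sensors/get
# After: directly against the monolith
curl http://monolith:8000/api/v1/sensors/get
```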
### Phase 5: Decommission Microservices
Once the monolith is stable:
1. **Stop microservices**:
```bash
cd /path/to/microservices
docker-compose down
```
2. **Keep backups** for at least 30 days
3. **Archive microservices code** for reference
## Benefits of the Migration
### Operational Simplification
| Aspect | Microservices | Modular Monolith | Improvement |
|--------|---------------|------------------|-------------|
| **Containers** | 8+ containers | 1 container | 87% reduction |
| **Network calls** | HTTP between services | In-process calls | ~100x faster |
| **Deployment complexity** | Coordinate 8+ services | Single deployment | Much simpler |
| **Monitoring** | 8+ health endpoints | 1 health endpoint | Easier |
| **Log aggregation** | 8+ log sources | 1 log source | Simpler |
### Performance Improvements
1. **Reduced latency**:
- Inter-service HTTP calls: ~10-50ms
- Direct function calls: ~0.01-0.1ms
- **Improvement**: 100-1000x faster
2. **Reduced network overhead**:
- No HTTP serialization/deserialization
- No network round-trips
- No service discovery delays
3. **Shared resources**:
- Single database connection pool
- Shared Redis connection (if enabled)
- Shared in-memory caches
### Development Benefits
1. **Easier debugging**:
- Single process to debug
- Direct stack traces across modules
- No distributed tracing needed
2. **Simpler testing**:
- Test entire flow in one process
- No need to mock HTTP calls
- Integration tests run faster
3. **Faster development**:
- Single application to run locally
- Immediate code changes (with reload)
- No service orchestration needed
## Preserved Benefits from Microservices
### Module Isolation
Each module maintains clear boundaries:
- Separate directory structure
- Own models and business logic
- Dedicated database (data isolation)
- Clear public interfaces
### Independent Scaling (Future)
If needed, modules can be extracted back into microservices:
- Clean module boundaries make extraction easy
- Database per module already separated
- Event bus can switch to Redis pub/sub
- Direct calls can switch to HTTP calls
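As a rough sketch of that last point, a module extracted into its own service could keep the same `publish` call shape but forward events to Redis pub/sub instead of the in-process bus (this adapter is illustrative and not part of the current codebase):
```python
import json
import redis.asyncio as aioredis

class RedisEventBus:
    """Illustrative adapter: same publish() shape as core.events.EventBus, backed by Redis pub/sub."""
    def __init__(self, redis_url: str):
        # from_url returns a lazily-connecting async Redis client
        self._redis = aioredis.from_url(redis_url, decode_responses=True)

    async def publish(self, topic: str, data, source: str = "system"):
        # Serialize the event and publish it on the Redis channel named after the topic
        await self._redis.publish(topic, json.dumps({"data": data, "source": source}))
```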
### Team Organization
Teams can still own modules:
- Sensors team owns `modules/sensors/`
- DR team owns `modules/demand_response/`
- Clear ownership and responsibilities
## Rollback Strategy
If you need to rollback to microservices:
1. **Keep microservices code** in the repository
2. **Database unchanged**: Both architectures use the same databases
3. **Redeploy microservices**:
```bash
cd /path/to/microservices
docker-compose up -d
```
4. **Update API clients** to point back to API Gateway
## Monitoring and Observability
### Health Checks
**Single health endpoint**:
```bash
curl http://localhost:8000/health
```
Returns:
```json
{
"service": "Energy Dashboard Monolith",
"status": "healthy",
"components": {
"database": "healthy",
"redis": "healthy",
"event_bus": "healthy"
},
"modules": {
"sensors": "loaded",
"demand_response": "loaded",
"data_ingestion": "loaded"
}
}
```
### Logging
All logs in one place:
```bash
# Docker logs
docker-compose logs -f monolith
# Application logs
docker-compose logs -f monolith | grep "ERROR"
```
### Metrics
System overview endpoint:
```bash
curl http://localhost:8000/api/v1/overview
```
## Common Migration Issues
### Issue: Module Import Errors
**Problem**: `ModuleNotFoundError: No module named 'src.modules'`
**Solution**:
```bash
# Set PYTHONPATH
export PYTHONPATH=/app
# Or in docker-compose.yml
environment:
  - PYTHONPATH=/app
```
### Issue: Database Connection Errors
**Problem**: Cannot connect to MongoDB
**Solution**:
1. Verify MongoDB is accessible:
```bash
docker-compose exec monolith ping mongodb-host
```
2. Check connection string in `.env`
3. Ensure network connectivity
### Issue: Redis Connection Errors
**Problem**: Redis connection failed but app should work
**Solution**:
Redis is optional. Set in `.env`:
```
REDIS_ENABLED=false
```
### Issue: Event Subscribers Not Receiving Events
**Problem**: Events published but subscribers not called
**Solution**:
Ensure subscribers are registered before events are published:
```python
# Register subscriber in lifespan startup
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Subscribe before publishing
    event_bus.subscribe(EventTopics.ENERGY_DATA, handle_energy)
    yield
```
## Testing the Migration
### 1. Functional Testing
Test each module's endpoints:
```bash
# Sensors
curl http://localhost:8000/api/v1/sensors/get
curl http://localhost:8000/api/v1/rooms
# Analytics
curl http://localhost:8000/api/v1/analytics/summary
# Health
curl http://localhost:8000/health
```
### 2. Load Testing
Compare performance:
```bash
# Microservices
ab -n 1000 -c 10 http://localhost:8000/api/v1/sensors/get
# Modular Monolith
ab -n 1000 -c 10 http://localhost:8000/api/v1/sensors/get
```
Expected: Modular monolith should be significantly faster.
### 3. WebSocket Testing
Test real-time features:
```javascript
const ws = new WebSocket('ws://localhost:8000/api/v1/ws');
ws.onmessage = (event) => console.log('Received:', event.data);
```
## FAQ
### Q: Do I need to migrate the database?
**A**: No, the modular monolith uses the same database structure as the microservices.
### Q: Can I scale individual modules?
**A**: Not independently. The entire monolith scales together. If you need independent scaling, consider keeping the microservices architecture or using horizontal scaling with load balancers.
### Q: What happens to Redis pub/sub?
**A**: Replaced with an in-process event bus. Redis can still be used for caching if `REDIS_ENABLED=true`.
### Q: Are the API endpoints the same?
**A**: Yes, the API paths remain identical. Only the host changes.
### Q: Can I extract modules back to microservices later?
**A**: Yes, the modular structure makes it easy to extract modules back into separate services if needed.
### Q: How do I add a new module?
**A**: See the "Adding a New Module" section in README.md.
### Q: Is this suitable for production?
**A**: Yes, modular monoliths are production-ready and often more reliable than microservices for small-to-medium scale applications.
## Next Steps
1. **Deploy to staging** and run full test suite
2. **Monitor performance** and compare with microservices
3. **Gradual rollout** to production (canary or blue-green deployment)
4. **Decommission microservices** after 30 days of stable operation
5. **Update documentation** and team training
## Support
For issues or questions about the migration:
1. Check this guide and README.md
2. Review application logs: `docker-compose logs monolith`
3. Test health endpoint: `curl http://localhost:8000/health`
4. Contact the development team

453
monolith/README.md Normal file
View File

@@ -0,0 +1,453 @@
# Energy Dashboard - Modular Monolith
This is the modular monolithic architecture version of the Energy Dashboard, refactored from the original microservices architecture.
## Architecture Overview
The application is structured as a **modular monolith**, combining the benefits of:
- **Monolithic deployment**: Single application, simpler operations
- **Modular design**: Clear module boundaries, maintainability
### Key Architectural Decisions
1. **Single Application**: All modules run in one process
2. **Module Isolation**: Each module has its own directory and clear interfaces
3. **Separate Databases**: Each module maintains its own database for data isolation
4. **In-Process Event Bus**: Replaces Redis pub/sub for inter-module communication
5. **Direct Dependency Injection**: Modules communicate directly via function calls
6. **Shared Core**: Common infrastructure (database, events, config) shared across modules
## Project Structure
```
monolith/
├── src/
│ ├── main.py # Main FastAPI application
│ ├── core/ # Shared core infrastructure
│ │ ├── config.py # Centralized configuration
│ │ ├── database.py # Database connection manager
│ │ ├── events.py # In-process event bus
│ │ ├── redis.py # Optional Redis cache
│ │ ├── dependencies.py # FastAPI dependencies
│ │ └── logging_config.py # Logging setup
│ ├── modules/ # Business modules
│ │ ├── sensors/ # Sensor management module
│ │ │ ├── __init__.py
│ │ │ ├── router.py # API routes
│ │ │ ├── models.py # Data models
│ │ │ ├── sensor_service.py # Business logic
│ │ │ ├── room_service.py
│ │ │ ├── analytics_service.py
│ │ │ └── websocket_manager.py
│ │ ├── demand_response/ # Demand response module
│ │ │ ├── __init__.py
│ │ │ ├── models.py
│ │ │ └── demand_response_service.py
│ │ └── data_ingestion/ # Data ingestion module
│ │ ├── __init__.py
│ │ ├── config.py
│ │ ├── ftp_monitor.py
│ │ ├── slg_processor.py
│ │ └── database.py
│ └── api/ # API layer (if needed)
├── config/ # Configuration files
├── tests/ # Test files
├── requirements.txt # Python dependencies
├── Dockerfile # Docker build file
├── docker-compose.yml # Docker Compose configuration
└── README.md # This file
```
## Modules
### 1. Sensors Module (`src/modules/sensors`)
**Responsibility**: Sensor management, room management, real-time data, and analytics
**Key Features**:
- Sensor CRUD operations
- Room management
- Real-time data ingestion
- Analytics and reporting
- WebSocket support for live data streaming
**Database**: `energy_dashboard_sensors`
**API Endpoints**: `/api/v1/sensors/*`, `/api/v1/rooms/*`, `/api/v1/analytics/*`
### 2. Demand Response Module (`src/modules/demand_response`)
**Responsibility**: Grid interaction, demand response events, and load management
**Key Features**:
- Demand response event management
- Device flexibility calculation
- Auto-response configuration
- Load reduction requests
**Database**: `energy_dashboard_demand_response`
**API Endpoints**: `/api/v1/demand-response/*`
### 3. Data Ingestion Module (`src/modules/data_ingestion`)
**Responsibility**: FTP monitoring and SA4CPS data processing
**Key Features**:
- FTP file monitoring
- .slg_v2 file processing
- Dynamic collection management
- Duplicate detection
**Database**: `digitalmente_ingestion`
**API Endpoints**: `/api/v1/ingestion/*`
## Core Components
### Event Bus (`src/core/events.py`)
Replaces Redis pub/sub with an in-process event bus for inter-module communication.
**Standard Event Topics**:
- `energy_data`: Energy consumption updates
- `dr_events`: Demand response events
- `sensor_events`: Sensor-related events
- `system_events`: System-level events
- `data_ingestion`: Data ingestion events
**Usage Example**:
```python
from core.events import event_bus, EventTopics
# Publish event
await event_bus.publish(EventTopics.ENERGY_DATA, {"sensor_id": "sensor_1", "value": 3.5})
# Subscribe to events
def handle_energy_data(data):
print(f"Received energy data: {data}")
event_bus.subscribe(EventTopics.ENERGY_DATA, handle_energy_data)
```
### Database Manager (`src/core/database.py`)
Centralized database connection management with separate databases per module.
**Available Databases**:
- `main_db`: Main application database
- `sensors_db`: Sensors module database
- `demand_response_db`: Demand response module database
- `data_ingestion_db`: Data ingestion module database
**Usage Example**:
```python
from core.dependencies import get_sensors_db
from fastapi import Depends
async def my_endpoint(db=Depends(get_sensors_db)):
    result = await db.sensors.find_one({"sensor_id": "sensor_1"})
```
### Configuration (`src/core/config.py`)
Centralized configuration using Pydantic Settings.
**Configuration Sources**:
1. Environment variables
2. `.env` file (if present)
3. Default values
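In practice this means a field default is used only when neither the environment nor `.env` provides a value; a condensed sketch of the pattern used in `src/core/config.py` (the fields and defaults shown here are illustrative):
```python
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    debug: bool = False   # (3) default, used when nothing else sets DEBUG
    port: int = 8000      # overridden by PORT from .env (2) or the environment (1)

    class Config:
        env_file = ".env"
        case_sensitive = False

settings = Settings()
```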
## Getting Started
### Prerequisites
- Python 3.11+
- MongoDB 7.0+ (deployed separately)
- Redis 7+ (optional, for caching - deployed separately)
- Docker and Docker Compose (for containerized deployment)
### Local Development
1. **Install dependencies**:
```bash
cd monolith
pip install -r requirements.txt
```
2. **Configure environment**:
```bash
cp .env.example .env
# Edit .env with your MongoDB and Redis connection strings
```
3. **Ensure MongoDB and Redis are accessible**:
- MongoDB should be running and accessible at the URL specified in `MONGO_URL`
- Redis (optional) should be accessible at the URL specified in `REDIS_URL`
4. **Run the application**:
```bash
cd src
uvicorn main:app --reload --host 0.0.0.0 --port 8000
```
5. **Access the application**:
- API: http://localhost:8000
- Health Check: http://localhost:8000/health
- API Docs: http://localhost:8000/docs
### Docker Deployment
**Note**: MongoDB and Redis are deployed separately and must be accessible before starting the application.
1. **Configure environment variables**:
```bash
cp .env.example .env
# Edit .env with your MongoDB and Redis connection strings
```
2. **Build and start the application**:
```bash
cd monolith
docker-compose up --build -d
```
3. **View logs**:
```bash
docker-compose logs -f monolith
```
4. **Stop the application**:
```bash
docker-compose down
```
## API Endpoints
### Global Endpoints
- `GET /`: Root endpoint
- `GET /health`: Global health check
- `GET /api/v1/overview`: System overview
### Sensors Module
- `GET /api/v1/sensors/get`: Get sensors with filters
- `GET /api/v1/sensors/{sensor_id}`: Get sensor details
- `GET /api/v1/sensors/{sensor_id}/data`: Get sensor data
- `POST /api/v1/sensors`: Create sensor
- `PUT /api/v1/sensors/{sensor_id}`: Update sensor
- `DELETE /api/v1/sensors/{sensor_id}`: Delete sensor
- `GET /api/v1/rooms`: Get all rooms
- `GET /api/v1/rooms/names`: Get room names
- `POST /api/v1/rooms`: Create room
- `GET /api/v1/rooms/{room_name}`: Get room details
- `PUT /api/v1/rooms/{room_name}`: Update room
- `DELETE /api/v1/rooms/{room_name}`: Delete room
- `GET /api/v1/analytics/summary`: Analytics summary
- `GET /api/v1/analytics/energy`: Energy analytics
- `POST /api/v1/data/query`: Advanced data query
- `WS /api/v1/ws`: WebSocket for real-time data
### Demand Response Module
- Endpoints for demand response events, invitations, and device management
- (To be fully documented when router is added)
### Data Ingestion Module
- Endpoints for FTP monitoring status and manual triggers
- (To be fully documented when router is added)
## Inter-Module Communication
Modules communicate in two ways:
### 1. Direct Dependency Injection
For synchronous operations, modules directly import and call each other's services:
```python
from modules.sensors import SensorService
from core.dependencies import get_sensors_db
sensor_service = SensorService(db=await get_sensors_db(), redis=None)
sensors = await sensor_service.get_sensors()
```
### 2. Event-Driven Communication
For asynchronous operations, modules use the event bus:
```python
from core.events import event_bus, EventTopics
# Publisher
await event_bus.publish(EventTopics.ENERGY_DATA, {
"sensor_id": "sensor_1",
"value": 3.5,
"timestamp": 1234567890
})
# Subscriber
async def handle_energy_update(data):
print(f"Energy update: {data}")
event_bus.subscribe(EventTopics.ENERGY_DATA, handle_energy_update)
```
## Background Tasks
The application runs several background tasks:
1. **Room Metrics Aggregation** (every 5 minutes)
- Aggregates sensor data into room-level metrics
2. **Data Cleanup** (daily)
- Removes sensor data older than 90 days
3. **Event Scheduler** (every 60 seconds)
- Checks and executes scheduled demand response events
4. **Auto Response** (every 30 seconds)
- Processes automatic demand response opportunities
5. **FTP Monitoring** (every 6 hours, configurable)
- Monitors FTP server for new SA4CPS data files
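Each task is a long-running coroutine started from the FastAPI lifespan with `asyncio.create_task`; a simplified sketch of the room-metrics task as implemented in `src/main.py`:
```python
import asyncio
import logging
from core.database import db_manager
from modules.sensors.room_service import RoomService

logger = logging.getLogger(__name__)

async def room_metrics_aggregation_task():
    """Aggregate room-level metrics every 5 minutes (simplified)."""
    while True:
        try:
            room_service = RoomService(db_manager.sensors_db, None)
            await room_service.aggregate_all_room_metrics()
            await asyncio.sleep(300)   # 5 minutes
        except Exception as e:
            logger.error(f"Error in room metrics aggregation: {e}")
            await asyncio.sleep(600)   # back off after a failure

# During startup (inside the lifespan context manager):
# asyncio.create_task(room_metrics_aggregation_task())
```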
## Configuration Options
Key environment variables:
### Database
- `MONGO_URL`: MongoDB connection string
- `REDIS_URL`: Redis connection string
- `REDIS_ENABLED`: Enable/disable Redis (true/false)
### Application
- `DEBUG`: Enable debug mode (true/false)
- `HOST`: Application host (default: 0.0.0.0)
- `PORT`: Application port (default: 8000)
### FTP
- `FTP_SA4CPS_HOST`: FTP server host
- `FTP_SA4CPS_PORT`: FTP server port
- `FTP_SA4CPS_USERNAME`: FTP username
- `FTP_SA4CPS_PASSWORD`: FTP password
- `FTP_SA4CPS_REMOTE_PATH`: Remote directory path
- `FTP_CHECK_INTERVAL`: Check interval in seconds
- `FTP_SKIP_INITIAL_SCAN`: Skip initial FTP scan (true/false)
## Migration from Microservices
See [MIGRATION.md](MIGRATION.md) for detailed migration guide.
## Development Guidelines
### Adding a New Module
1. Create module directory: `src/modules/new_module/`
2. Add module files:
- `__init__.py`: Module exports
- `models.py`: Pydantic models
- `service.py`: Business logic
- `router.py`: API routes
3. Register module in main application:
```python
from modules.new_module.router import router as new_module_router
app.include_router(new_module_router, prefix="/api/v1/new-module", tags=["new-module"])
```
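A minimal `router.py` for such a module might look like the following (the module name, endpoint, and collection are placeholders):
```python
"""modules/new_module/router.py (illustrative skeleton)"""
from fastapi import APIRouter, Depends
from core.dependencies import get_main_db

router = APIRouter()

@router.get("/items")
async def list_items(db=Depends(get_main_db)):
    """Example endpoint: read documents from this module's (hypothetical) collection."""
    return await db.items.find().to_list(length=100)
```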
### Adding an Event Topic
1. Add topic to `EventTopics` class in `src/core/events.py`:
```python
class EventTopics:
    NEW_TOPIC = "new_topic"
```
2. Use in your module:
```python
from core.events import event_bus, EventTopics
await event_bus.publish(EventTopics.NEW_TOPIC, data)
```
## Testing
```bash
# Run all tests
pytest
# Run with coverage
pytest --cov=src --cov-report=html
# Run specific module tests
pytest tests/modules/sensors/
```
## Monitoring and Logging
- **Logs**: Application logs to stdout
- **Log Level**: Controlled by `DEBUG` environment variable
- **Health Checks**: Available at `/health` endpoint
- **Metrics**: System overview at `/api/v1/overview`
## Performance Considerations
- **Database Indexing**: Ensure proper indexes on frequently queried fields
- **Redis Caching**: Enable Redis for improved performance (optional)
- **Connection Pooling**: Motor (MongoDB) and Redis clients handle connection pooling
- **Async Operations**: All I/O operations are asynchronous
- **Background Tasks**: Long-running operations don't block request handling
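For instance, indexes for the most frequent sensor-data queries can be created once at startup with Motor (the collection and field names below are assumptions, not taken from the codebase):
```python
from core.database import db_manager

async def ensure_indexes():
    """Create compound indexes used by frequent queries (illustrative names)."""
    await db_manager.sensors_db.sensor_data.create_index(
        [("sensor_id", 1), ("timestamp", -1)]
    )
```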
## Security
- **CORS**: Configured in main application
- **Environment Variables**: Use `.env` file, never commit secrets
- **Database Authentication**: MongoDB requires authentication
- **Input Validation**: Pydantic models validate all inputs
- **Error Handling**: Sensitive information not exposed in error messages
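As an example of the validation point, a request model can constrain incoming payloads before they reach the service layer (field names and constraints here are hypothetical):
```python
from pydantic import BaseModel, Field

class SensorReadingIn(BaseModel):
    """Hypothetical request body for submitting a sensor reading."""
    sensor_id: str = Field(min_length=1)
    value: float
    unit: str = "kWh"
```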
## Troubleshooting
### Database Connection Issues
```bash
# Test MongoDB connection (update with your connection string)
mongosh mongodb://admin:password123@mongodb-host:27017/?authSource=admin
# Check if MongoDB is accessible from the container
docker-compose exec monolith ping mongodb-host
```
### Redis Connection Issues
```bash
# Test Redis connection (update with your connection string)
redis-cli -h redis-host ping
# Check if Redis is accessible from the container
docker-compose exec monolith ping redis-host
```
### Application Won't Start
```bash
# Check logs
docker-compose logs monolith
# Verify environment variables
docker-compose exec monolith env | grep MONGO
```
## License
[Your License Here]
## Contributing
[Your Contributing Guidelines Here]

View File

@@ -0,0 +1,41 @@
version: "3.8"
services:
# Modular Monolith Application
monolith:
build:
context: .
dockerfile: Dockerfile
container_name: energy-dashboard-monolith
restart: unless-stopped
ports:
- "8000:8000"
environment:
# MongoDB Configuration (external deployment)
- MONGO_URL=${MONGO_URL}
# Redis Configuration (external deployment, optional)
- REDIS_URL=${REDIS_URL}
- REDIS_ENABLED=${REDIS_ENABLED:-false}
# FTP Configuration
- FTP_SA4CPS_HOST=${FTP_SA4CPS_HOST:-ftp.sa4cps.pt}
- FTP_SA4CPS_PORT=${FTP_SA4CPS_PORT:-21}
- FTP_SA4CPS_USERNAME=${FTP_SA4CPS_USERNAME}
- FTP_SA4CPS_PASSWORD=${FTP_SA4CPS_PASSWORD}
- FTP_SA4CPS_REMOTE_PATH=${FTP_SA4CPS_REMOTE_PATH:-/SLGs/}
- FTP_CHECK_INTERVAL=${FTP_CHECK_INTERVAL:-21600}
- FTP_SKIP_INITIAL_SCAN=${FTP_SKIP_INITIAL_SCAN:-true}
# Application Settings
- DEBUG=${DEBUG:-false}
networks:
- energy-network
volumes:
- ./src:/app/src # Mount source code for development
networks:
energy-network:
driver: bridge
name: energy-network

1561
monolith/iots-left.json Executable file

File diff suppressed because it is too large Load Diff

616
monolith/iots-right.json Executable file
View File

@@ -0,0 +1,616 @@
{
"iots": {
"battery": [
{
"name": "Battery_1",
"type": "battery",
"uri": "192.168.2.54",
"sensors": [
{
"type": "energy",
"tag": [
"battery",
"stored_energy"
],
"data": "DOUBLE"
},
{
"type": "charging_rate",
"tag": [
"battery",
"charging_rate"
],
"data": "DOUBLE"
}
]
},
{
"name": "Battery_2",
"type": "battery",
"uri": "192.168.2.55",
"sensors": [
{
"type": "energy",
"tag": [
"battery",
"stored_energy"
],
"data": "DOUBLE"
},
{
"type": "charging_rate",
"tag": [
"battery",
"charging_rate"
],
"data": "DOUBLE"
}
]
},
{
"name": "Battery_3",
"type": "battery",
"uri": "192.168.2.56",
"sensors": [
{
"type": "energy",
"tag": [
"battery",
"stored_energy"
],
"data": "DOUBLE"
},
{
"type": "charging_rate",
"tag": [
"battery",
"charging_rate"
],
"data": "DOUBLE"
}
]
}
],
"refrigerator": [
{
"name": "Fridge",
"type": "refrigerator",
"uri": "http://192.168.2.5:8520/enaplug/read/170307001",
"sensors": [
{
"type": "power",
"tag": [
"enaplug_170307001",
"act1"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"enaplug_170307001",
"volt1"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"enaplug_170307001",
"curr1"
],
"data": "DOUBLE"
},
{
"type": "doorOpen",
"tag": [
"enaplug_170307001",
"doorOpened"
],
"data": "BOOLEAN"
},
{
"type": "state",
"tag": [
"enaplug_170307001",
"state"
],
"data": "DOUBLE"
},
{
"type": "internal Temperature",
"tag": [
"enaplug_170307001",
"temp2"
],
"data": "DOUBLE"
},
{
"type": "external Temperature",
"tag": [
"enaplug_170307001",
"temp1"
],
"data": "DOUBLE"
},
{
"type": "humidity",
"tag": [
"enaplug_170307001",
"hum1"
],
"data": "DOUBLE"
}
]
}
],
"waterheater": [
{
"name": "Water Heater",
"type": "water heater",
"uri": "http://192.168.2.5:8520/enaplug/read/180717001",
"sensors": [
{
"type": "power",
"tag": [
"enaplug_180717001",
"act"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"enaplug_180717001",
"volt"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"enaplug_180717001",
"curr"
],
"data": "DOUBLE"
},
{
"type": "state",
"tag": [
"enaplug_180717001",
"state"
],
"data": "BOOLEAN"
},
{
"type": "temperature",
"tag": [
"enaplug_180717001",
"temp"
],
"data": "DOUBLE"
}
]
}
],
"microwave": [
{
"name": "Microwave",
"type": "microwave",
"uri": "http://192.168.2.5:8520/resource/AnalyzerKitHall_V2",
"sensors": [
{
"type": "power",
"tag": [
"AnalyzerKitHall_V2",
"microwave_active"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"AnalyzerKitHall_V2",
"microwave_voltage"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"AnalyzerKitHall_V2",
"microwave_current_x10"
],
"data": "DOUBLE"
}
]
}
],
"dishwasher": [
{
"name": "Dishwasher",
"type": "dishwasher",
"uri": "http://192.168.2.5:8520/resource/AnalyzerKitHall_V2",
"sensors": [
{
"type": "power",
"tag": [
"AnalyzerKitHall_V2",
"dishwasher_active"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"AnalyzerKitHall_V2",
"dishwasher_voltage"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"AnalyzerKitHall_V2",
"dishwasher_current_x10"
],
"data": "DOUBLE"
}
]
}
],
"kettle": [
{
"name": "Kettle",
"type": "kettle",
"uri": "http://192.168.2.5:8520/resource/AnalyzerKitHall_V2",
"sensors": [
{
"type": "power",
"tag": [
"AnalyzerKitHall_V2",
"kettle_active"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"AnalyzerKitHall_V2",
"kettle_voltage"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"AnalyzerKitHall_V2",
"kettle_current_x10"
],
"data": "DOUBLE"
}
]
}
],
"hvac": [
{
"name": "Air Conditioner Kitchen",
"type": "hvac",
"uri": "http://192.168.2.5:8520/resource/AnalyzerKitHall_V2",
"sensors": [
{
"type": "power",
"tag": [
"AnalyzerKitHall_V2",
"kitchen_ac_activePower"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"AnalyzerKitHall_V2",
"kitchen_ac_voltage"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"AnalyzerKitHall_V2",
"kitchen_ac_current_x10"
],
"data": "DOUBLE"
}
]
},
{
"name": "Air Conditioner Hallway",
"type": "hvac",
"uri": "http://192.168.2.5:8520/resource/AnalyzerKitHall_V2",
"sensors": [
{
"type": "power",
"tag": [
"AnalyzerKitHall_V2",
"hallway_ac_activePower"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"AnalyzerKitHall_V2",
"hallway_ac_voltage"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"AnalyzerKitHall_V2",
"hallway_ac_current_x10"
],
"data": "DOUBLE"
}
]
},
{
"name": "Air Conditioner 112_115",
"type": "hvac",
"uri": "http://192.168.2.5:8520/resource/Analyzer115_V1",
"sensors": [
{
"type": "power",
"tag": [
"Analyzer115_V1",
"P2_W"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"Analyzer115_V1",
"U2N_Vx10"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"Analyzer115_V1",
"Curr2_mA"
],
"data": "DOUBLE"
}
]
},
{
"name": "Air Conditioner 111_116",
"type": "hvac",
"uri": "http://192.168.2.5:8520/resource/Analyzer116_V1",
"sensors": [
{
"type": "power",
"tag": [
"Analyzer116_V1",
"P2_W"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"Analyzer116_V1",
"U2N_V"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"Analyzer116_V1",
"Curr2_A"
],
"data": "DOUBLE"
}
]
}
],
"sockets": [
{
"name": "Sockets 112_115",
"type": "sockets",
"uri": "http://192.168.2.5:8520/resource/Analyzer115_V1",
"sensors": [
{
"type": "power",
"tag": [
"Analyzer115_V1",
"P1_W"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"Analyzer115_V1",
"U1N_Vx10"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"Analyzer115_V1",
"Curr1_mA"
],
"data": "DOUBLE"
}
]
},
{
"name": "Sockets 111_116",
"type": "sockets",
"uri": "http://192.168.2.5:8520/resource/Analyzer116_V1",
"sensors": [
{
"type": "power",
"tag": [
"Analyzer116_V1",
"P3_W"
],
"data": "DOUBLE"
},
{
"type": "voltage",
"tag": [
"Analyzer116_V1",
"U3N_V"
],
"data": "DOUBLE"
},
{
"type": "current",
"tag": [
"Analyzer116_V1",
"Curr3_A"
],
"data": "DOUBLE"
}
]
}
],
"lamp": [
{
"name": "Lamp 1_111",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/111/1",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 1_112",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/112/1",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 2_112",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/112/2",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 3_112",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/112/3",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 1_115",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/115/1",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 2_115",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/115/2",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 3_115",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/115/3",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
},
{
"name": "Lamp 1_116",
"type": "lamp",
"uri": "http://192.168.2.68:8089/desenrasca/lamp/116/1",
"sensors": [
{
"type": "power",
"tag": [
"consumption_w"
],
"data": "DOUBLE"
}
]
}
],
"generation": [
{
"name": "Generation",
"type": "generation",
"uri": "http://192.168.2.68:8089/desenrasca/generation/3750",
"sensors": [
{
"type": "generation",
"tag": [
"generation_w"
],
"data": "DOUBLE"
}
]
}
]
}
}

28
monolith/requirements.txt Normal file
View File

@@ -0,0 +1,28 @@
# FastAPI and ASGI server
fastapi==0.104.1
uvicorn[standard]==0.24.0
python-multipart==0.0.6
# Database drivers
motor==3.3.2 # Async MongoDB
redis[hiredis]==5.0.1 # Redis with hiredis for better performance
# Data validation and settings
pydantic==2.5.0
pydantic-settings==2.1.0
# Async HTTP client
aiohttp==3.9.1
# WebSockets
websockets==12.0
# Data processing
pandas==2.1.4
numpy==1.26.2
# FTP support
ftputil==5.0.4
# Utilities
python-dateutil==2.8.2

View File

@@ -0,0 +1 @@
"""Core infrastructure components for the modular monolith."""

View File

@@ -0,0 +1,43 @@
"""Centralized configuration management."""
import os
from typing import Dict, Optional
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application settings."""
    # Application
    app_name: str = "Energy Dashboard Monolith"
    app_version: str = "1.0.0"
    debug: bool = False
    host: str = "0.0.0.0"
    port: int = 8000
    # MongoDB
    mongo_url: str = os.getenv("MONGO_URL", "mongodb://admin:password123@localhost:27017/?authSource=admin")
    # Module-specific databases (preserving isolation)
    sensors_db_name: str = "energy_dashboard_sensors"
    demand_response_db_name: str = "energy_dashboard_demand_response"
    data_ingestion_db_name: str = "digitalmente_ingestion"
    main_db_name: str = "energy_dashboard"
    # HTTP Poller Configuration (for IoT devices)
    http_poll_interval: int = int(os.getenv("HTTP_POLL_INTERVAL", "60"))  # 60 seconds
    http_timeout: int = int(os.getenv("HTTP_TIMEOUT", "10"))  # 10 seconds
    http_max_concurrent: int = int(os.getenv("HTTP_MAX_CONCURRENT", "5"))  # 5 concurrent requests
    # CORS
    cors_origins: list = ["*"]
    cors_allow_credentials: bool = True
    cors_allow_methods: list = ["*"]
    cors_allow_headers: list = ["*"]
    class Config:
        env_file = ".env"
        case_sensitive = False
# Global settings instance
settings = Settings()

View File

@@ -0,0 +1,85 @@
"""Database connection management for all modules."""
import logging
from typing import Optional, Dict
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from .config import settings
logger = logging.getLogger(__name__)
class DatabaseManager:
    """Manages MongoDB connections for all modules."""
    def __init__(self):
        self._client: Optional[AsyncIOMotorClient] = None
        self._databases: Dict[str, AsyncIOMotorDatabase] = {}
    async def connect(self):
        """Establish connection to MongoDB."""
        try:
            logger.info(f"Connecting to MongoDB: {settings.mongo_url}")
            self._client = AsyncIOMotorClient(settings.mongo_url)
            # Test connection
            await self._client.admin.command('ping')
            logger.info("Successfully connected to MongoDB")
            # Initialize database references
            self._databases = {
                "main": self._client[settings.main_db_name],
                "sensors": self._client[settings.sensors_db_name],
                "demand_response": self._client[settings.demand_response_db_name],
                "data_ingestion": self._client[settings.data_ingestion_db_name],
            }
        except Exception as e:
            logger.error(f"Failed to connect to MongoDB: {e}")
            raise
    async def disconnect(self):
        """Close MongoDB connection."""
        if self._client:
            self._client.close()
            logger.info("Disconnected from MongoDB")
    def get_database(self, name: str) -> AsyncIOMotorDatabase:
        """Get database by name."""
        if name not in self._databases:
            raise ValueError(f"Database '{name}' not configured")
        return self._databases[name]
    @property
    def client(self) -> AsyncIOMotorClient:
        """Get the MongoDB client."""
        if not self._client:
            raise RuntimeError("Database not connected. Call connect() first.")
        return self._client
    @property
    def main_db(self) -> AsyncIOMotorDatabase:
        """Get main database."""
        return self.get_database("main")
    @property
    def sensors_db(self) -> AsyncIOMotorDatabase:
        """Get sensors database."""
        return self.get_database("sensors")
    @property
    def demand_response_db(self) -> AsyncIOMotorDatabase:
        """Get demand response database."""
        return self.get_database("demand_response")
    @property
    def data_ingestion_db(self) -> AsyncIOMotorDatabase:
        """Get data ingestion database."""
        return self.get_database("data_ingestion")
# Global database manager instance
db_manager = DatabaseManager()
async def get_database(name: str = "main") -> AsyncIOMotorDatabase:
    """Dependency injection function for database access."""
    return db_manager.get_database(name)

View File

@@ -0,0 +1,39 @@
"""FastAPI dependency injection utilities."""
from typing import Optional
from fastapi import Depends, HTTPException, status
from motor.motor_asyncio import AsyncIOMotorDatabase
import redis.asyncio as aioredis
from .database import db_manager
from .redis import redis_manager
from .events import event_bus, EventBus
async def get_main_db() -> AsyncIOMotorDatabase:
    """Get main database dependency."""
    return db_manager.main_db
async def get_sensors_db() -> AsyncIOMotorDatabase:
    """Get sensors database dependency."""
    return db_manager.sensors_db
async def get_demand_response_db() -> AsyncIOMotorDatabase:
    """Get demand response database dependency."""
    return db_manager.demand_response_db
async def get_data_ingestion_db() -> AsyncIOMotorDatabase:
    """Get data ingestion database dependency."""
    return db_manager.data_ingestion_db
async def get_redis() -> Optional[aioredis.Redis]:
    """Get Redis client dependency."""
    return redis_manager.client
def get_event_bus() -> EventBus:
    """Get event bus dependency."""
    return event_bus

137
monolith/src/core/events.py Normal file
View File

@@ -0,0 +1,137 @@
"""In-process event bus for inter-module communication."""
import asyncio
import logging
from typing import Dict, List, Callable, Any, Set
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
import json
logger = logging.getLogger(__name__)
@dataclass
class Event:
"""Event data structure."""
topic: str
data: Any
timestamp: datetime
source: str = "system"
def to_dict(self) -> dict:
"""Convert to dictionary."""
return {
"topic": self.topic,
"data": self.data,
"timestamp": self.timestamp.isoformat(),
"source": self.source
}
class EventBus:
"""
In-process event bus for replacing Redis pub/sub.
Provides asynchronous event publishing and subscription.
"""
def __init__(self):
self._subscribers: Dict[str, List[Callable]] = defaultdict(list)
self._event_history: List[Event] = []
self._max_history: int = 1000
self._lock = asyncio.Lock()
async def publish(self, topic: str, data: Any, source: str = "system"):
"""
Publish an event to a topic.
Args:
topic: Event topic/channel name
data: Event data (will be JSON serialized if dict)
source: Event source identifier
"""
event = Event(
topic=topic,
data=data,
timestamp=datetime.utcnow(),
source=source
)
# Store in history
async with self._lock:
self._event_history.append(event)
if len(self._event_history) > self._max_history:
self._event_history.pop(0)
# Notify subscribers
if topic in self._subscribers:
logger.debug(f"Publishing event to topic '{topic}': {len(self._subscribers[topic])} subscribers")
# Create tasks for all subscribers
tasks = []
for callback in self._subscribers[topic]:
tasks.append(self._call_subscriber(callback, event))
# Execute all callbacks concurrently
if tasks:
await asyncio.gather(*tasks, return_exceptions=True)
else:
logger.debug(f"No subscribers for topic '{topic}'")
async def _call_subscriber(self, callback: Callable, event: Event):
"""Call a subscriber callback with error handling."""
try:
if asyncio.iscoroutinefunction(callback):
await callback(event.data)
else:
callback(event.data)
except Exception as e:
logger.error(f"Error in event subscriber: {e}", exc_info=True)
def subscribe(self, topic: str, callback: Callable):
"""
Subscribe to events on a topic.
Args:
topic: Event topic/channel name
callback: Async or sync callback function that receives event data
"""
self._subscribers[topic].append(callback)
logger.info(f"Subscribed to topic '{topic}'. Total subscribers: {len(self._subscribers[topic])}")
def unsubscribe(self, topic: str, callback: Callable):
"""Unsubscribe from a topic."""
if topic in self._subscribers and callback in self._subscribers[topic]:
self._subscribers[topic].remove(callback)
logger.info(f"Unsubscribed from topic '{topic}'")
def get_topics(self) -> List[str]:
"""Get list of all topics with subscribers."""
return list(self._subscribers.keys())
def get_subscriber_count(self, topic: str) -> int:
"""Get number of subscribers for a topic."""
return len(self._subscribers.get(topic, []))
async def get_event_history(self, topic: str = None, limit: int = 100) -> List[Event]:
"""Get event history, optionally filtered by topic."""
async with self._lock:
if topic:
events = [e for e in self._event_history if e.topic == topic]
else:
events = self._event_history.copy()
return events[-limit:]
# Global event bus instance
event_bus = EventBus()
# Common event topics (replaces Redis channels)
class EventTopics:
"""Standard event topic names."""
ENERGY_DATA = "energy_data"
DR_EVENTS = "dr_events"
SENSOR_EVENTS = "sensor_events"
SYSTEM_EVENTS = "system_events"
DATA_INGESTION = "data_ingestion"

View File

@@ -0,0 +1,25 @@
"""Logging configuration."""
import logging
import sys
from .config import settings
def setup_logging():
    """Configure application logging."""
    log_level = logging.DEBUG if settings.debug else logging.INFO
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        handlers=[
            logging.StreamHandler(sys.stdout)
        ]
    )
    # Set third-party loggers to WARNING
    logging.getLogger("uvicorn").setLevel(logging.WARNING)
    logging.getLogger("motor").setLevel(logging.WARNING)
    logging.getLogger("redis").setLevel(logging.WARNING)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured. Level: {log_level}")

View File

@@ -0,0 +1,61 @@
"""Redis connection management (optional, for caching)."""
import logging
from typing import Optional
import redis.asyncio as aioredis
from .config import settings
logger = logging.getLogger(__name__)
class RedisManager:
    """Manages Redis connection for caching."""
    def __init__(self):
        self._client: Optional[aioredis.Redis] = None
    async def connect(self):
        """Establish connection to Redis."""
        if not settings.redis_enabled:
            logger.info("Redis is disabled in settings")
            return
        try:
            logger.info(f"Connecting to Redis: {settings.redis_url}")
            self._client = await aioredis.from_url(
                settings.redis_url,
                encoding="utf-8",
                decode_responses=True
            )
            # Test connection
            await self._client.ping()
            logger.info("Successfully connected to Redis")
        except Exception as e:
            logger.warning(f"Failed to connect to Redis: {e}. Continuing without Redis cache.")
            self._client = None
    async def disconnect(self):
        """Close Redis connection."""
        if self._client:
            await self._client.close()
            logger.info("Disconnected from Redis")
    @property
    def client(self) -> Optional[aioredis.Redis]:
        """Get the Redis client."""
        return self._client
    @property
    def is_available(self) -> bool:
        """Check if Redis is available."""
        return self._client is not None
# Global Redis manager instance
redis_manager = RedisManager()
async def get_redis() -> Optional[aioredis.Redis]:
    """Dependency injection function for Redis access."""
    return redis_manager.client

305
monolith/src/main.py Normal file
View File

@@ -0,0 +1,305 @@
"""
Main FastAPI application for the Energy Dashboard Modular Monolith.
Integrates all modules: sensors, demand-response, and data-ingestion.
"""
import asyncio
import logging
from contextlib import asynccontextmanager
from datetime import datetime, timedelta
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
# Core imports
from core.config import settings
from core.logging_config import setup_logging
from core.database import db_manager
from core.events import event_bus, EventTopics
# Module imports
from modules.sensors.router import router as sensors_router
from modules.sensors.room_service import RoomService
from modules.sensors import WebSocketManager
# TEMPORARILY DISABLED: from modules.demand_response import DemandResponseService
# Setup logging
setup_logging()
logger = logging.getLogger(__name__)
# Background tasks
async def room_metrics_aggregation_task():
"""Periodically aggregate room-level metrics"""
logger.info("Starting room metrics aggregation task")
while True:
try:
room_service = RoomService(db_manager.sensors_db, None)
await room_service.aggregate_all_room_metrics()
await asyncio.sleep(300) # 5 minutes
except Exception as e:
logger.error(f"Error in room metrics aggregation: {e}")
await asyncio.sleep(600)
async def data_cleanup_task():
"""Periodic cleanup of old data"""
logger.info("Starting data cleanup task")
while True:
try:
from modules.sensors import SensorService
service = SensorService(db_manager.sensors_db, None)
cleanup_date = datetime.utcnow() - timedelta(days=90)
await service.cleanup_old_data(cleanup_date)
await asyncio.sleep(86400) # 24 hours
except Exception as e:
logger.error(f"Error in data cleanup task: {e}")
await asyncio.sleep(7200)
# TEMPORARILY DISABLED: Demand Response background tasks
# async def event_scheduler_task():
# """Background task for checking and executing scheduled DR events"""
# logger.info("Starting event scheduler task")
#
# while True:
# try:
# service = DemandResponseService(db_manager.demand_response_db, redis_manager.client)
# await service.check_scheduled_events()
# await asyncio.sleep(settings.event_scheduler_interval)
#
# except Exception as e:
# logger.error(f"Error in event scheduler task: {e}")
# await asyncio.sleep(120)
# async def auto_response_task():
# """Background task for automatic demand response"""
# logger.info("Starting auto-response task")
#
# while True:
# try:
# service = DemandResponseService(db_manager.demand_response_db, redis_manager.client)
# await service.process_auto_responses()
# await asyncio.sleep(settings.auto_response_interval)
#
# except Exception as e:
# logger.error(f"Error in auto-response task: {e}")
# await asyncio.sleep(90)
# async def energy_data_event_subscriber():
# """Subscribe to internal event bus for energy data events"""
# logger.info("Starting energy data event subscriber")
#
# async def handle_energy_data(data):
# """Handle energy data events"""
# try:
# service = DemandResponseService(db_manager.demand_response_db, redis_manager.client)
# sensor_id = data.get("sensorId") or data.get("sensor_id")
# power_kw = data.get("value", 0.0)
#
# if sensor_id:
# service.update_device_power_cache(sensor_id, power_kw)
#
# except Exception as e:
# logger.error(f"Error processing energy data event: {e}")
#
# # Subscribe to energy data events
# event_bus.subscribe(EventTopics.ENERGY_DATA, handle_energy_data)
async def http_polling_task(http_poller):
"""Background task for HTTP IoT device polling"""
logger.info("Starting HTTP polling task")
try:
await http_poller.run()
except Exception as e:
logger.error(f"Error in HTTP polling task: {e}")
# Application lifespan
@asynccontextmanager
async def lifespan(app: FastAPI):
"""Manage application lifespan"""
logger.info(f"Starting {settings.app_name} v{settings.app_version}...")
# Connect to databases
await db_manager.connect()
# Initialize default rooms
room_service = RoomService(db_manager.sensors_db, None)
await room_service.initialize_default_rooms()
# Initialize HTTP poller for IoT devices
from modules.data_ingestion import HttpPoller, set_http_poller
http_poller = HttpPoller(
sensors_db=db_manager.sensors_db,
poll_interval=settings.http_poll_interval,
timeout=10,
max_concurrent=5
)
set_http_poller(http_poller)
# Subscribe to internal events
# TEMPORARILY DISABLED: await energy_data_event_subscriber()
# Start background tasks
asyncio.create_task(room_metrics_aggregation_task())
asyncio.create_task(data_cleanup_task())
# TEMPORARILY DISABLED: asyncio.create_task(event_scheduler_task())
# TEMPORARILY DISABLED: asyncio.create_task(auto_response_task())
# Start HTTP polling task
asyncio.create_task(http_polling_task(http_poller))
logger.info("Application startup complete")
yield
logger.info("Shutting down application...")
# Disconnect from databases
await db_manager.disconnect()
logger.info("Application shutdown complete")
# Create FastAPI application
app = FastAPI(
title=settings.app_name,
description="Modular monolithic architecture for Energy Dashboard",
version=settings.app_version,
lifespan=lifespan
)
# Add CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=settings.cors_origins,
allow_credentials=settings.cors_allow_credentials,
allow_methods=settings.cors_allow_methods,
allow_headers=settings.cors_allow_headers,
)
# Root endpoint
@app.get("/")
async def root():
"""Root endpoint"""
return {
"service": settings.app_name,
"version": settings.app_version,
"status": "running",
"timestamp": datetime.utcnow().isoformat()
}
# Health check endpoint
@app.get("/health")
async def health_check():
"""Global health check"""
try:
# Check database connection
await db_manager.main_db.command("ping")
return {
"service": settings.app_name,
"version": settings.app_version,
"status": "healthy",
"timestamp": datetime.utcnow().isoformat(),
"components": {
"database": "healthy",
"event_bus": "healthy"
},
"modules": {
"sensors": "loaded",
"demand_response": "disabled",
"data_ingestion": "loaded"
}
}
except Exception as e:
logger.error(f"Health check failed: {e}")
raise HTTPException(status_code=503, detail="Service Unavailable")
# System overview endpoint
@app.get("/api/v1/overview")
async def system_overview():
"""Get system overview"""
try:
from modules.sensors import SensorService
# TEMPORARILY DISABLED: from modules.demand_response import DemandResponseService
sensor_service = SensorService(db_manager.sensors_db, None)
# TEMPORARILY DISABLED: dr_service = DemandResponseService(db_manager.demand_response_db, None)
# Get sensor counts
all_sensors = await sensor_service.get_sensors()
active_sensors = [s for s in all_sensors if s.get("status") == "online"]
# Get room counts
room_service = RoomService(db_manager.sensors_db, None)
all_rooms = await room_service.get_rooms()
# Get DR event counts
# TEMPORARILY DISABLED: active_events = len(dr_service.active_events) if hasattr(dr_service, 'active_events') else 0
return {
"timestamp": datetime.utcnow().isoformat(),
"sensors": {
"total": len(all_sensors),
"active": len(active_sensors),
"offline": len(all_sensors) - len(active_sensors)
},
"rooms": {
"total": len(all_rooms)
},
"demand_response": {
"status": "disabled"
},
"event_bus": {
"topics": event_bus.get_topics(),
"total_topics": len(event_bus.get_topics())
}
}
except Exception as e:
logger.error(f"Error getting system overview: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Include module routers with prefixes
app.include_router(
sensors_router,
prefix="/api/v1",
tags=["sensors"]
)
# Data Ingestion router
from modules.data_ingestion import router as data_ingestion_router
app.include_router(
data_ingestion_router,
prefix="/api/v1/ingestion",
tags=["data-ingestion"]
)
# Note: Demand Response router would be added here (currently disabled)
# app.include_router(demand_response_router, prefix="/api/v1/demand-response", tags=["demand-response"])
if __name__ == "__main__":
import uvicorn
uvicorn.run(
"main:app",
host=settings.host,
port=settings.port,
reload=settings.debug
)
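The lifespan handler above schedules http_polling_task(http_poller) as a background task, but that coroutine is defined earlier in main.py and is not part of this hunk. A minimal sketch, assuming the wrapper does nothing more than delegate to HttpPoller.run() and handle shutdown:

import asyncio
import logging

logger = logging.getLogger(__name__)

async def http_polling_task(poller) -> None:
    """Background wrapper around HttpPoller.run() (illustrative sketch, not the actual definition)."""
    try:
        await poller.run()
    except asyncio.CancelledError:
        # Application is shutting down: ask the poller to exit its loop, then re-raise
        poller.stop()
        raise
    except Exception as exc:
        logger.error(f"HTTP polling task stopped unexpectedly: {exc}")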

View File

@@ -0,0 +1,31 @@
"""Data Ingestion module - handles HTTP/MQTT IoT device polling."""
from .http_poller import HttpPoller
from .iot_config import IoTConfiguration, IoTDevice, get_iot_config, get_config_loader
from .models import (
DataSourceType, PollingStatus, DeviceStatus,
DataSourceSummary, PollingMetrics
)
from .router import router, set_http_poller
__all__ = [
# HTTP Poller
"HttpPoller",
"set_http_poller",
# Configuration
"IoTConfiguration",
"IoTDevice",
"get_iot_config",
"get_config_loader",
# Models
"DataSourceType",
"PollingStatus",
"DeviceStatus",
"DataSourceSummary",
"PollingMetrics",
# Router
"router",
]

View File

@@ -0,0 +1,353 @@
"""
HTTP Poller Service
Polls IoT devices via HTTP and ingests sensor data
"""
import asyncio
import logging
import time
from datetime import datetime
from typing import Dict, Optional, Any, List
import aiohttp
from motor.motor_asyncio import AsyncIOMotorDatabase
from .iot_config import IoTConfiguration, IoTDevice, SensorConfig, get_iot_config
from .models import DeviceStatus, PollingStatus, PollingMetrics
from modules.sensors.models import SensorReading, SensorType
from core.events import event_bus, EventTopics
logger = logging.getLogger(__name__)
class HttpPoller:
"""HTTP-based IoT device poller"""
def __init__(
self,
sensors_db: AsyncIOMotorDatabase,
config: Optional[IoTConfiguration] = None,
poll_interval: int = 60,
timeout: int = 10,
max_concurrent: int = 5
):
"""
Initialize HTTP poller
Args:
sensors_db: Motor database for sensor data storage
config: IoT configuration (loads from file if None)
poll_interval: Seconds between polls
timeout: HTTP request timeout in seconds
max_concurrent: Maximum concurrent HTTP requests
"""
self.sensors_db = sensors_db
self.config = config or get_iot_config()
self.poll_interval = poll_interval
self.timeout = timeout
self.max_concurrent = max_concurrent
# Metrics tracking
self.device_status: Dict[str, DeviceStatus] = {}
self.total_polls = 0
self.successful_polls = 0
self.failed_polls = 0
self.poll_times: List[float] = []
# Control flags
self.running = False
self._semaphore = asyncio.Semaphore(max_concurrent)
# Initialize device status
self._initialize_device_status()
def _initialize_device_status(self):
"""Initialize status tracking for all devices"""
for device in self.config.get_all_devices():
self.device_status[device.name] = DeviceStatus(
device_name=device.name,
device_type=device.type,
uri=device.uri,
status=PollingStatus.INACTIVE,
sensors_count=len(device.sensors)
)
async def poll_device(self, device: IoTDevice) -> bool:
"""
Poll a single device and ingest its data
Args:
device: IoT device configuration
Returns:
True if successful, False otherwise
"""
async with self._semaphore:
status = self.device_status[device.name]
start_time = time.time()
try:
# Update status
status.last_poll = datetime.utcnow()
status.total_polls += 1
self.total_polls += 1
# Make HTTP request
async with aiohttp.ClientSession() as session:
async with session.get(
device.uri,
timeout=aiohttp.ClientTimeout(total=self.timeout)
) as response:
if response.status != 200:
raise Exception(f"HTTP {response.status}: {await response.text()}")
data = await response.json()
# Extract and store sensor readings
timestamp = int(datetime.utcnow().timestamp())
readings_stored = 0
for sensor in device.sensors:
try:
value = self._extract_value(data, sensor.tag)
if value is not None:
await self._store_reading(
device=device,
sensor=sensor,
value=value,
timestamp=timestamp
)
readings_stored += 1
except Exception as e:
logger.warning(
f"Failed to extract sensor {sensor.type} from {device.name}: {e}"
)
# Update success metrics
poll_time = (time.time() - start_time) * 1000
self.poll_times.append(poll_time)
if len(self.poll_times) > 100:
self.poll_times.pop(0)
status.successful_polls += 1
status.last_success = datetime.utcnow()
status.status = PollingStatus.ACTIVE
status.last_error = None
self.successful_polls += 1
logger.debug(
f"Polled {device.name}: {readings_stored} readings in {poll_time:.1f}ms"
)
return True
except asyncio.TimeoutError:
error_msg = f"Timeout after {self.timeout}s"
status.failed_polls += 1
status.status = PollingStatus.ERROR
status.last_error = error_msg
self.failed_polls += 1
logger.error(f"Timeout polling {device.name}")
return False
except Exception as e:
error_msg = str(e)
status.failed_polls += 1
status.status = PollingStatus.ERROR
status.last_error = error_msg
self.failed_polls += 1
logger.error(f"Error polling {device.name}: {e}")
return False
def _extract_value(self, data: Dict[str, Any], tag_path: List[str]) -> Optional[Any]:
"""
Extract value from nested JSON using tag path
Args:
data: JSON response data
tag_path: List of keys to traverse
Returns:
Extracted value or None
"""
current = data
for key in tag_path:
if isinstance(current, dict):
current = current.get(key)
if current is None:
return None
else:
return None
return current
async def _store_reading(
self,
device: IoTDevice,
sensor: SensorConfig,
value: Any,
timestamp: int
):
"""
Store sensor reading in database and publish event
Args:
device: Device configuration
sensor: Sensor configuration
value: Sensor value
timestamp: Unix timestamp
"""
# Map sensor type to SensorType enum (with fallback to ENERGY)
sensor_type_map = {
"power": SensorType.ENERGY,
"voltage": SensorType.ENERGY,
"current": SensorType.ENERGY,
"energy": SensorType.ENERGY,
"temperature": SensorType.TEMPERATURE,
"humidity": SensorType.HUMIDITY,
"co2": SensorType.CO2,
"generation": SensorType.ENERGY,
"charging_rate": SensorType.ENERGY,
"doorOpen": SensorType.OCCUPANCY,
"state": SensorType.OCCUPANCY,
}
sensor_type = sensor_type_map.get(sensor.type, SensorType.ENERGY)
# Create sensor ID from device name and sensor type
sensor_id = f"{device.name}_{sensor.type}".replace(" ", "_")
# Convert value based on data type
if sensor.data == "BOOLEAN":
numeric_value = 1.0 if value else 0.0
else:
numeric_value = float(value)
# Create sensor reading
reading_data = {
"sensor_id": sensor_id,
"sensor_type": sensor_type.value,
"timestamp": timestamp,
"room": device.type, # Use device type as room (battery, refrigerator, etc.)
"metadata": {
"device_name": device.name,
"device_type": device.type,
"sensor_type": sensor.type,
"data_type": sensor.data,
"source": "http_poller"
}
}
# Add specific sensor data fields
if sensor.type == "power":
reading_data["power"] = numeric_value
elif sensor.type == "voltage":
reading_data["voltage"] = numeric_value
elif sensor.type == "current":
reading_data["current"] = numeric_value
elif sensor.type == "temperature":
reading_data["temperature"] = numeric_value
elif sensor.type == "humidity":
reading_data["humidity"] = numeric_value
elif sensor.type == "energy":
reading_data["energy"] = numeric_value
elif sensor.type == "generation":
reading_data["generation"] = numeric_value
else:
reading_data["energy"] = numeric_value # Default field
# Store in database
await self.sensors_db.sensor_readings.insert_one(reading_data)
# Publish to event bus
await event_bus.publish(EventTopics.ENERGY_DATA, {
"sensor_id": sensor_id,
"value": numeric_value,
"timestamp": timestamp,
"device_name": device.name,
"sensor_type": sensor.type
})
async def poll_all_devices(self):
"""Poll all configured devices concurrently"""
devices = self.config.get_all_devices()
if not devices:
logger.warning("No devices configured for polling")
return
logger.info(f"Polling {len(devices)} devices...")
# Poll all devices concurrently
tasks = [self.poll_device(device) for device in devices]
results = await asyncio.gather(*tasks, return_exceptions=True)
# Count successes
successes = sum(1 for r in results if r is True)
logger.info(f"Polling complete: {successes}/{len(devices)} successful")
async def run(self):
"""Run continuous polling loop"""
self.running = True
logger.info(
f"Starting HTTP poller: {len(self.config.get_all_devices())} devices, "
f"interval={self.poll_interval}s"
)
while self.running:
try:
await self.poll_all_devices()
await asyncio.sleep(self.poll_interval)
except Exception as e:
logger.error(f"Error in polling loop: {e}")
await asyncio.sleep(10) # Brief pause on error
def stop(self):
"""Stop the polling loop"""
logger.info("Stopping HTTP poller...")
self.running = False
def get_metrics(self) -> PollingMetrics:
"""Get current polling metrics"""
active_count = sum(
1 for s in self.device_status.values()
if s.status == PollingStatus.ACTIVE
)
inactive_count = sum(
1 for s in self.device_status.values()
if s.status == PollingStatus.INACTIVE
)
error_count = sum(
1 for s in self.device_status.values()
if s.status == PollingStatus.ERROR
)
# Count devices by type
devices_by_type: Dict[str, int] = {}
for device in self.config.get_all_devices():
devices_by_type[device.type] = devices_by_type.get(device.type, 0) + 1
# Calculate success rate
success_rate = 0.0
if self.total_polls > 0:
success_rate = (self.successful_polls / self.total_polls) * 100
# Calculate average poll time
avg_poll_time = 0.0
if self.poll_times:
avg_poll_time = sum(self.poll_times) / len(self.poll_times)
return PollingMetrics(
timestamp=datetime.utcnow(),
total_devices=len(self.device_status),
active_devices=active_count,
inactive_devices=inactive_count,
error_devices=error_count,
total_polls=self.total_polls,
successful_polls=self.successful_polls,
failed_polls=self.failed_polls,
success_rate=success_rate,
average_poll_time_ms=avg_poll_time,
devices_by_type=devices_by_type
)
def get_device_statuses(self) -> List[DeviceStatus]:
"""Get status for all devices"""
return list(self.device_status.values())
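For reference, _extract_value walks the sensor's tag list key by key through the device's JSON payload. A self-contained illustration (the payload shape and tag paths below are made up, not taken from iots-right.json):

# Illustrative only: mirrors HttpPoller._extract_value on a hypothetical payload.
payload = {"meter": {"power": 1.25, "voltage": 231.0}}

def extract(data, tag_path):
    current = data
    for key in tag_path:
        if not isinstance(current, dict):
            return None
        current = current.get(key)
        if current is None:
            return None
    return current

print(extract(payload, ["meter", "power"]))    # 1.25
print(extract(payload, ["meter", "missing"]))  # None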

View File

@@ -0,0 +1,187 @@
"""
IoT Configuration Loader
Loads and validates IoT device configuration from iots-right.json
"""
import json
import logging
from pathlib import Path
from typing import Dict, List, Optional, Any
from pydantic import BaseModel, Field, validator
logger = logging.getLogger(__name__)
class SensorConfig(BaseModel):
"""Individual sensor configuration within a device"""
type: str = Field(..., description="Sensor type (power, voltage, temperature, etc.)")
tag: List[str] = Field(..., description="JSON path tags to extract value")
data: str = Field(..., description="Data type: DOUBLE, BOOLEAN, etc.")
class IoTDevice(BaseModel):
"""IoT device configuration"""
name: str = Field(..., description="Device name")
type: str = Field(..., description="Device type (battery, refrigerator, hvac, etc.)")
uri: str = Field(..., description="HTTP endpoint URI")
sensors: List[SensorConfig] = Field(..., description="List of sensors")
@validator('uri')
def validate_uri(cls, v):
"""Ensure URI is valid"""
if not v.startswith(('http://', 'https://')):
raise ValueError(f"Invalid URI: {v}")
return v
class IoTConfiguration(BaseModel):
"""Complete IoT configuration"""
iots: Dict[str, List[IoTDevice]] = Field(..., description="IoT devices grouped by category")
def get_all_devices(self) -> List[IoTDevice]:
"""Get flat list of all devices"""
devices = []
for device_list in self.iots.values():
devices.extend(device_list)
return devices
def get_devices_by_type(self, device_type: str) -> List[IoTDevice]:
"""Get devices by category type"""
return self.iots.get(device_type, [])
def get_device_by_name(self, name: str) -> Optional[IoTDevice]:
"""Find device by name"""
for device in self.get_all_devices():
if device.name == name:
return device
return None
class IoTConfigLoader:
"""Loads and manages IoT configuration"""
def __init__(self, config_path: Optional[Path] = None):
"""
Initialize config loader
Args:
config_path: Path to iots-right.json. If None, looks in monolith root
"""
self.config_path = config_path or self._find_config_file()
self.config: Optional[IoTConfiguration] = None
def _find_config_file(self) -> Path:
"""Find iots-right.json in monolith directory"""
# Start from current file location and go up to find monolith root
current = Path(__file__).parent
while current.name != 'monolith' and current.parent != current:
current = current.parent
config_file = current / 'iots-right.json'
if not config_file.exists():
logger.warning(f"Config file not found at {config_file}")
return config_file
def load(self) -> IoTConfiguration:
"""
Load configuration from file
Returns:
IoTConfiguration object
Raises:
FileNotFoundError: If config file doesn't exist
ValueError: If config is invalid
"""
if not self.config_path.exists():
raise FileNotFoundError(f"IoT config not found: {self.config_path}")
try:
with open(self.config_path, 'r') as f:
data = json.load(f)
self.config = IoTConfiguration(**data)
device_count = len(self.config.get_all_devices())
logger.info(f"Loaded IoT config: {device_count} devices from {self.config_path}")
# Log summary by type
for device_type, devices in self.config.iots.items():
logger.info(f" - {device_type}: {len(devices)} devices")
return self.config
except json.JSONDecodeError as e:
logger.error(f"Invalid JSON in config file: {e}")
raise ValueError(f"Invalid JSON in {self.config_path}: {e}")
except Exception as e:
logger.error(f"Failed to load config: {e}")
raise
def reload(self) -> IoTConfiguration:
"""Reload configuration from file"""
return self.load()
def get_config(self) -> IoTConfiguration:
"""
Get current configuration, loading if necessary
Returns:
IoTConfiguration object
"""
if self.config is None:
self.load()
return self.config
def get_device_summary(self) -> Dict[str, Any]:
"""Get summary statistics of devices"""
config = self.get_config()
total_devices = 0
total_sensors = 0
summary = {}
for device_type, devices in config.iots.items():
device_count = len(devices)
sensor_count = sum(len(d.sensors) for d in devices)
total_devices += device_count
total_sensors += sensor_count
summary[device_type] = {
"device_count": device_count,
"sensor_count": sensor_count,
"devices": [
{
"name": d.name,
"uri": d.uri,
"sensor_count": len(d.sensors)
}
for d in devices
]
}
return {
"total_devices": total_devices,
"total_sensors": total_sensors,
"by_type": summary,
"config_file": str(self.config_path)
}
# Global config loader instance
_config_loader: Optional[IoTConfigLoader] = None
def get_config_loader() -> IoTConfigLoader:
"""Get global config loader instance"""
global _config_loader
if _config_loader is None:
_config_loader = IoTConfigLoader()
return _config_loader
def get_iot_config() -> IoTConfiguration:
"""Get IoT configuration"""
return get_config_loader().get_config()
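The contents of iots-right.json are not part of this excerpt; the minimal document below only illustrates the shape IoTConfiguration accepts (a top-level "iots" mapping of category name to device list). The device name, URI and tags are invented for the example:

from modules.data_ingestion.iot_config import IoTConfiguration

sample = {
    "iots": {
        "battery": [
            {
                "name": "battery-1",              # hypothetical device
                "type": "battery",
                "uri": "http://192.168.1.50/status",
                "sensors": [
                    {"type": "power", "tag": ["meter", "power"], "data": "DOUBLE"},
                    {"type": "state", "tag": ["state"], "data": "BOOLEAN"},
                ],
            }
        ]
    }
}

config = IoTConfiguration(**sample)
print(len(config.get_all_devices()))               # 1
print(config.get_device_by_name("battery-1").uri)  # http://192.168.1.50/status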

View File

@@ -0,0 +1,112 @@
"""
Pydantic models for Data Ingestion module
"""
from datetime import datetime
from typing import Optional, Dict, Any, List
from pydantic import BaseModel, Field
from enum import Enum
class PollingStatus(str, Enum):
"""Device polling status"""
ACTIVE = "active"
INACTIVE = "inactive"
ERROR = "error"
DISABLED = "disabled"
class DataSourceType(str, Enum):
"""Data source type"""
HTTP = "http"
MQTT = "mqtt"
class DeviceStatus(BaseModel):
"""Device polling status information"""
device_name: str
device_type: str
uri: str
status: PollingStatus
last_poll: Optional[datetime] = None
last_success: Optional[datetime] = None
total_polls: int = 0
successful_polls: int = 0
failed_polls: int = 0
last_error: Optional[str] = None
sensors_count: int = 0
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class DataSourceSummary(BaseModel):
"""Summary of data sources"""
source_type: DataSourceType
enabled: bool
total_devices: int
active_devices: int
total_sensors: int
devices: List[DeviceStatus]
class PollingMetrics(BaseModel):
"""Polling performance metrics"""
timestamp: datetime
total_devices: int
active_devices: int
inactive_devices: int
error_devices: int
total_polls: int
successful_polls: int
failed_polls: int
success_rate: float
average_poll_time_ms: float
devices_by_type: Dict[str, int]
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
class HttpPollerConfig(BaseModel):
"""HTTP poller configuration"""
enabled: bool = True
poll_interval_seconds: int = Field(60, ge=5, le=3600)
timeout_seconds: int = Field(10, ge=1, le=60)
max_retries: int = Field(3, ge=0, le=10)
concurrent_requests: int = Field(5, ge=1, le=50)
class MqttSubscriberConfig(BaseModel):
"""MQTT subscriber configuration (future)"""
enabled: bool = False
broker_host: str = "localhost"
broker_port: int = 1883
username: Optional[str] = None
password: Optional[str] = None
topics: List[str] = []
class DataIngestionConfig(BaseModel):
"""Complete data ingestion configuration"""
http: HttpPollerConfig = HttpPollerConfig()
mqtt: MqttSubscriberConfig = MqttSubscriberConfig()
class HealthResponse(BaseModel):
"""Health check response"""
service: str
status: str
timestamp: datetime
version: str
http_poller: Optional[Dict[str, Any]] = None
mqtt_subscriber: Optional[Dict[str, Any]] = None
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
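The Field bounds on HttpPollerConfig mean out-of-range values are rejected when the model is built; a short sketch (the values are arbitrary):

from pydantic import ValidationError
from modules.data_ingestion.models import DataIngestionConfig, HttpPollerConfig

cfg = DataIngestionConfig()                              # defaults: HTTP polling on, MQTT off
print(cfg.http.poll_interval_seconds, cfg.mqtt.enabled)  # 60 False

try:
    HttpPollerConfig(poll_interval_seconds=2)            # below the ge=5 bound
except ValidationError as exc:
    print("rejected:", exc.errors()[0]["loc"])           # ('poll_interval_seconds',)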

View File

@@ -0,0 +1,194 @@
"""Data Ingestion module API routes."""
import logging
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends
from typing import Optional
from .models import (
HealthResponse, DataSourceSummary, PollingMetrics,
DeviceStatus, DataSourceType
)
from .iot_config import get_config_loader
from .http_poller import HttpPoller
from core.dependencies import get_sensors_db
logger = logging.getLogger(__name__)
# Create router
router = APIRouter()
# Global HTTP poller instance (will be set by main.py)
_http_poller: Optional[HttpPoller] = None
def set_http_poller(poller: HttpPoller):
"""Set the global HTTP poller instance"""
global _http_poller
_http_poller = poller
def get_http_poller() -> HttpPoller:
"""Get the HTTP poller instance"""
if _http_poller is None:
raise HTTPException(status_code=503, detail="HTTP poller not initialized")
return _http_poller
# Health check
@router.get("/health", response_model=HealthResponse)
async def health_check(db=Depends(get_sensors_db)):
"""Health check endpoint for data ingestion module"""
try:
await db.command("ping")
http_status = None
if _http_poller:
metrics = _http_poller.get_metrics()
http_status = {
"enabled": True,
"running": _http_poller.running,
"total_devices": metrics.total_devices,
"active_devices": metrics.active_devices,
"success_rate": metrics.success_rate
}
return HealthResponse(
service="data-ingestion-module",
status="healthy",
timestamp=datetime.utcnow(),
version="1.0.0",
http_poller=http_status,
mqtt_subscriber={"enabled": False}
)
except Exception as e:
logger.error(f"Health check failed: {e}")
raise HTTPException(status_code=503, detail="Service Unavailable")
@router.get("/sources", response_model=DataSourceSummary)
async def get_data_sources():
"""
Get all data ingestion sources
Implements the OpenAPI endpoint GET /api/v1/sources (served at /api/v1/ingestion/sources once the router is mounted with the /api/v1/ingestion prefix in main.py)
"""
try:
poller = get_http_poller()
config_loader = get_config_loader()
device_statuses = poller.get_device_statuses()
active_count = sum(1 for d in device_statuses if d.status.value == "active")
return DataSourceSummary(
source_type=DataSourceType.HTTP,
enabled=True,
total_devices=len(device_statuses),
active_devices=active_count,
total_sensors=sum(d.sensors_count for d in device_statuses),
devices=device_statuses
)
except Exception as e:
logger.error(f"Error getting data sources: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sources/summary")
async def get_sources_summary():
"""Get summary of configured data sources"""
try:
config_loader = get_config_loader()
summary = config_loader.get_device_summary()
return summary
except Exception as e:
logger.error(f"Error getting sources summary: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sources/devices")
async def get_device_list():
"""Get list of all configured devices"""
try:
poller = get_http_poller()
devices = poller.get_device_statuses()
return {
"devices": devices,
"count": len(devices),
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error getting device list: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sources/devices/{device_name}", response_model=DeviceStatus)
async def get_device_status(device_name: str):
"""Get status of a specific device"""
try:
poller = get_http_poller()
status = poller.device_status.get(device_name)
if not status:
raise HTTPException(status_code=404, detail=f"Device '{device_name}' not found")
return status
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting device status: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/metrics", response_model=PollingMetrics)
async def get_polling_metrics():
"""Get HTTP polling performance metrics"""
try:
poller = get_http_poller()
return poller.get_metrics()
except Exception as e:
logger.error(f"Error getting metrics: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/poll/trigger")
async def trigger_manual_poll():
"""Manually trigger a polling cycle for all devices"""
try:
poller = get_http_poller()
# Trigger poll in background
import asyncio
asyncio.create_task(poller.poll_all_devices())
return {
"message": "Manual poll triggered",
"timestamp": datetime.utcnow().isoformat(),
"devices": len(poller.device_status)
}
except Exception as e:
logger.error(f"Error triggering poll: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/config/reload")
async def reload_configuration():
"""Reload IoT configuration from iots-right.json"""
try:
config_loader = get_config_loader()
config = config_loader.reload()
return {
"message": "Configuration reloaded successfully",
"total_devices": len(config.get_all_devices()),
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
logger.error(f"Error reloading config: {e}")
raise HTTPException(status_code=500, detail=str(e))
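Example calls against these routes, assuming the monolith is reachable at localhost:8000 (settings.host/port are not shown in this diff) and using httpx purely for illustration; the paths follow from the /api/v1/ingestion prefix applied in main.py:

import httpx

BASE = "http://localhost:8000/api/v1/ingestion"    # host/port are an assumption

print(httpx.get(f"{BASE}/health").json())          # module health plus poller metrics
print(httpx.get(f"{BASE}/sources/summary").json()) # configured devices grouped by type
httpx.post(f"{BASE}/poll/trigger")                 # kick off an immediate polling cycle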

View File

@@ -0,0 +1,21 @@
"""Demand Response module - handles grid interaction and load management."""
from .models import (
DRInvitation,
DRInvitationResponse,
EventRequest,
EventStatus,
LoadReductionRequest,
FlexibilityResponse
)
from .demand_response_service import DemandResponseService
__all__ = [
"DRInvitation",
"DRInvitationResponse",
"EventRequest",
"EventStatus",
"LoadReductionRequest",
"FlexibilityResponse",
"DemandResponseService",
]

View File

@@ -0,0 +1,747 @@
"""
Demand Response Service - Core Business Logic
Handles DR invitations, event execution, auto-response, and flexibility calculation
"""
import asyncio
import json
import uuid
from datetime import datetime, timedelta
from typing import List, Dict, Optional, Any
import logging
from motor.motor_asyncio import AsyncIOMotorDatabase
import redis.asyncio as redis
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class DemandResponseService:
"""Core Demand Response service business logic"""
def __init__(self, db: AsyncIOMotorDatabase, redis_client: redis.Redis):
self.db = db
self.redis = redis_client
self.active_events: Dict[str, asyncio.Task] = {} # event_id -> task
self.device_power_cache: Dict[str, float] = {} # device_id -> power_kw (updated by Redis subscriber)
# ===== INVITATION MANAGEMENT =====
async def send_invitation(
self,
event_time: datetime,
load_kwh: float,
load_percentage: float,
iots: List[str],
duration_minutes: int = 59
) -> Dict[str, Any]:
"""
Create and send DR invitation
Returns: {"event_id": str, "response": str, "message": str}
"""
logger.info(f"Creating DR invitation for {len(iots)} devices at {event_time}")
# Generate unique event ID
event_id = str(uuid.uuid4())
# Check auto-response configuration
auto_config = await self.get_auto_response_config()
response = "YES" if auto_config.get("enabled", False) else "WAITING"
# Create invitation document
invitation = {
"event_id": event_id,
"created_at": datetime.utcnow(),
"event_time": event_time,
"load_kwh": load_kwh,
"load_percentage": load_percentage,
"iots": iots,
"duration_minutes": duration_minutes,
"response": response,
"status": "pending"
}
# Store in MongoDB
await self.db.demand_response_invitations.insert_one(invitation)
# Cache in Redis for fast access (24 hour TTL)
cache_key = f"dr:invitation:{event_id}"
await self.redis.setex(
cache_key,
86400,
json.dumps(invitation, default=str)
)
# Publish event to Redis pub/sub
await self.redis.publish("dr_events", json.dumps({
"event": "invitation_created",
"event_id": event_id,
"event_time": event_time.isoformat(),
"load_kwh": load_kwh,
"response": response
}))
logger.info(f"Invitation {event_id} created with response: {response}")
return {
"event_id": event_id,
"response": response,
"message": "Invitation created successfully"
}
async def answer_invitation(
self,
event_id: str,
iot_id: str,
response: str,
committed_reduction_kw: Optional[float] = None
) -> Dict[str, Any]:
"""
Record device response to invitation
Returns: {"success": bool, "message": str}
"""
logger.info(f"Recording response for invitation {event_id}, device {iot_id}: {response}")
# Validate invitation exists
invitation = await self.get_invitation(event_id)
if not invitation:
return {"success": False, "message": f"Invitation {event_id} not found"}
if iot_id not in invitation["iots"]:
return {"success": False, "message": f"Device {iot_id} not in invitation"}
# Check if already responded
existing = await self.db.demand_response_responses.find_one({
"event_id": event_id,
"device_id": iot_id
})
if existing:
return {"success": False, "message": f"Device {iot_id} has already responded"}
# Store response
response_doc = {
"event_id": event_id,
"device_id": iot_id,
"response": response,
"committed_reduction_kw": committed_reduction_kw,
"responded_at": datetime.utcnow()
}
await self.db.demand_response_responses.insert_one(response_doc)
# Check if all devices have responded
total_devices = len(invitation["iots"])
total_responses = await self.db.demand_response_responses.count_documents({"event_id": event_id})
if total_responses == total_devices:
# All devices responded - update invitation status
yes_count = await self.db.demand_response_responses.count_documents({
"event_id": event_id,
"response": "YES"
})
all_yes = yes_count == total_devices
new_response = "YES" if all_yes else "NO"
new_status = "scheduled" if all_yes else "cancelled"
await self.db.demand_response_invitations.update_one(
{"event_id": event_id},
{"$set": {"response": new_response, "status": new_status}}
)
logger.info(f"Invitation {event_id} final response: {new_response} (status: {new_status})")
# Clear cache
await self.redis.delete(f"dr:invitation:{event_id}")
# Publish event
await self.redis.publish("dr_events", json.dumps({
"event": "invitation_answered",
"event_id": event_id,
"device_id": iot_id,
"response": response
}))
return {"success": True, "message": "Response recorded successfully"}
async def get_invitation(self, event_id: str) -> Optional[Dict[str, Any]]:
"""
Get invitation by event_id (with Redis caching)
"""
# Try cache first
cache_key = f"dr:invitation:{event_id}"
cached = await self.redis.get(cache_key)
if cached:
invitation = json.loads(cached)
return invitation
# Fallback to MongoDB
invitation = await self.db.demand_response_invitations.find_one({"event_id": event_id})
if invitation:
invitation["_id"] = str(invitation["_id"])
# Cache for 24 hours
await self.redis.setex(
cache_key,
86400,
json.dumps(invitation, default=str)
)
return invitation
return None
async def get_unanswered_invitations(self) -> List[Dict[str, Any]]:
"""Get all pending invitations awaiting response"""
cursor = self.db.demand_response_invitations.find({
"response": "WAITING",
"status": "pending"
}).sort("created_at", -1)
invitations = []
async for inv in cursor:
inv["_id"] = str(inv["_id"])
invitations.append(inv)
return invitations
async def get_answered_invitations(self, hours: int = 24, limit: int = 50) -> List[Dict[str, Any]]:
"""Get recent answered invitations"""
start_time = datetime.utcnow() - timedelta(hours=hours)
cursor = self.db.demand_response_invitations.find({
"response": {"$ne": "WAITING"},
"created_at": {"$gte": start_time}
}).sort("created_at", -1).limit(limit)
invitations = []
async for inv in cursor:
inv["_id"] = str(inv["_id"])
invitations.append(inv)
return invitations
# ===== EVENT EXECUTION =====
async def schedule_event(
self,
event_time: datetime,
iots: List[str],
load_reduction_kw: float,
duration_minutes: int = 59
) -> Dict[str, Any]:
"""
Schedule a DR event for execution
Returns: {"event_id": str, "message": str}
"""
logger.info(f"Scheduling DR event for {len(iots)} devices at {event_time}")
# Create event document
event_id = str(uuid.uuid4())
end_time = event_time + timedelta(minutes=duration_minutes)
event = {
"event_id": event_id,
"start_time": event_time,
"end_time": end_time,
"status": "scheduled",
"participating_devices": iots,
"target_reduction_kw": load_reduction_kw,
"actual_reduction_kw": 0.0,
"power_samples": []
}
await self.db.demand_response_events.insert_one(event)
# Publish scheduled event
await self.redis.publish("dr_events", json.dumps({
"event": "event_scheduled",
"event_id": event_id,
"start_time": event_time.isoformat(),
"end_time": end_time.isoformat(),
"devices": iots
}))
logger.info(f"Event {event_id} scheduled successfully")
return {
"event_id": event_id,
"message": "Event scheduled successfully"
}
async def execute_event(self, event_id: str):
"""
Execute a DR event (spawns background task)
"""
logger.info(f"Executing DR event {event_id}")
# Get event details
event = await self.db.demand_response_events.find_one({"event_id": event_id})
if not event:
logger.error(f"Event {event_id} not found")
return
# Update status to active
await self.db.demand_response_events.update_one(
{"event_id": event_id},
{"$set": {"status": "active", "actual_start_time": datetime.utcnow()}}
)
# Publish event started
await self.redis.publish("dr_events", json.dumps({
"event": "event_started",
"event_id": event_id,
"devices": event["participating_devices"]
}))
# Create and store async task for this event
task = asyncio.create_task(self._run_event_loop(event))
self.active_events[event_id] = task
logger.info(f"DR event {event_id} started successfully")
async def _run_event_loop(self, event: Dict[str, Any]):
"""
CRITICAL: Core event execution loop - runs for duration_minutes
Samples power every 5 seconds, accumulates reduction, handles cancellation
"""
event_id = event["event_id"]
end_time = event["end_time"]
devices = event["participating_devices"]
total_reduction_kwh = 0.0
sample_count = 0
logger.info(f"Starting event loop for {event_id}, ending at {end_time}")
try:
while datetime.utcnow() < end_time:
# Get current power for all participating devices from cache
device_powers = {
device_id: self.device_power_cache.get(device_id, 0.0)
for device_id in devices
}
# Calculate reduction for this 5-second interval
# interval_hours = 5.0 / 3600.0 = 0.00139 hours
interval_reduction_kwh = sum(device_powers.values()) * (5.0 / 3600.0)
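# Worked example (illustrative numbers): two devices reporting 1.5 kW and 2.5 kW
# give 4.0 kW * (5 s / 3600 s per hour) = 0.00556 kWh for this sample; over a
# 59-minute event (~708 samples) that accumulates to roughly 3.9 kWh.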
total_reduction_kwh += interval_reduction_kwh
sample_count += 1
# Store sample in MongoDB (every sample to maintain accuracy)
sample = {
"timestamp": datetime.utcnow(),
"device_powers": device_powers,
"interval_reduction_kwh": interval_reduction_kwh
}
await self.db.demand_response_events.update_one(
{"event_id": event_id},
{
"$push": {"power_samples": sample},
"$set": {"actual_reduction_kw": total_reduction_kwh}
}
)
# Update Redis cache for fast access to current reduction
cache_key = f"dr:event:active:{event_id}"
await self.redis.setex(
cache_key,
300, # 5 minute TTL
json.dumps({
"event_id": event_id,
"current_reduction_kwh": total_reduction_kwh,
"devices": device_powers,
"last_update": datetime.utcnow().isoformat()
}, default=str)
)
# Publish progress every 10 samples (50 seconds)
if sample_count % 10 == 0:
await self.redis.publish("dr_events", json.dumps({
"event": "event_progress",
"event_id": event_id,
"total_reduction_kwh": round(total_reduction_kwh, 3),
"device_powers": device_powers,
"timestamp": datetime.utcnow().isoformat()
}))
logger.info(f"Event {event_id} progress: {total_reduction_kwh:.3f} kWh ({sample_count} samples)")
# Sleep for 5 seconds
await asyncio.sleep(5)
# Event completed successfully
logger.info(f"Event {event_id} completed with {total_reduction_kwh:.3f} kWh reduction")
await self._complete_event(event_id, total_reduction_kwh)
except asyncio.CancelledError:
logger.info(f"Event {event_id} cancelled by user")
await self._cancel_event(event_id)
raise
except Exception as e:
logger.error(f"Error in event {event_id}: {e}", exc_info=True)
await self._cancel_event(event_id)
async def _complete_event(self, event_id: str, total_reduction_kwh: float):
"""Mark event as completed"""
await self.db.demand_response_events.update_one(
{"event_id": event_id},
{
"$set": {
"status": "completed",
"actual_end_time": datetime.utcnow(),
"actual_reduction_kw": total_reduction_kwh
}
}
)
# Remove from active events
self.active_events.pop(event_id, None)
# Clear cache
await self.redis.delete(f"dr:event:active:{event_id}")
# Publish completion
await self.redis.publish("dr_events", json.dumps({
"event": "event_completed",
"event_id": event_id,
"total_reduction_kwh": total_reduction_kwh
}))
logger.info(f"DR event {event_id} marked as completed")
async def _cancel_event(self, event_id: str):
"""Internal method to cancel an event"""
await self.db.demand_response_events.update_one(
{"event_id": event_id},
{
"$set": {
"status": "cancelled",
"cancelled_at": datetime.utcnow()
}
}
)
self.active_events.pop(event_id, None)
await self.redis.delete(f"dr:event:active:{event_id}")
# Publish cancellation
await self.redis.publish("dr_events", json.dumps({
"event": "event_cancelled",
"event_id": event_id,
"timestamp": datetime.utcnow().isoformat()
}))
async def cancel_event(self, event_id: str):
"""
Public method to cancel a running DR event gracefully
"""
logger.info(f"Cancelling DR event {event_id}")
# Cancel the async task
task = self.active_events.get(event_id)
if task and not task.done():
task.cancel()
try:
await task
except asyncio.CancelledError:
# Expected - task cancelled successfully
pass
except Exception as e:
logger.error(f"Error cancelling event task {event_id}: {e}")
# Update database status (if not already done by _cancel_event)
event = await self.db.demand_response_events.find_one({"event_id": event_id})
if event and event.get("status") != "cancelled":
await self._cancel_event(event_id)
logger.info(f"DR event {event_id} cancelled successfully")
async def get_active_events(self) -> List[Dict[str, Any]]:
"""Get currently running events with real-time data"""
cursor = self.db.demand_response_events.find({
"status": "active"
}).sort("start_time", -1)
events = []
async for event in cursor:
event["_id"] = str(event["_id"])
# Add real-time data from cache
cache_key = f"dr:event:active:{event['event_id']}"
cached = await self.redis.get(cache_key)
if cached:
realtime_data = json.loads(cached)
event["current_reduction_kwh"] = realtime_data.get("current_reduction_kwh")
event["current_device_powers"] = realtime_data.get("devices")
events.append(event)
return events
# ===== DEVICE POWER INTEGRATION =====
def update_device_power_cache(self, device_id: str, power_kw: float):
"""
Update device power cache (called by Redis subscriber)
This is synchronous because it's just updating a dict
"""
self.device_power_cache[device_id] = power_kw
# No logging here to avoid spam (called every few seconds per device)
async def get_device_power(self, device_id: str) -> float:
"""Get current power for a device from cache"""
return self.device_power_cache.get(device_id, 0.0)
# ===== AUTO-RESPONSE CONFIGURATION =====
async def get_auto_response_config(self) -> Dict[str, Any]:
"""Get auto-response configuration"""
config = await self.db.auto_response_config.find_one({"config_id": "default"})
if not config:
# Create default config
default_config = {
"config_id": "default",
"enabled": False,
"max_reduction_percentage": 20.0,
"response_delay_seconds": 300,
"min_notice_minutes": 60,
"updated_at": datetime.utcnow()
}
await self.db.auto_response_config.insert_one(default_config)
return default_config
return config
async def set_auto_response_config(
self,
enabled: bool,
max_reduction_percentage: float = 20.0,
response_delay_seconds: int = 300,
min_notice_minutes: int = 60
) -> Dict[str, Any]:
"""Update auto-response configuration"""
await self.db.auto_response_config.update_one(
{"config_id": "default"},
{
"$set": {
"enabled": enabled,
"max_reduction_percentage": max_reduction_percentage,
"response_delay_seconds": response_delay_seconds,
"min_notice_minutes": min_notice_minutes,
"updated_at": datetime.utcnow()
}
},
upsert=True
)
# Clear cache
await self.redis.delete("dr:config:auto_response")
logger.info(f"Auto-response config updated: enabled={enabled}")
return await self.get_auto_response_config()
async def process_auto_responses(self):
"""
Process pending invitations with auto-response (called by background task)
"""
# Get auto-response configuration
auto_config = await self.get_auto_response_config()
if not auto_config.get("enabled"):
return
# Find unanswered invitations
invitations = await self.get_unanswered_invitations()
for invitation in invitations:
event_id = invitation["event_id"]
event_time = invitation["event_time"]
# Parse event_time (it may be an ISO string if it was read from the Redis cache);
# drop any timezone offset so it compares cleanly with the naive datetime.utcnow() below
if isinstance(event_time, str):
event_time = datetime.fromisoformat(event_time.replace('Z', '+00:00')).replace(tzinfo=None)
# Check if event is within auto-response criteria
time_until_event = (event_time - datetime.utcnow()).total_seconds() / 60 # minutes
min_notice = auto_config.get("min_notice_minutes", 60)
if time_until_event >= min_notice:
logger.info(f"Auto-responding to invitation {event_id}")
# Auto-accept for all devices
for device_id in invitation["iots"]:
# Check if already responded
existing = await self.db.demand_response_responses.find_one({
"event_id": event_id,
"device_id": device_id
})
if not existing:
# Get device current power
device_power = await self.get_device_power(device_id)
# Calculate committed reduction based on max_reduction_percentage
max_reduction_pct = auto_config.get("max_reduction_percentage", 20.0)
committed_reduction = device_power * (max_reduction_pct / 100) if device_power > 0 else 0.5
# Submit auto-response
try:
await self.answer_invitation(event_id, device_id, "YES", committed_reduction)
logger.info(f"Auto-accepted for device {device_id} with {committed_reduction:.2f} kW commitment")
except Exception as e:
logger.error(f"Error auto-responding for {device_id}: {e}")
else:
logger.debug(f"Invitation {event_id} too soon ({time_until_event:.0f}m < {min_notice}m)")
# ===== BACKGROUND TASK SUPPORT =====
async def check_scheduled_events(self):
"""
Check for events that need to be started (called by scheduler task)
"""
now = datetime.utcnow()
threshold = now + timedelta(minutes=1) # Start events within next minute
# Find scheduled events that should start
cursor = self.db.demand_response_events.find({
"status": "scheduled",
"start_time": {"$lte": threshold, "$gte": now}
})
async for event in cursor:
event_id = event["event_id"]
# Check if not already active
if event_id not in self.active_events:
logger.info(f"Starting scheduled DR event {event_id}")
await self.execute_event(event_id)
# ===== BASIC FLEXIBILITY CALCULATION =====
async def get_current_flexibility(self) -> Dict[str, Any]:
"""
Calculate current available flexibility from device power cache
"""
total_flexibility_kw = 0.0
devices = []
# Get all devices with instructions
cursor = self.db.device_instructions.find({})
current_hour = datetime.utcnow().hour
async for device_doc in cursor:
device_id = device_doc["device_id"]
instruction = device_doc["instructions"].get(str(current_hour), "off")
if instruction != "off":
# Get device current power from cache
device_power = self.device_power_cache.get(device_id, 0.0)
if instruction == "participation":
# Full flexibility (100%)
flexibility = device_power
elif instruction == "shifting":
# Partial flexibility (20%)
flexibility = device_power * 0.20
else:
flexibility = 0.0
if flexibility > 0:
devices.append({
"device_id": device_id,
"available_kw": round(flexibility, 2),
"instruction": instruction,
"current_power": round(device_power, 2)
})
total_flexibility_kw += flexibility
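# Worked example (illustrative numbers): a device drawing 3.0 kW under "participation"
# contributes 3.0 kW of flexibility, while one drawing 4.0 kW under "shifting"
# contributes 4.0 * 0.20 = 0.8 kW, for a total of 3.8 kW.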
snapshot = {
"timestamp": datetime.utcnow(),
"total_flexibility_kw": round(total_flexibility_kw, 2),
"devices": devices
}
# Store snapshot
await self.db.flexibility_snapshots.insert_one(dict(snapshot))
# Cache for 5 minutes
await self.redis.setex(
"dr:flexibility:current",
300,
json.dumps(snapshot, default=str)
)
return snapshot
async def get_device_instructions(self, device_id: Optional[str] = None) -> Dict[str, Any]:
"""Get DR instructions for device(s)"""
if device_id:
doc = await self.db.device_instructions.find_one({"device_id": device_id})
return doc if doc else {"device_id": device_id, "instructions": {}}
else:
cursor = self.db.device_instructions.find({})
instructions = {}
async for doc in cursor:
instructions[doc["device_id"]] = doc["instructions"]
return instructions
async def update_device_instructions(self, device_id: str, instructions: Dict[str, str]):
"""Update hourly instructions for a device"""
await self.db.device_instructions.update_one(
{"device_id": device_id},
{
"$set": {
"instructions": instructions,
"updated_at": datetime.utcnow()
}
},
upsert=True
)
logger.info(f"Updated instructions for device {device_id}")
# ===== ANALYTICS =====
async def get_performance_analytics(self, days: int = 30) -> Dict[str, Any]:
"""Get DR performance analytics"""
start_date = datetime.utcnow() - timedelta(days=days)
# Query completed events
cursor = self.db.demand_response_events.find({
"status": "completed",
"start_time": {"$gte": start_date}
})
events = await cursor.to_list(length=None)
if not events:
return {
"period_days": days,
"total_events": 0,
"total_reduction_kwh": 0.0,
"total_target_kwh": 0.0,
"average_reduction_kwh": 0.0,
"achievement_rate": 0.0,
"average_event_duration_minutes": 59
}
total_reduction = sum(e.get("actual_reduction_kw", 0) for e in events)
total_target = sum(e.get("target_reduction_kw", 0) for e in events)
return {
"period_days": days,
"total_events": len(events),
"total_reduction_kwh": round(total_reduction, 2),
"total_target_kwh": round(total_target, 2),
"average_reduction_kwh": round(total_reduction / len(events), 2),
"achievement_rate": round((total_reduction / total_target * 100) if total_target > 0 else 0, 2),
"average_event_duration_minutes": 59
}
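A minimal sketch of the invitation-to-event flow implemented above. The db and redis_client handles are placeholders (their wiring lives in main.py, where this module is currently disabled), and the device IDs are taken from the schema examples:

from datetime import datetime, timedelta
from modules.demand_response import DemandResponseService

async def demo(db, redis_client):
    service = DemandResponseService(db, redis_client)

    # 1. Create an invitation one hour ahead for two devices.
    invite = await service.send_invitation(
        event_time=datetime.utcnow() + timedelta(hours=1),
        load_kwh=5.0,
        load_percentage=15.0,
        iots=["sensor_1", "sensor_2"],
    )

    # 2. Each device answers; once every device says YES the invitation is marked "scheduled".
    await service.answer_invitation(invite["event_id"], "sensor_1", "YES", 2.5)
    await service.answer_invitation(invite["event_id"], "sensor_2", "YES", 1.5)

    # 3. Schedule the execution window; check_scheduled_events (run by the
    #    background scheduler) starts it when the start time arrives.
    await service.schedule_event(
        event_time=datetime.utcnow() + timedelta(hours=1),
        iots=["sensor_1", "sensor_2"],
        load_reduction_kw=4.0,
    )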

View File

@@ -0,0 +1,338 @@
"""
Pydantic models for Demand Response Service
"""
from datetime import datetime
from typing import List, Dict, Optional, Literal
from pydantic import BaseModel, Field
from enum import Enum
# Enums
class InvitationStatus(str, Enum):
"""Invitation status states"""
PENDING = "pending"
SCHEDULED = "scheduled"
ACTIVE = "active"
COMPLETED = "completed"
CANCELLED = "cancelled"
class ResponseType(str, Enum):
"""Device response types"""
WAITING = "WAITING"
YES = "YES"
NO = "NO"
class EventStatus(str, Enum):
"""DR event status states"""
SCHEDULED = "scheduled"
ACTIVE = "active"
COMPLETED = "completed"
CANCELLED = "cancelled"
class InstructionType(str, Enum):
"""Device participation instruction types"""
PARTICIPATION = "participation" # Full DR participation (100%)
SHIFTING = "shifting" # Partial participation (0-20%)
OFF = "off" # No DR participation
# Invitation Models
class EventRequest(BaseModel):
"""Request model for creating a DR event (alias for DRInvitationCreate)"""
event_time: datetime = Field(..., description="When the DR event should occur")
load_kwh: float = Field(..., description="Target load reduction in kWh", gt=0)
load_percentage: float = Field(..., description="Target reduction as percentage of total load", ge=0, le=100)
iots: List[str] = Field(..., description="List of device IDs to participate", min_items=1)
duration_minutes: int = Field(59, description="Event duration in minutes", gt=0, le=120)
class Config:
json_schema_extra = {
"example": {
"event_time": "2025-12-10T14:00:00",
"load_kwh": 5.0,
"load_percentage": 15.0,
"iots": ["sensor_1", "sensor_2"],
"duration_minutes": 59
}
}
class DRInvitationCreate(BaseModel):
"""Request model for creating a DR invitation"""
event_time: datetime = Field(..., description="When the DR event should occur")
load_kwh: float = Field(..., description="Target load reduction in kWh", gt=0)
load_percentage: float = Field(..., description="Target reduction as percentage of total load", ge=0, le=100)
iots: List[str] = Field(..., description="List of device IDs to participate", min_items=1)
duration_minutes: int = Field(59, description="Event duration in minutes", gt=0, le=120)
class Config:
json_schema_extra = {
"example": {
"event_time": "2025-12-10T14:00:00",
"load_kwh": 5.0,
"load_percentage": 15.0,
"iots": ["sensor_1", "sensor_2"],
"duration_minutes": 59
}
}
class DRInvitationResponse(BaseModel):
"""Response model for device answering invitation"""
event_id: str = Field(..., description="Event identifier")
iot_id: str = Field(..., description="Device identifier")
response: ResponseType = Field(..., description="Device response (YES/NO)")
committed_reduction_kw: Optional[float] = Field(None, description="Committed power reduction in kW", ge=0)
class Config:
json_schema_extra = {
"example": {
"event_id": "550e8400-e29b-41d4-a716-446655440000",
"iot_id": "sensor_1",
"response": "YES",
"committed_reduction_kw": 2.5
}
}
class DRInvitation(BaseModel):
"""Full DR invitation model"""
event_id: str = Field(..., description="Unique event identifier")
created_at: datetime = Field(..., description="Invitation creation time")
event_time: datetime = Field(..., description="Scheduled event start time")
load_kwh: float = Field(..., description="Target load reduction in kWh")
load_percentage: float = Field(..., description="Target reduction percentage")
iots: List[str] = Field(..., description="Participating device IDs")
duration_minutes: int = Field(..., description="Event duration in minutes")
response: str = Field(..., description="Overall response status")
status: str = Field(..., description="Invitation status")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
json_schema_extra = {
"example": {
"event_id": "550e8400-e29b-41d4-a716-446655440000",
"created_at": "2025-12-10T13:45:00",
"event_time": "2025-12-10T14:00:00",
"load_kwh": 5.0,
"load_percentage": 15.0,
"iots": ["sensor_1", "sensor_2"],
"duration_minutes": 59,
"response": "WAITING",
"status": "pending"
}
}
# Event Models
class EventScheduleRequest(BaseModel):
"""Request model for scheduling a DR event"""
event_time: datetime = Field(..., description="Event start time")
iots: List[str] = Field(..., description="Participating device IDs", min_items=1)
load_reduction_kw: float = Field(..., description="Target reduction in kW", gt=0)
duration_minutes: int = Field(59, description="Event duration in minutes", gt=0, le=120)
class Config:
json_schema_extra = {
"example": {
"event_time": "2025-12-10T14:00:00",
"iots": ["sensor_1", "sensor_2"],
"load_reduction_kw": 5.0,
"duration_minutes": 59
}
}
class PowerSample(BaseModel):
"""Individual power sample during event"""
timestamp: datetime = Field(..., description="Sample timestamp")
device_powers: Dict[str, float] = Field(..., description="Device power readings (device_id -> kW)")
interval_reduction_kwh: Optional[float] = Field(None, description="Reduction for this interval")
class DREvent(BaseModel):
"""DR event execution model"""
event_id: str = Field(..., description="Unique event identifier")
invitation_id: Optional[str] = Field(None, description="Source invitation ID if applicable")
start_time: datetime = Field(..., description="Event start time")
end_time: datetime = Field(..., description="Event end time")
status: EventStatus = Field(..., description="Event status")
participating_devices: List[str] = Field(..., description="Device IDs participating")
target_reduction_kw: float = Field(..., description="Target power reduction in kW")
actual_reduction_kw: float = Field(0.0, description="Actual achieved reduction in kWh")
power_samples: List[Dict] = Field(default_factory=list, description="Power samples during event")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class ActiveEventResponse(BaseModel):
"""Response model for active event with real-time data"""
event_id: str = Field(..., description="Event identifier")
status: EventStatus = Field(..., description="Current status")
start_time: datetime = Field(..., description="Event start time")
end_time: datetime = Field(..., description="Event end time")
participating_devices: List[str] = Field(..., description="Participating devices")
target_reduction_kw: float = Field(..., description="Target reduction")
actual_reduction_kw: float = Field(..., description="Current achieved reduction")
current_device_powers: Optional[Dict[str, float]] = Field(None, description="Current device power readings")
progress_percentage: Optional[float] = Field(None, description="Event progress (0-100%)")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class LoadReductionRequest(BaseModel):
"""Request model for executing load reduction"""
event_time: datetime = Field(..., description="Event start time")
iot: str = Field(..., description="Device ID")
class Config:
json_schema_extra = {
"example": {
"event_time": "2025-12-10T14:00:00",
"iot": "sensor_1"
}
}
# Flexibility Models
class DeviceFlexibility(BaseModel):
"""Per-device flexibility information"""
device_id: str = Field(..., description="Device identifier")
available_kw: float = Field(..., description="Available flexibility in kW", ge=0)
instruction: str = Field(..., description="Current DR instruction")
current_power: float = Field(..., description="Current power consumption in kW", ge=0)
class FlexibilityResponse(BaseModel):
"""Response model for current flexibility"""
timestamp: datetime = Field(..., description="Calculation timestamp")
total_flexibility_kw: float = Field(..., description="Total available flexibility in kW", ge=0)
devices: List[DeviceFlexibility] = Field(..., description="Per-device breakdown")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
json_schema_extra = {
"example": {
"timestamp": "2025-12-10T13:45:00",
"total_flexibility_kw": 15.5,
"devices": [
{
"device_id": "sensor_1",
"available_kw": 3.5,
"instruction": "participation",
"current_power": 3.5
},
{
"device_id": "sensor_2",
"available_kw": 0.8,
"instruction": "shifting",
"current_power": 4.0
}
]
}
}
class DeviceInstructionUpdate(BaseModel):
"""Model for updating device instructions"""
device_id: str = Field(..., description="Device identifier")
instructions: Dict[str, str] = Field(..., description="Hourly instructions (hour -> instruction type)")
class Config:
json_schema_extra = {
"example": {
"device_id": "sensor_1",
"instructions": {
"0": "participation",
"1": "shifting",
"2": "off",
"3": "participation"
}
}
}
# Configuration Models
class AutoResponseConfig(BaseModel):
"""Auto-response configuration model"""
enabled: bool = Field(..., description="Whether auto-response is enabled")
max_reduction_percentage: float = Field(20.0, description="Maximum reduction percentage for auto-accept", ge=0, le=100)
response_delay_seconds: int = Field(300, description="Delay before auto-responding (seconds)", ge=0)
min_notice_minutes: int = Field(60, description="Minimum notice required for auto-accept (minutes)", ge=0)
class Config:
json_schema_extra = {
"example": {
"enabled": True,
"max_reduction_percentage": 20.0,
"response_delay_seconds": 300,
"min_notice_minutes": 60
}
}
# Response Models
class InvitationSendResponse(BaseModel):
"""Response for sending invitation"""
event_id: str = Field(..., description="Created event identifier")
response: str = Field(..., description="Initial response status")
message: str = Field(..., description="Status message")
class InvitationAnswerResponse(BaseModel):
"""Response for answering invitation"""
success: bool = Field(..., description="Whether answer was recorded")
message: str = Field(..., description="Status message")
class EventScheduleResponse(BaseModel):
"""Response for scheduling event"""
event_id: str = Field(..., description="Scheduled event identifier")
message: str = Field(..., description="Status message")
class PerformanceAnalytics(BaseModel):
"""Performance analytics response"""
period_days: int = Field(..., description="Analysis period in days")
total_events: int = Field(..., description="Total number of events")
total_reduction_kwh: float = Field(..., description="Total energy reduced")
total_target_kwh: float = Field(..., description="Total target reduction")
average_reduction_kwh: float = Field(..., description="Average reduction per event")
achievement_rate: float = Field(..., description="Achievement rate (%)")
average_event_duration_minutes: int = Field(..., description="Average event duration")
# Health Check Model
class HealthResponse(BaseModel):
"""Health check response model"""
service: str = Field(..., description="Service name")
status: str = Field(..., description="Service status")
timestamp: datetime = Field(..., description="Check timestamp")
version: str = Field(..., description="Service version")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
json_schema_extra = {
"example": {
"service": "demand-response-service",
"status": "healthy",
"timestamp": "2025-12-10T13:45:00",
"version": "1.0.0"
}
}

View File

@@ -0,0 +1,39 @@
"""Sensors module - handles sensor management, rooms, and analytics."""
from .models import (
SensorReading,
SensorMetadata,
RoomMetrics,
SystemEvent,
Room,
RoomCreate,
RoomUpdate,
RoomInfo,
SensorType,
SensorStatus,
CO2Status,
OccupancyLevel
)
from .sensor_service import SensorService
from .room_service import RoomService
from .analytics_service import AnalyticsService
from .websocket_manager import WebSocketManager
__all__ = [
"SensorReading",
"SensorMetadata",
"RoomMetrics",
"SystemEvent",
"Room",
"RoomCreate",
"RoomUpdate",
"RoomInfo",
"SensorType",
"SensorStatus",
"CO2Status",
"OccupancyLevel",
"SensorService",
"RoomService",
"AnalyticsService",
"WebSocketManager",
]

View File

@@ -0,0 +1,377 @@
"""
Analytics service for processing sensor data and generating insights
"""
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
import json
logger = logging.getLogger(__name__)
class AnalyticsService:
"""Service for analytics and data processing"""
def __init__(self, db, redis_client):
self.db = db
self.redis = redis_client
async def query_data(self, query_params) -> Dict[str, Any]:
"""Execute advanced data query"""
try:
# Build query
query = {}
if hasattr(query_params, 'sensor_ids') and query_params.sensor_ids:
query["sensor_id"] = {"$in": query_params.sensor_ids}
if hasattr(query_params, 'start_time') and query_params.start_time:
query.setdefault("timestamp", {})["$gte"] = query_params.start_time
if hasattr(query_params, 'end_time') and query_params.end_time:
query.setdefault("timestamp", {})["$lte"] = query_params.end_time
# Execute query
cursor = self.db.sensor_readings.find(query)
if hasattr(query_params, 'limit') and query_params.limit:
cursor = cursor.limit(query_params.limit)
if hasattr(query_params, 'offset') and query_params.offset:
cursor = cursor.skip(query_params.offset)
cursor = cursor.sort("timestamp", -1)
# Get results
results = []
async for reading in cursor:
reading["_id"] = str(reading["_id"])
results.append(reading)
# Get total count
total_count = await self.db.sensor_readings.count_documents(query)
return {
"data": results,
"total_count": total_count,
"query": query_params.__dict__ if hasattr(query_params, '__dict__') else {},
"execution_time_ms": 0 # Placeholder
}
except Exception as e:
logger.error(f"Error executing data query: {e}")
raise
async def get_analytics_summary(self, hours: int = 24) -> Dict[str, Any]:
"""Get comprehensive analytics summary"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Get basic statistics
pipeline = [
{
"$match": {
"created_at": {"$gte": start_time}
}
},
{
"$group": {
"_id": None,
"total_readings": {"$sum": 1},
"average_value": {"$avg": "$value"},
"min_value": {"$min": "$value"},
"max_value": {"$max": "$value"},
"unique_sensors": {"$addToSet": "$sensor_id"}
}
}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
stats = await cursor.to_list(length=1)
base_stats = stats[0] if stats else {
"total_readings": 0,
"average_value": 0,
"min_value": 0,
"max_value": 0,
"unique_sensors": []
}
# Get room-level statistics
room_stats = await self._get_room_analytics(hours)
# Get energy trends
energy_trends = await self._get_energy_trends(hours)
return {
"period_hours": hours,
"timestamp": datetime.utcnow().isoformat(),
"total_readings": base_stats["total_readings"],
"unique_sensors": len(base_stats["unique_sensors"]),
"value_statistics": {
"average": round(base_stats["average_value"], 2) if base_stats["average_value"] else 0,
"minimum": base_stats["min_value"],
"maximum": base_stats["max_value"]
},
"room_statistics": room_stats,
"energy_trends": energy_trends
}
except Exception as e:
logger.error(f"Error getting analytics summary: {e}")
raise
async def get_energy_analytics(self, hours: int = 24, room: Optional[str] = None) -> Dict[str, Any]:
"""Get energy-specific analytics"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Build query
query = {"created_at": {"$gte": start_time}}
if room:
query["room"] = room
# Energy consumption over time
pipeline = [
{"$match": query},
{
"$group": {
"_id": {
"hour": {"$hour": "$created_at"},
"date": {"$dateToString": {"format": "%Y-%m-%d", "date": "$created_at"}}
},
"total_energy": {"$sum": "$value"},
"reading_count": {"$sum": 1}
}
},
{"$sort": {"_id.date": 1, "_id.hour": 1}}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
hourly_data = []
async for data in cursor:
hourly_data.append({
"hour": data["_id"]["hour"],
"date": data["_id"]["date"],
"total_energy": data["total_energy"],
"reading_count": data["reading_count"]
})
# Peak consumption analysis
peak_analysis = await self._get_peak_consumption_analysis(query)
# Energy efficiency metrics
efficiency_metrics = await self._get_efficiency_metrics(query)
return {
"period_hours": hours,
"room": room,
"timestamp": datetime.utcnow().isoformat(),
"hourly_consumption": hourly_data,
"peak_analysis": peak_analysis,
"efficiency_metrics": efficiency_metrics,
"total_consumption": sum(item["total_energy"] for item in hourly_data)
}
except Exception as e:
logger.error(f"Error getting energy analytics: {e}")
raise
async def _get_room_analytics(self, hours: int) -> Dict[str, Any]:
"""Get room-level analytics"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
pipeline = [
{
"$match": {
"created_at": {"$gte": start_time},
"room": {"$ne": None}
}
},
{
"$group": {
"_id": "$room",
"total_readings": {"$sum": 1},
"total_energy": {"$sum": "$value"},
"average_energy": {"$avg": "$value"},
"unique_sensors": {"$addToSet": "$sensor_id"}
}
},
{"$sort": {"total_energy": -1}}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
room_data = []
async for room in cursor:
room_data.append({
"room": room["_id"],
"total_readings": room["total_readings"],
"total_energy": room["total_energy"],
"average_energy": round(room["average_energy"], 2),
"sensor_count": len(room["unique_sensors"])
})
return {
"by_room": room_data,
"total_rooms": len(room_data)
}
except Exception as e:
logger.error(f"Error getting room analytics: {e}")
return {"by_room": [], "total_rooms": 0}
async def _get_energy_trends(self, hours: int) -> Dict[str, Any]:
"""Get energy consumption trends"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
# Get current period data
current_query = {"created_at": {"$gte": start_time}}
current_cursor = self.db.sensor_readings.aggregate([
{"$match": current_query},
{"$group": {"_id": None, "total": {"$sum": "$value"}, "count": {"$sum": 1}}}
])
current_data = await current_cursor.to_list(length=1)
current_total = current_data[0]["total"] if current_data else 0
current_count = current_data[0]["count"] if current_data else 0
# Get previous period for comparison
previous_start = start_time - timedelta(hours=hours)
previous_query = {
"created_at": {"$gte": previous_start, "$lt": start_time}
}
previous_cursor = self.db.sensor_readings.aggregate([
{"$match": previous_query},
{"$group": {"_id": None, "total": {"$sum": "$value"}, "count": {"$sum": 1}}}
])
previous_data = await previous_cursor.to_list(length=1)
previous_total = previous_data[0]["total"] if previous_data else 0
# Calculate trend
if previous_total > 0:
trend_percentage = ((current_total - previous_total) / previous_total) * 100
else:
trend_percentage = 0
return {
"current_period": {
"total_energy": current_total,
"reading_count": current_count,
"average_per_reading": current_total / current_count if current_count > 0 else 0
},
"previous_period": {
"total_energy": previous_total
},
"trend": {
"percentage_change": round(trend_percentage, 2),
"direction": "up" if trend_percentage > 0 else "down" if trend_percentage < 0 else "stable"
}
}
except Exception as e:
logger.error(f"Error getting energy trends: {e}")
return {}
async def _get_peak_consumption_analysis(self, base_query: Dict[str, Any]) -> Dict[str, Any]:
"""Analyze peak consumption patterns"""
try:
pipeline = [
{"$match": base_query},
{
"$group": {
"_id": {"$hour": "$created_at"},
"total_consumption": {"$sum": "$value"},
"reading_count": {"$sum": 1}
}
},
{"$sort": {"total_consumption": -1}}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
hourly_consumption = await cursor.to_list(length=None)
if not hourly_consumption:
return {"peak_hour": None, "peak_consumption": 0, "hourly_pattern": []}
peak_data = hourly_consumption[0]
return {
"peak_hour": peak_data["_id"],
"peak_consumption": peak_data["total_consumption"],
"hourly_pattern": [
{
"hour": item["_id"],
"consumption": item["total_consumption"],
"reading_count": item["reading_count"]
}
for item in hourly_consumption
]
}
except Exception as e:
logger.error(f"Error analyzing peak consumption: {e}")
return {"peak_hour": None, "peak_consumption": 0, "hourly_pattern": []}
async def _get_efficiency_metrics(self, base_query: Dict[str, Any]) -> Dict[str, Any]:
"""Calculate energy efficiency metrics"""
try:
# Average consumption per sensor
pipeline = [
{"$match": base_query},
{
"$group": {
"_id": "$sensor_id",
"total_consumption": {"$sum": "$value"},
"reading_count": {"$sum": 1}
}
},
{
"$group": {
"_id": None,
"average_per_sensor": {"$avg": "$total_consumption"},
"sensor_count": {"$sum": 1},
"min_consumption": {"$min": "$total_consumption"},
"max_consumption": {"$max": "$total_consumption"}
}
}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
efficiency_data = await cursor.to_list(length=1)
if not efficiency_data:
return {
"average_per_sensor": 0,
"sensor_count": 0,
"efficiency_score": 0,
"variation_coefficient": 0
}
data = efficiency_data[0]
# Calculate efficiency score (lower variation = higher efficiency)
if data["average_per_sensor"] > 0:
variation_coefficient = (data["max_consumption"] - data["min_consumption"]) / data["average_per_sensor"]
efficiency_score = max(0, 100 - (variation_coefficient * 10)) # Scale to 0-100
else:
variation_coefficient = 0
efficiency_score = 100
return {
"average_per_sensor": round(data["average_per_sensor"], 2),
"sensor_count": data["sensor_count"],
"efficiency_score": round(efficiency_score, 1),
"variation_coefficient": round(variation_coefficient, 2)
}
except Exception as e:
logger.error(f"Error calculating efficiency metrics: {e}")
return {
"average_per_sensor": 0,
"sensor_count": 0,
"efficiency_score": 0,
"variation_coefficient": 0
}
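
For orientation, the sketch below shows one way the AnalyticsService above could be exercised on its own. It is illustrative only: the connection strings, database name, and import path are assumptions, since the real wiring is injected through core.dependencies (get_sensors_db / get_redis).

import asyncio

import redis.asyncio as aioredis
from motor.motor_asyncio import AsyncIOMotorClient

from analytics_service import AnalyticsService  # import path is an assumption


async def main():
    # Assumed local connection details; the application injects these via core.dependencies.
    mongo = AsyncIOMotorClient("mongodb://admin:password123@localhost:27017")
    db = mongo["energy_dashboard_sensors"]  # assumed database name
    redis_client = aioredis.from_url("redis://localhost:6379")

    analytics = AnalyticsService(db, redis_client)
    summary = await analytics.get_analytics_summary(hours=24)
    print(summary["total_readings"], summary["unique_sensors"])

    energy = await analytics.get_energy_analytics(hours=24, room="Conference Room A")
    print(energy["total_consumption"])


if __name__ == "__main__":
    asyncio.run(main())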

View File

@@ -0,0 +1,378 @@
"""
Models for Sensor Management Service - integrating all original dashboard functionality
"""
from pydantic import BaseModel, Field
from typing import Optional, List, Dict, Any, Literal
from datetime import datetime
from enum import Enum
class SensorType(str, Enum):
ENERGY = "energy"
CO2 = "co2"
TEMPERATURE = "temperature"
HUMIDITY = "humidity"
HVAC = "hvac"
LIGHTING = "lighting"
SECURITY = "security"
MOTION = "motion"
class SensorStatus(str, Enum):
ONLINE = "online"
OFFLINE = "offline"
ERROR = "error"
MAINTENANCE = "maintenance"
class CO2Status(str, Enum):
GOOD = "good"
MODERATE = "moderate"
POOR = "poor"
CRITICAL = "critical"
class OccupancyLevel(str, Enum):
LOW = "low"
MEDIUM = "medium"
HIGH = "high"
# Base Models from original dashboard
class SensorReading(BaseModel):
"""Individual sensor reading model - enhanced from original"""
sensor_id: str = Field(..., description="Unique sensor identifier")
room: Optional[str] = Field(None, description="Room where sensor is located")
sensor_type: SensorType = Field(..., description="Type of sensor")
timestamp: int = Field(..., description="Unix timestamp of reading")
created_at: datetime = Field(default_factory=datetime.utcnow, description="Record creation timestamp")
# Sensor values with enhanced structure
energy: Optional[Dict[str, Any]] = Field(None, description="Energy reading with value and unit")
co2: Optional[Dict[str, Any]] = Field(None, description="CO2 reading with value and unit")
temperature: Optional[Dict[str, Any]] = Field(None, description="Temperature reading with value and unit")
humidity: Optional[Dict[str, Any]] = Field(None, description="Humidity reading with value and unit")
motion: Optional[Dict[str, Any]] = Field(None, description="Motion detection reading")
# Additional sensor types from tiocps
power: Optional[Dict[str, Any]] = Field(None, description="Power consumption reading")
voltage: Optional[Dict[str, Any]] = Field(None, description="Voltage reading")
current: Optional[Dict[str, Any]] = Field(None, description="Current reading")
generation: Optional[Dict[str, Any]] = Field(None, description="Energy generation reading")
# Metadata
metadata: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Additional sensor metadata")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
class LegacySensorReading(BaseModel):
"""Legacy sensor reading format for backward compatibility"""
sensor_id: str = Field(..., alias="sensorId")
timestamp: int
value: float
unit: str
created_at: datetime = Field(default_factory=datetime.utcnow)
class Config:
allow_population_by_field_name = True
class SensorMetadata(BaseModel):
"""Enhanced sensor metadata from original dashboard"""
sensor_id: str = Field(..., description="Unique sensor identifier")
name: str = Field(..., description="Human-readable sensor name")
sensor_type: SensorType = Field(..., description="Type of sensor")
room: Optional[str] = Field(None, description="Room assignment")
status: SensorStatus = Field(default=SensorStatus.OFFLINE, description="Current sensor status")
# Physical location and installation details
location: Optional[str] = Field(None, description="Physical location description")
floor: Optional[str] = Field(None, description="Floor level")
building: Optional[str] = Field(None, description="Building identifier")
# Technical specifications
model: Optional[str] = Field(None, description="Sensor model")
manufacturer: Optional[str] = Field(None, description="Sensor manufacturer")
firmware_version: Optional[str] = Field(None, description="Firmware version")
hardware_version: Optional[str] = Field(None, description="Hardware version")
# Network and connectivity
ip_address: Optional[str] = Field(None, description="IP address if network connected")
mac_address: Optional[str] = Field(None, description="MAC address")
connection_type: Optional[str] = Field(None, description="Connection type (wifi, ethernet, zigbee, etc.)")
# Power and maintenance
battery_level: Optional[float] = Field(None, description="Battery level percentage")
last_maintenance: Optional[datetime] = Field(None, description="Last maintenance date")
next_maintenance: Optional[datetime] = Field(None, description="Next scheduled maintenance")
# Operational settings
sampling_rate: Optional[int] = Field(None, description="Data sampling rate in seconds")
calibration_date: Optional[datetime] = Field(None, description="Last calibration date")
# Capabilities from tiocps integration
monitoring_capabilities: List[str] = Field(default_factory=list, description="List of monitoring capabilities")
control_capabilities: List[str] = Field(default_factory=list, description="List of control capabilities")
demand_response_enabled: bool = Field(default=False, description="Demand response participation")
# Timestamps
installed_at: Optional[datetime] = Field(None, description="Installation timestamp")
last_seen: Optional[datetime] = Field(None, description="Last communication timestamp")
created_at: datetime = Field(default_factory=datetime.utcnow, description="Record creation timestamp")
updated_at: datetime = Field(default_factory=datetime.utcnow, description="Record update timestamp")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class RoomMetrics(BaseModel):
"""Enhanced room metrics from original dashboard"""
room: str = Field(..., description="Room identifier")
timestamp: int = Field(..., description="Metrics calculation timestamp")
created_at: datetime = Field(default_factory=datetime.utcnow, description="Record creation timestamp")
# Sensor inventory
sensor_count: int = Field(0, description="Total number of sensors in room")
active_sensors: List[str] = Field(default_factory=list, description="List of active sensor IDs")
sensor_types: List[SensorType] = Field(default_factory=list, description="Types of sensors present")
# Energy metrics (enhanced from tiocps)
energy: Optional[Dict[str, Any]] = Field(None, description="Energy consumption metrics")
power: Optional[Dict[str, Any]] = Field(None, description="Power consumption metrics")
generation: Optional[Dict[str, Any]] = Field(None, description="Energy generation metrics")
flexibility: Optional[Dict[str, Any]] = Field(None, description="Energy flexibility metrics")
# Environmental metrics
co2: Optional[Dict[str, Any]] = Field(None, description="CO2 level metrics")
temperature: Optional[Dict[str, Any]] = Field(None, description="Temperature metrics")
humidity: Optional[Dict[str, Any]] = Field(None, description="Humidity metrics")
# Occupancy and usage
occupancy_estimate: OccupancyLevel = Field(default=OccupancyLevel.LOW, description="Estimated occupancy level")
motion_detected: bool = Field(default=False, description="Recent motion detection status")
# Time-based metrics
last_activity: Optional[datetime] = Field(None, description="Last detected activity timestamp")
daily_usage_hours: Optional[float] = Field(None, description="Estimated daily usage in hours")
# Economic metrics from tiocps
energy_cost: Optional[float] = Field(None, description="Estimated energy cost")
savings_potential: Optional[float] = Field(None, description="Potential savings from optimization")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class Room(BaseModel):
"""Room definition model"""
name: str = Field(..., description="Room name/identifier")
display_name: Optional[str] = Field(None, description="Human-readable room name")
floor: Optional[str] = Field(None, description="Floor level")
building: Optional[str] = Field(None, description="Building identifier")
area_m2: Optional[float] = Field(None, description="Room area in square meters")
capacity: Optional[int] = Field(None, description="Room capacity (people)")
room_type: Optional[str] = Field(None, description="Room type (office, meeting, etc.)")
# Configuration
target_temperature: Optional[float] = Field(None, description="Target temperature")
target_co2: Optional[float] = Field(None, description="Target CO2 level")
operating_hours: Optional[Dict[str, Any]] = Field(None, description="Operating hours schedule")
# Status
active: bool = Field(default=True, description="Whether room is active")
created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow)
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
class SystemEvent(BaseModel):
"""Enhanced system events from original dashboard"""
event_id: str = Field(..., description="Unique event identifier")
event_type: str = Field(..., description="Type of event")
severity: Literal["info", "warning", "error", "critical"] = Field(..., description="Event severity")
timestamp: int = Field(..., description="Event timestamp")
created_at: datetime = Field(default_factory=datetime.utcnow, description="Record creation timestamp")
# Event details
title: str = Field(..., description="Event title")
description: str = Field(..., description="Event description")
source: Optional[str] = Field(None, description="Event source (sensor_id, system component, etc.)")
# Context
sensor_id: Optional[str] = Field(None, description="Related sensor ID")
room: Optional[str] = Field(None, description="Related room")
# Event data
data: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Additional event data")
# Status tracking
acknowledged: bool = Field(default=False, description="Whether event has been acknowledged")
resolved: bool = Field(default=False, description="Whether event has been resolved")
acknowledged_by: Optional[str] = Field(None, description="Who acknowledged the event")
resolved_by: Optional[str] = Field(None, description="Who resolved the event")
acknowledged_at: Optional[datetime] = Field(None, description="Acknowledgment timestamp")
resolved_at: Optional[datetime] = Field(None, description="Resolution timestamp")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class DataQuery(BaseModel):
"""Enhanced data query parameters from original dashboard"""
sensor_ids: Optional[List[str]] = Field(None, description="Filter by sensor IDs")
rooms: Optional[List[str]] = Field(None, description="Filter by rooms")
sensor_types: Optional[List[SensorType]] = Field(None, description="Filter by sensor types")
# Time range
start_time: Optional[int] = Field(None, description="Start timestamp (Unix)")
end_time: Optional[int] = Field(None, description="End timestamp (Unix)")
# Aggregation
aggregate: Optional[str] = Field(None, description="Aggregation method (avg, sum, min, max)")
interval: Optional[str] = Field(None, description="Aggregation interval (1m, 5m, 1h, 1d)")
# Pagination
limit: int = Field(default=100, description="Maximum number of records to return")
offset: int = Field(default=0, description="Number of records to skip")
# Sorting
sort_by: str = Field(default="timestamp", description="Field to sort by")
sort_order: Literal["asc", "desc"] = Field(default="desc", description="Sort order")
# Additional filters from tiocps
energy_threshold: Optional[float] = Field(None, description="Filter by energy threshold")
co2_threshold: Optional[float] = Field(None, description="Filter by CO2 threshold")
include_metadata: bool = Field(default=False, description="Include sensor metadata in response")
class DataResponse(BaseModel):
"""Enhanced response model for data queries"""
data: List[Dict[str, Any]] = Field(default_factory=list, description="Query results")
total_count: int = Field(0, description="Total number of matching records")
query: DataQuery = Field(..., description="Original query parameters")
execution_time_ms: float = Field(..., description="Query execution time in milliseconds")
# Additional metadata
aggregation_applied: bool = Field(default=False, description="Whether data was aggregated")
cache_hit: bool = Field(default=False, description="Whether result was served from cache")
class AnalyticsSummary(BaseModel):
"""Comprehensive analytics summary"""
period_hours: int
start_time: datetime
end_time: datetime
# Sensor analytics
total_sensors: int
active_sensors: int
sensor_types_summary: Dict[str, int]
# Room analytics
total_rooms: int
active_rooms: int
room_occupancy_summary: Dict[str, int]
# Energy analytics
total_energy_consumption: float
total_energy_generation: float
net_energy_consumption: float
energy_efficiency: float
# Environmental analytics
average_co2: float
average_temperature: float
average_humidity: float
# System health
system_events_count: int
critical_events_count: int
sensor_errors_count: int
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
# Room Management Models
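# Note: this second Room definition shadows the Room class declared earlier in this module;
# the later definition is the one Python keeps at import time.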
class Room(BaseModel):
"""Room model for database storage and API responses"""
name: str = Field(..., description="Unique room name")
description: Optional[str] = Field(None, description="Room description")
floor: Optional[str] = Field(None, description="Floor designation")
building: Optional[str] = Field(None, description="Building name")
area: Optional[float] = Field(None, description="Room area in square meters")
capacity: Optional[int] = Field(None, description="Maximum occupancy")
room_type: Optional[str] = Field(None, description="Room type (office, meeting, storage, etc.)")
# Metadata
created_at: datetime = Field(default_factory=datetime.utcnow, description="Room creation timestamp")
updated_at: datetime = Field(default_factory=datetime.utcnow, description="Room update timestamp")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class RoomCreate(BaseModel):
"""Model for creating new rooms"""
name: str = Field(..., description="Unique room name", min_length=1, max_length=100)
description: Optional[str] = Field(None, description="Room description", max_length=500)
floor: Optional[str] = Field(None, description="Floor designation", max_length=50)
building: Optional[str] = Field(None, description="Building name", max_length=100)
area: Optional[float] = Field(None, description="Room area in square meters", gt=0)
capacity: Optional[int] = Field(None, description="Maximum occupancy", gt=0)
room_type: Optional[str] = Field(None, description="Room type", max_length=50)
class RoomUpdate(BaseModel):
"""Model for updating existing rooms"""
description: Optional[str] = Field(None, description="Room description", max_length=500)
floor: Optional[str] = Field(None, description="Floor designation", max_length=50)
building: Optional[str] = Field(None, description="Building name", max_length=100)
area: Optional[float] = Field(None, description="Room area in square meters", gt=0)
capacity: Optional[int] = Field(None, description="Maximum occupancy", gt=0)
room_type: Optional[str] = Field(None, description="Room type", max_length=50)
class RoomInfo(BaseModel):
"""Comprehensive room information for API responses"""
name: str = Field(..., description="Room name")
description: Optional[str] = Field(None, description="Room description")
floor: Optional[str] = Field(None, description="Floor designation")
building: Optional[str] = Field(None, description="Building name")
area: Optional[float] = Field(None, description="Room area in square meters")
capacity: Optional[int] = Field(None, description="Maximum occupancy")
room_type: Optional[str] = Field(None, description="Room type")
# Runtime information
sensor_count: int = Field(0, description="Number of sensors in room")
active_sensors: int = Field(0, description="Number of active sensors")
last_updated: Optional[datetime] = Field(None, description="Last metrics update")
# Timestamps
created_at: datetime = Field(..., description="Room creation timestamp")
updated_at: datetime = Field(..., description="Room update timestamp")
class Config:
json_encoders = {
datetime: lambda v: v.isoformat() if v else None
}
class HealthResponse(BaseModel):
"""Health check response"""
service: str
status: str
timestamp: datetime
version: str
# Additional service-specific health metrics
total_sensors: Optional[int] = None
active_sensors: Optional[int] = None
total_rooms: Optional[int] = None
websocket_connections: Optional[int] = None
class Config:
json_encoders = {
datetime: lambda v: v.isoformat()
}
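
As a rough illustration of the payload shapes these models accept, the snippet below constructs an enhanced reading and a legacy reading. Field values are made up, and the import path and exact Pydantic-version behaviour are assumptions.

import time

from models import SensorReading, LegacySensorReading, SensorType  # import path is an assumption

reading = SensorReading(
    sensor_id="sensor-001",
    room="Conference Room A",
    sensor_type=SensorType.ENERGY,
    timestamp=int(time.time()),
    energy={"value": 42.5, "unit": "kWh"},
    temperature={"value": 21.3, "unit": "C"},
)

# The legacy flat format is populated through the "sensorId" alias.
legacy = LegacySensorReading(sensorId="sensor-001", timestamp=int(time.time()), value=42.5, unit="kWh")

print(reading.dict(exclude_none=True))  # .model_dump(...) on Pydantic v2
print(legacy.dict())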

View File

@@ -0,0 +1,467 @@
"""
Room service for managing rooms and room-level metrics
"""
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
import json
logger = logging.getLogger(__name__)
class RoomService:
"""Service for managing rooms and room-level analytics"""
def __init__(self, db, redis_client):
self.db = db
self.redis = redis_client
async def get_all_room_names(self) -> List[str]:
"""Get a simple list of all room names for dropdowns/selections"""
try:
# Get rooms from the rooms collection
room_cursor = self.db.rooms.find({}, {"name": 1})
room_names = set()
async for room in room_cursor:
room_names.add(room["name"])
# Also get rooms that exist only in sensor data (legacy support)
sensor_cursor = self.db.sensors.find(
{"room": {"$ne": None, "$exists": True}},
{"room": 1}
)
async for sensor in sensor_cursor:
if sensor.get("room"):
room_names.add(sensor["room"])
# Convert to sorted list
return sorted(list(room_names))
except Exception as e:
logger.error(f"Error getting room names: {e}")
raise
async def initialize_default_rooms(self) -> None:
"""Initialize default rooms if none exist"""
try:
# Check if any rooms exist
room_count = await self.db.rooms.count_documents({})
if room_count == 0:
# Create default rooms
default_rooms = [
{"name": "Conference Room A", "description": "Main conference room", "room_type": "meeting"},
{"name": "Conference Room B", "description": "Secondary conference room", "room_type": "meeting"},
{"name": "Office Floor 1", "description": "First floor office space", "room_type": "office"},
{"name": "Office Floor 2", "description": "Second floor office space", "room_type": "office"},
{"name": "Kitchen", "description": "Employee kitchen and break room", "room_type": "common"},
{"name": "Lobby", "description": "Main entrance and reception", "room_type": "common"},
{"name": "Server Room", "description": "IT equipment room", "room_type": "technical"},
{"name": "Storage Room", "description": "General storage", "room_type": "storage"},
{"name": "Meeting Room 1", "description": "Small meeting room", "room_type": "meeting"},
{"name": "Meeting Room 2", "description": "Small meeting room", "room_type": "meeting"}
]
for room_data in default_rooms:
room_doc = {
**room_data,
"created_at": datetime.utcnow(),
"updated_at": datetime.utcnow()
}
await self.db.rooms.insert_one(room_doc)
logger.info(f"Initialized {len(default_rooms)} default rooms")
except Exception as e:
logger.error(f"Error initializing default rooms: {e}")
raise
async def get_rooms(self) -> List[Dict[str, Any]]:
"""Get all rooms with sensor counts and metrics"""
try:
# Get unique rooms from sensors
pipeline = [
{"$group": {"_id": "$room", "sensor_count": {"$sum": 1}}},
{"$match": {"_id": {"$ne": None}}}
]
cursor = self.db.sensors.aggregate(pipeline)
rooms = []
async for room_data in cursor:
room_name = room_data["_id"]
# Get latest room metrics
latest_metrics = await self._get_latest_room_metrics(room_name)
room_info = {
"name": room_name,
"sensor_count": room_data["sensor_count"],
"latest_metrics": latest_metrics,
"last_updated": latest_metrics.get("timestamp") if latest_metrics else None
}
rooms.append(room_info)
return rooms
except Exception as e:
logger.error(f"Error getting rooms: {e}")
raise
async def create_room(self, room_data: Dict[str, Any]) -> Dict[str, Any]:
"""Create a new room"""
try:
room_doc = {
"name": room_data.get("name"),
"description": room_data.get("description", ""),
"floor": room_data.get("floor"),
"building": room_data.get("building"),
"area": room_data.get("area"),
"capacity": room_data.get("capacity"),
"room_type": room_data.get("room_type"),
"created_at": datetime.utcnow(),
"updated_at": datetime.utcnow()
}
# Validate required fields
if not room_doc["name"] or not room_doc["name"].strip():
raise ValueError("Room name is required")
# Check if room already exists
existing = await self.db.rooms.find_one({"name": room_doc["name"]})
if existing:
raise ValueError(f"Room {room_doc['name']} already exists")
result = await self.db.rooms.insert_one(room_doc)
return {
"id": str(result.inserted_id),
"name": room_doc["name"],
"created_at": room_doc["created_at"]
}
except Exception as e:
logger.error(f"Error creating room: {e}")
raise
async def update_room(self, room_name: str, room_data: Dict[str, Any]) -> Dict[str, Any]:
"""Update an existing room"""
try:
# Check if room exists
existing = await self.db.rooms.find_one({"name": room_name})
if not existing:
raise ValueError(f"Room {room_name} not found")
# Prepare update document
update_doc = {
"updated_at": datetime.utcnow()
}
# Update only provided fields
for field in ["description", "floor", "building", "area", "capacity", "room_type"]:
if field in room_data and room_data[field] is not None:
update_doc[field] = room_data[field]
# Perform update
result = await self.db.rooms.update_one(
{"name": room_name},
{"$set": update_doc}
)
if result.modified_count == 0:
logger.warning(f"No changes made to room {room_name}")
return {
"name": room_name,
"updated_at": update_doc["updated_at"],
"modified": result.modified_count > 0
}
except Exception as e:
logger.error(f"Error updating room: {e}")
raise
async def delete_room(self, room_name: str) -> Dict[str, Any]:
"""Delete a room and optionally reassign sensors"""
try:
# Check if room exists
existing = await self.db.rooms.find_one({"name": room_name})
# Check for sensors in this room
sensors_in_room = await self.db.sensors.find({"room": room_name}).to_list(None)
if sensors_in_room:
# Update sensors to have null room (don't delete sensors)
await self.db.sensors.update_many(
{"room": room_name},
{"$unset": {"room": ""}}
)
# Delete room from rooms collection if it exists
room_deleted = False
if existing:
result = await self.db.rooms.delete_one({"name": room_name})
room_deleted = result.deleted_count > 0
# Delete room metrics
metrics_result = await self.db.room_metrics.delete_many({"room": room_name})
return {
"room": room_name,
"room_deleted": room_deleted,
"sensors_updated": len(sensors_in_room),
"metrics_deleted": metrics_result.deleted_count
}
except Exception as e:
logger.error(f"Error deleting room: {e}")
raise
async def get_room_details(self, room_name: str) -> Optional[Dict[str, Any]]:
"""Get detailed room information"""
try:
# Get room info
room = await self.db.rooms.find_one({"name": room_name})
if not room:
# Create basic room info from sensor data
sensors = await self.db.sensors.find({"room": room_name}).to_list(None)
if not sensors:
return None
room = {
"name": room_name,
"description": f"Room with {len(sensors)} sensors",
"sensor_count": len(sensors)
}
else:
room["_id"] = str(room["_id"])
# Get sensor count
sensor_count = await self.db.sensors.count_documents({"room": room_name})
room["sensor_count"] = sensor_count
# Get sensors in this room
cursor = self.db.sensors.find({"room": room_name})
sensors = []
async for sensor in cursor:
sensor["_id"] = str(sensor["_id"])
sensors.append(sensor)
room["sensors"] = sensors
# Get recent room metrics
room["recent_metrics"] = await self._get_recent_room_metrics(room_name, hours=24)
return room
except Exception as e:
logger.error(f"Error getting room details: {e}")
raise
async def get_room_data(self, room_name: str, start_time: Optional[int] = None,
end_time: Optional[int] = None, limit: int = 100) -> Dict[str, Any]:
"""Get historical data for a room"""
try:
# Get room metrics
room_query = {"room": room_name}
if start_time or end_time:
room_query["timestamp"] = {}
if start_time:
room_query["timestamp"]["$gte"] = start_time
if end_time:
room_query["timestamp"]["$lte"] = end_time
room_metrics_cursor = self.db.room_metrics.find(room_query).sort("timestamp", -1).limit(limit)
room_metrics = []
async for metric in room_metrics_cursor:
metric["_id"] = str(metric["_id"])
room_metrics.append(metric)
# Get sensor readings for this room
sensor_query = {"room": room_name}
if start_time or end_time:
sensor_query["timestamp"] = {}
if start_time:
sensor_query["timestamp"]["$gte"] = start_time
if end_time:
sensor_query["timestamp"]["$lte"] = end_time
sensor_readings_cursor = self.db.sensor_readings.find(sensor_query).sort("timestamp", -1).limit(limit)
sensor_readings = []
async for reading in sensor_readings_cursor:
reading["_id"] = str(reading["_id"])
sensor_readings.append(reading)
return {
"room_metrics": room_metrics,
"sensor_readings": sensor_readings
}
except Exception as e:
logger.error(f"Error getting room data: {e}")
raise
async def update_room_metrics(self, sensor_data):
"""Update room-level metrics when sensor data is received"""
try:
if not sensor_data.room:
return
# Calculate room-level aggregates
room_metrics = await self._calculate_room_metrics(sensor_data.room)
if room_metrics:
# Store room metrics
metrics_doc = {
"room": sensor_data.room,
"timestamp": sensor_data.timestamp,
"total_energy": room_metrics.get("total_energy", 0),
"average_temperature": room_metrics.get("avg_temperature"),
"co2_level": room_metrics.get("co2_level"),
"occupancy_estimate": room_metrics.get("occupancy_estimate"),
"sensor_count": room_metrics.get("sensor_count", 0),
"created_at": datetime.utcnow()
}
await self.db.room_metrics.insert_one(metrics_doc)
# Cache latest metrics
if self.redis:
cache_key = f"room:{sensor_data.room}:latest_metrics"
await self.redis.setex(cache_key, 3600, json.dumps(metrics_doc, default=str))
except Exception as e:
logger.error(f"Error updating room metrics: {e}")
async def aggregate_all_room_metrics(self):
"""Aggregate metrics for all rooms"""
try:
# Get all unique rooms
pipeline = [{"$group": {"_id": "$room"}}]
cursor = self.db.sensors.aggregate(pipeline)
async for room_data in cursor:
room_name = room_data["_id"]
if room_name:
await self._calculate_room_metrics(room_name)
except Exception as e:
logger.error(f"Error aggregating room metrics: {e}")
async def _get_latest_room_metrics(self, room_name: str) -> Optional[Dict[str, Any]]:
"""Get latest room metrics"""
try:
# Try Redis cache first
if self.redis:
cache_key = f"room:{room_name}:latest_metrics"
cached = await self.redis.get(cache_key)
if cached:
return json.loads(cached)
# Fall back to database
latest = await self.db.room_metrics.find_one(
{"room": room_name},
sort=[("timestamp", -1)]
)
if latest:
latest["_id"] = str(latest["_id"])
return latest
return None
except Exception as e:
logger.error(f"Error getting latest room metrics: {e}")
return None
async def _get_recent_room_metrics(self, room_name: str, hours: int = 24) -> List[Dict[str, Any]]:
"""Get recent room metrics"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
cursor = self.db.room_metrics.find({
"room": room_name,
"created_at": {"$gte": start_time}
}).sort("timestamp", -1)
metrics = []
async for metric in cursor:
metric["_id"] = str(metric["_id"])
metrics.append(metric)
return metrics
except Exception as e:
logger.error(f"Error getting recent room metrics: {e}")
return []
async def _calculate_room_metrics(self, room_name: str) -> Dict[str, Any]:
"""Calculate aggregated metrics for a room"""
try:
# Get recent sensor readings (last 5 minutes)
five_minutes_ago = datetime.utcnow() - timedelta(minutes=5)
pipeline = [
{
"$match": {
"room": room_name,
"created_at": {"$gte": five_minutes_ago}
}
},
{
"$group": {
"_id": "$sensor_id",
"latest_value": {"$last": "$value"},
"sensor_type": {"$last": "$sensor_type"} if "sensor_type" in ["$first", "$last"] else {"$first": "energy"},
"unit": {"$last": "$unit"}
}
}
]
cursor = self.db.sensor_readings.aggregate(pipeline)
total_energy = 0
temperatures = []
co2_levels = []
sensor_count = 0
async for sensor_data in cursor:
sensor_count += 1
value = sensor_data.get("latest_value", 0)
sensor_type = sensor_data.get("sensor_type", "energy")
if sensor_type == "energy" or "energy" in str(sensor_data.get("unit", "")).lower():
total_energy += value
elif sensor_type == "temperature":
temperatures.append(value)
elif sensor_type == "co2":
co2_levels.append(value)
metrics = {
"total_energy": total_energy,
"sensor_count": sensor_count,
"avg_temperature": sum(temperatures) / len(temperatures) if temperatures else None,
"co2_level": sum(co2_levels) / len(co2_levels) if co2_levels else None,
"occupancy_estimate": self._estimate_occupancy(sensor_count, total_energy)
}
return metrics
except Exception as e:
logger.error(f"Error calculating room metrics: {e}")
return {}
def _estimate_occupancy(self, sensor_count: int, total_energy: float) -> Optional[str]:
"""Estimate occupancy level based on energy consumption"""
if total_energy == 0:
return "vacant"
elif total_energy < sensor_count * 50: # Low threshold
return "low"
elif total_energy < sensor_count * 150: # Medium threshold
return "medium"
else:
return "high"

View File

@@ -0,0 +1,475 @@
"""Sensors module API routes."""
import logging
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends, WebSocket, WebSocketDisconnect, Query, BackgroundTasks
from typing import Optional
from .models import (
SensorReading, SensorMetadata, RoomCreate, RoomUpdate, DataQuery, DataResponse,
SensorType, SensorStatus, HealthResponse
)
from .sensor_service import SensorService
from .room_service import RoomService
from .analytics_service import AnalyticsService
from .websocket_manager import WebSocketManager
from core.dependencies import get_sensors_db, get_redis
logger = logging.getLogger(__name__)
# Create router
router = APIRouter()
# WebSocket manager (shared across all route handlers)
websocket_manager = WebSocketManager()
# Dependency functions
async def get_sensor_service(db=Depends(get_sensors_db), redis=Depends(get_redis)):
return SensorService(db, redis)
async def get_room_service(db=Depends(get_sensors_db), redis=Depends(get_redis)):
return RoomService(db, redis)
async def get_analytics_service(db=Depends(get_sensors_db), redis=Depends(get_redis)):
return AnalyticsService(db, redis)
# Health check
@router.get("/health", response_model=HealthResponse)
async def health_check(db=Depends(get_sensors_db)):
"""Health check endpoint for sensors module"""
try:
await db.command("ping")
return HealthResponse(
service="sensors-module",
status="healthy",
timestamp=datetime.utcnow(),
version="1.0.0"
)
except Exception as e:
logger.error(f"Health check failed: {e}")
raise HTTPException(status_code=503, detail="Service Unavailable")
# WebSocket endpoint for real-time data
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
"""WebSocket endpoint for real-time sensor data"""
await websocket_manager.connect(websocket)
try:
while True:
await websocket.receive_text()
except WebSocketDisconnect:
await websocket_manager.disconnect(websocket)
# Sensor Management Routes
@router.get("/sensors/get")
async def get_sensors(
room: Optional[str] = Query(None, description="Filter by room"),
sensor_type: Optional[SensorType] = Query(None, description="Filter by sensor type"),
status: Optional[SensorStatus] = Query(None, description="Filter by status"),
service: SensorService = Depends(get_sensor_service)
):
"""Get all sensors with optional filtering"""
try:
sensors = await service.get_sensors(room=room, sensor_type=sensor_type, status=status)
return {
"sensors": sensors,
"count": len(sensors),
"filters": {
"room": room,
"sensor_type": sensor_type.value if sensor_type else None,
"status": status.value if status else None
}
}
except Exception as e:
logger.error(f"Error getting sensors: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sensors/{sensor_id}")
async def get_sensor(sensor_id: str, service: SensorService = Depends(get_sensor_service)):
"""Get detailed sensor information"""
try:
sensor = await service.get_sensor_details(sensor_id)
if not sensor:
raise HTTPException(status_code=404, detail="Sensor not found")
return sensor
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting sensor {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/sensors/{sensor_id}/data")
async def get_sensor_data(
sensor_id: str,
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
limit: int = Query(100, description="Maximum records to return"),
offset: int = Query(0, description="Records to skip"),
service: SensorService = Depends(get_sensor_service)
):
"""Get historical data for a specific sensor"""
try:
data = await service.get_sensor_data(
sensor_id=sensor_id,
start_time=start_time,
end_time=end_time,
limit=limit,
offset=offset
)
return DataResponse(
data=data["readings"],
total_count=data["total_count"],
query=DataQuery(
sensor_ids=[sensor_id],
start_time=start_time,
end_time=end_time,
limit=limit,
offset=offset
),
execution_time_ms=data.get("execution_time_ms", 0)
)
except Exception as e:
logger.error(f"Error getting sensor data for {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/sensors")
async def create_sensor(
sensor_data: SensorMetadata,
service: SensorService = Depends(get_sensor_service)
):
"""Register a new sensor"""
try:
result = await service.create_sensor(sensor_data)
return {
"message": "Sensor created successfully",
"sensor_id": sensor_data.sensor_id,
"created_at": result.get("created_at")
}
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error creating sensor: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.put("/sensors/{sensor_id}")
async def update_sensor(
sensor_id: str,
update_data: dict,
service: SensorService = Depends(get_sensor_service)
):
"""Update sensor metadata"""
try:
result = await service.update_sensor(sensor_id, update_data)
if not result:
raise HTTPException(status_code=404, detail="Sensor not found")
return {
"message": "Sensor updated successfully",
"sensor_id": sensor_id,
"updated_at": datetime.utcnow().isoformat()
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error updating sensor {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.delete("/sensors/{sensor_id}")
async def delete_sensor(
sensor_id: str,
service: SensorService = Depends(get_sensor_service)
):
"""Delete a sensor and all its data"""
try:
result = await service.delete_sensor(sensor_id)
return {
"message": "Sensor deleted successfully",
"sensor_id": sensor_id,
"readings_deleted": result.get("readings_deleted", 0)
}
except Exception as e:
logger.error(f"Error deleting sensor {sensor_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Room Management Routes
@router.get("/rooms/names")
async def get_room_names(service: RoomService = Depends(get_room_service)):
"""Get simple list of room names for dropdowns"""
try:
room_names = await service.get_all_room_names()
return {
"rooms": room_names,
"count": len(room_names)
}
except Exception as e:
logger.error(f"Error getting room names: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/rooms")
async def get_rooms(service: RoomService = Depends(get_room_service)):
"""Get all rooms with sensor counts and metrics"""
try:
rooms = await service.get_rooms()
return {
"rooms": rooms,
"count": len(rooms)
}
except Exception as e:
logger.error(f"Error getting rooms: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/rooms")
async def create_room(
room_data: RoomCreate,
service: RoomService = Depends(get_room_service)
):
"""Create a new room"""
try:
result = await service.create_room(room_data.dict())
return {
"message": "Room created successfully",
"room": result["name"],
"created_at": result["created_at"]
}
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error creating room: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.put("/rooms/{room_name}")
async def update_room(
room_name: str,
room_data: RoomUpdate,
service: RoomService = Depends(get_room_service)
):
"""Update an existing room"""
try:
result = await service.update_room(room_name, room_data.dict(exclude_unset=True))
return {
"message": "Room updated successfully",
"room": result["name"],
"updated_at": result["updated_at"],
"modified": result["modified"]
}
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error updating room {room_name}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.delete("/rooms/{room_name}")
async def delete_room(room_name: str, service: RoomService = Depends(get_room_service)):
"""Delete a room"""
try:
result = await service.delete_room(room_name)
return {
"message": "Room deleted successfully",
**result
}
except Exception as e:
logger.error(f"Error deleting room {room_name}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/rooms/{room_name}")
async def get_room(room_name: str, service: RoomService = Depends(get_room_service)):
"""Get detailed room information"""
try:
room = await service.get_room_details(room_name)
if not room:
raise HTTPException(status_code=404, detail="Room not found")
return room
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting room {room_name}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/rooms/{room_name}/data")
async def get_room_data(
room_name: str,
start_time: Optional[int] = Query(None, description="Start timestamp (Unix)"),
end_time: Optional[int] = Query(None, description="End timestamp (Unix)"),
limit: int = Query(100, description="Maximum records to return"),
service: RoomService = Depends(get_room_service)
):
"""Get historical data for a specific room"""
try:
data = await service.get_room_data(
room_name=room_name,
start_time=start_time,
end_time=end_time,
limit=limit
)
return {
"room": room_name,
"room_metrics": data.get("room_metrics", []),
"sensor_readings": data.get("sensor_readings", []),
"period": {
"start_time": start_time,
"end_time": end_time
}
}
except Exception as e:
logger.error(f"Error getting room data for {room_name}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Analytics Routes
@router.post("/data/query")
async def query_data(
query_params: DataQuery,
service: AnalyticsService = Depends(get_analytics_service)
):
"""Advanced data querying with multiple filters"""
try:
result = await service.query_data(query_params)
return result
except Exception as e:
logger.error(f"Error executing data query: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics/summary")
async def get_analytics_summary(
hours: int = Query(24, description="Hours of data to analyze"),
service: AnalyticsService = Depends(get_analytics_service)
):
"""Get comprehensive analytics summary"""
try:
analytics = await service.get_analytics_summary(hours)
return analytics
except Exception as e:
logger.error(f"Error getting analytics summary: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics/energy")
async def get_energy_analytics(
hours: int = Query(24),
room: Optional[str] = Query(None),
service: AnalyticsService = Depends(get_analytics_service)
):
"""Get energy-specific analytics"""
try:
analytics = await service.get_energy_analytics(hours, room)
return analytics
except Exception as e:
logger.error(f"Error getting energy analytics: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Data Export
@router.get("/export")
async def export_data(
start_time: int = Query(..., description="Start timestamp (Unix)"),
end_time: int = Query(..., description="End timestamp (Unix)"),
sensor_ids: Optional[str] = Query(None, description="Comma-separated sensor IDs"),
format: str = Query("json", description="Export format (json, csv)"),
service: SensorService = Depends(get_sensor_service)
):
"""Export sensor data"""
try:
export_data_result = await service.export_data(
start_time=start_time,
end_time=end_time,
sensor_ids=sensor_ids,
format=format
)
return export_data_result
except Exception as e:
logger.error(f"Error exporting data: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# System Events
@router.get("/events")
async def get_events(
severity: Optional[str] = Query(None, description="Filter by severity"),
event_type: Optional[str] = Query(None, description="Filter by event type"),
hours: int = Query(24, description="Hours of events to retrieve"),
limit: int = Query(50, description="Maximum events to return"),
service: SensorService = Depends(get_sensor_service)
):
"""Get system events and alerts"""
try:
events = await service.get_events(
severity=severity,
event_type=event_type,
hours=hours,
limit=limit
)
return {
"events": events,
"count": len(events),
"period_hours": hours
}
except Exception as e:
logger.error(f"Error getting events: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Real-time data ingestion endpoint
@router.post("/data/ingest")
async def ingest_sensor_data(
sensor_data: SensorReading,
background_tasks: BackgroundTasks,
service: SensorService = Depends(get_sensor_service),
room_service: RoomService = Depends(get_room_service)
):
"""Ingest real-time sensor data"""
try:
result = await service.ingest_sensor_data(sensor_data)
# Schedule background tasks
if sensor_data.room:
background_tasks.add_task(_update_room_metrics, room_service, sensor_data)
background_tasks.add_task(_broadcast_sensor_data, sensor_data)
return {
"message": "Sensor data ingested successfully",
"sensor_id": sensor_data.sensor_id,
"timestamp": sensor_data.timestamp
}
except Exception as e:
logger.error(f"Error ingesting sensor data: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
# Background task helper functions
async def _update_room_metrics(room_service: RoomService, sensor_data: SensorReading):
"""Update room-level metrics when sensor data is received"""
try:
await room_service.update_room_metrics(sensor_data)
except Exception as e:
logger.error(f"Error updating room metrics: {e}")
async def _broadcast_sensor_data(sensor_data: SensorReading):
"""Broadcast sensor data to WebSocket clients"""
try:
await websocket_manager.broadcast_sensor_data(sensor_data)
except Exception as e:
logger.error(f"Error broadcasting sensor data: {e}")

View File

@@ -0,0 +1,251 @@
"""
Sensor service business logic
"""
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
import json
logger = logging.getLogger(__name__)
class SensorService:
"""Service for managing sensors and sensor data"""
def __init__(self, db, redis_client):
self.db = db
self.redis = redis_client
async def get_sensors(self, room: Optional[str] = None, sensor_type: Optional[str] = None, status: Optional[str] = None) -> List[Dict[str, Any]]:
"""Get sensors with optional filtering"""
try:
query = {}
if room:
query["room"] = room
if sensor_type:
query["sensor_type"] = sensor_type
if status:
query["status"] = status
cursor = self.db.sensors.find(query)
sensors = []
async for sensor in cursor:
sensor["_id"] = str(sensor["_id"])
sensors.append(sensor)
return sensors
except Exception as e:
logger.error(f"Error getting sensors: {e}")
raise
async def get_sensor_details(self, sensor_id: str) -> Optional[Dict[str, Any]]:
"""Get detailed sensor information"""
try:
sensor = await self.db.sensors.find_one({"sensor_id": sensor_id})
if sensor:
sensor["_id"] = str(sensor["_id"])
# Get recent readings
recent_readings = await self.get_sensor_data(sensor_id, limit=10)
sensor["recent_readings"] = recent_readings.get("readings", [])
return sensor
return None
except Exception as e:
logger.error(f"Error getting sensor details: {e}")
raise
async def get_sensor_data(self, sensor_id: str, start_time: Optional[int] = None,
end_time: Optional[int] = None, limit: int = 100, offset: int = 0) -> Dict[str, Any]:
"""Get historical sensor data"""
try:
query = {"sensor_id": sensor_id}
if start_time or end_time:
query["timestamp"] = {}
if start_time:
query["timestamp"]["$gte"] = start_time
if end_time:
query["timestamp"]["$lte"] = end_time
# Get total count
total_count = await self.db.sensor_readings.count_documents(query)
# Get readings
cursor = self.db.sensor_readings.find(query).sort("timestamp", -1).skip(offset).limit(limit)
readings = []
async for reading in cursor:
reading["_id"] = str(reading["_id"])
readings.append(reading)
return {
"readings": readings,
"total_count": total_count,
"execution_time_ms": 0 # Placeholder
}
except Exception as e:
logger.error(f"Error getting sensor data: {e}")
raise
async def create_sensor(self, sensor_data) -> Dict[str, Any]:
"""Create a new sensor"""
try:
# Check if sensor already exists
existing = await self.db.sensors.find_one({"sensor_id": sensor_data.sensor_id})
if existing:
raise ValueError(f"Sensor {sensor_data.sensor_id} already exists")
# Create sensor document
sensor_doc = {
"sensor_id": sensor_data.sensor_id,
"name": sensor_data.name,
"sensor_type": sensor_data.sensor_type.value if hasattr(sensor_data.sensor_type, 'value') else str(sensor_data.sensor_type),
"room": sensor_data.room,
"location": sensor_data.location if hasattr(sensor_data, 'location') else None,
"status": "active",
"created_at": datetime.utcnow(),
"updated_at": datetime.utcnow()
}
result = await self.db.sensors.insert_one(sensor_doc)
return {"created_at": datetime.utcnow()}
except Exception as e:
logger.error(f"Error creating sensor: {e}")
raise
async def update_sensor(self, sensor_id: str, update_data: Dict[str, Any]) -> bool:
"""Update sensor metadata"""
try:
update_data["updated_at"] = datetime.utcnow()
result = await self.db.sensors.update_one(
{"sensor_id": sensor_id},
{"$set": update_data}
)
return result.modified_count > 0
except Exception as e:
logger.error(f"Error updating sensor: {e}")
raise
async def delete_sensor(self, sensor_id: str) -> Dict[str, Any]:
"""Delete a sensor and its data"""
try:
# Delete readings
readings_result = await self.db.sensor_readings.delete_many({"sensor_id": sensor_id})
# Delete sensor
await self.db.sensors.delete_one({"sensor_id": sensor_id})
return {"readings_deleted": readings_result.deleted_count}
except Exception as e:
logger.error(f"Error deleting sensor: {e}")
raise
async def ingest_sensor_data(self, sensor_data) -> Dict[str, Any]:
"""Ingest real-time sensor data"""
try:
# Create reading document
reading_doc = {
"sensor_id": sensor_data.sensor_id,
"timestamp": sensor_data.timestamp,
"value": sensor_data.value,
"unit": sensor_data.unit if hasattr(sensor_data, 'unit') else None,
"room": sensor_data.room if hasattr(sensor_data, 'room') else None,
"created_at": datetime.utcnow()
}
# Store in database
await self.db.sensor_readings.insert_one(reading_doc)
# Cache recent value in Redis
if self.redis:
cache_key = f"sensor:{sensor_data.sensor_id}:latest"
await self.redis.setex(cache_key, 3600, json.dumps(reading_doc, default=str))
return {"success": True}
except Exception as e:
logger.error(f"Error ingesting sensor data: {e}")
raise
async def export_data(self, start_time: int, end_time: int, sensor_ids: Optional[str] = None,
format: str = "json") -> Dict[str, Any]:
"""Export sensor data"""
try:
query = {
"timestamp": {"$gte": start_time, "$lte": end_time}
}
if sensor_ids:
sensor_list = [s.strip() for s in sensor_ids.split(",")]
query["sensor_id"] = {"$in": sensor_list}
cursor = self.db.sensor_readings.find(query).sort("timestamp", 1)
readings = []
async for reading in cursor:
reading["_id"] = str(reading["_id"])
readings.append(reading)
return {
"format": format,
"data": readings,
"total_records": len(readings),
"period": {"start": start_time, "end": end_time}
}
except Exception as e:
logger.error(f"Error exporting data: {e}")
raise
async def get_events(self, severity: Optional[str] = None, event_type: Optional[str] = None,
hours: int = 24, limit: int = 50) -> List[Dict[str, Any]]:
"""Get system events"""
try:
start_time = datetime.utcnow() - timedelta(hours=hours)
query = {"timestamp": {"$gte": start_time}}
if severity:
query["severity"] = severity
if event_type:
query["event_type"] = event_type
cursor = self.db.system_events.find(query).sort("timestamp", -1).limit(limit)
events = []
async for event in cursor:
event["_id"] = str(event["_id"])
events.append(event)
return events
except Exception as e:
logger.error(f"Error getting events: {e}")
return []
async def cleanup_old_data(self, cutoff_date: datetime):
"""Clean up old sensor data"""
try:
result = await self.db.sensor_readings.delete_many({
"created_at": {"$lt": cutoff_date}
})
logger.info(f"Cleaned up {result.deleted_count} old sensor readings")
except Exception as e:
logger.error(f"Error cleaning up old data: {e}")
raise
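
Since ingest_sensor_data caches the most recent reading in Redis under sensor:<sensor_id>:latest with a one-hour TTL, a consumer can read it back without touching MongoDB. A minimal sketch, assuming a local Redis URL:

import asyncio
import json

import redis.asyncio as aioredis


async def latest_reading(sensor_id: str):
    # Connection URL is an assumption; the application injects its client via core.dependencies.
    redis_client = aioredis.from_url("redis://localhost:6379", decode_responses=True)
    cached = await redis_client.get(f"sensor:{sensor_id}:latest")
    return json.loads(cached) if cached else None


print(asyncio.run(latest_reading("sensor-001")))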

View File

@@ -0,0 +1,288 @@
"""
WebSocket manager for real-time sensor data broadcasting
"""
import asyncio
import json
from typing import List, Set, Dict, Any
from fastapi import WebSocket, WebSocketDisconnect
import logging
from .models import SensorReading
logger = logging.getLogger(__name__)
class WebSocketManager:
"""Manages WebSocket connections for real-time data broadcasting"""
def __init__(self):
self.active_connections: List[WebSocket] = []
self.room_subscriptions: Dict[str, Set[WebSocket]] = {}
self.sensor_subscriptions: Dict[str, Set[WebSocket]] = {}
self.connection_metadata: Dict[WebSocket, Dict[str, Any]] = {}
async def connect(self, websocket: WebSocket, room: str = None, sensor_id: str = None):
"""Accept a WebSocket connection and handle subscriptions"""
await websocket.accept()
self.active_connections.append(websocket)
# Store connection metadata
self.connection_metadata[websocket] = {
"connected_at": asyncio.get_event_loop().time(),
"room": room,
"sensor_id": sensor_id,
"message_count": 0
}
# Handle room subscription
if room:
if room not in self.room_subscriptions:
self.room_subscriptions[room] = set()
self.room_subscriptions[room].add(websocket)
# Handle sensor subscription
if sensor_id:
if sensor_id not in self.sensor_subscriptions:
self.sensor_subscriptions[sensor_id] = set()
self.sensor_subscriptions[sensor_id].add(websocket)
logger.info(f"WebSocket client connected. Total connections: {len(self.active_connections)}")
# Send initial connection confirmation
await self.send_to_connection(websocket, {
"type": "connection_established",
"timestamp": asyncio.get_event_loop().time(),
"subscriptions": {
"room": room,
"sensor_id": sensor_id
},
"total_connections": len(self.active_connections)
})
async def disconnect(self, websocket: WebSocket):
"""Remove a WebSocket connection and clean up subscriptions"""
if websocket in self.active_connections:
self.active_connections.remove(websocket)
# Clean up room subscriptions
for room_connections in self.room_subscriptions.values():
room_connections.discard(websocket)
# Clean up sensor subscriptions
for sensor_connections in self.sensor_subscriptions.values():
sensor_connections.discard(websocket)
# Clean up metadata
self.connection_metadata.pop(websocket, None)
logger.info(f"WebSocket client disconnected. Total connections: {len(self.active_connections)}")
async def send_to_connection(self, websocket: WebSocket, data: Dict[str, Any]):
"""Send data to a specific WebSocket connection"""
try:
await websocket.send_text(json.dumps(data))
# Update message count
if websocket in self.connection_metadata:
self.connection_metadata[websocket]["message_count"] += 1
except Exception as e:
logger.error(f"Error sending data to WebSocket: {e}")
await self.disconnect(websocket)
async def broadcast_to_all(self, data: Dict[str, Any]):
"""Broadcast data to all connected WebSocket clients"""
if not self.active_connections:
return
message = json.dumps(data)
disconnected = []
for websocket in list(self.active_connections):  # snapshot, as in the room/sensor broadcasts, so concurrent disconnects don't mutate the list mid-iteration
try:
await websocket.send_text(message)
# Update message count
if websocket in self.connection_metadata:
self.connection_metadata[websocket]["message_count"] += 1
except Exception as e:
logger.error(f"Error broadcasting to WebSocket: {e}")
disconnected.append(websocket)
# Clean up disconnected connections
for websocket in disconnected:
await self.disconnect(websocket)
async def broadcast_to_room(self, room: str, data: Dict[str, Any]):
"""Broadcast data to all clients subscribed to a specific room"""
if room not in self.room_subscriptions:
return
room_connections = self.room_subscriptions[room].copy()
if not room_connections:
return
message = json.dumps(data)
disconnected = []
for websocket in room_connections:
try:
await websocket.send_text(message)
# Update message count
if websocket in self.connection_metadata:
self.connection_metadata[websocket]["message_count"] += 1
except Exception as e:
logger.error(f"Error broadcasting to room {room}: {e}")
disconnected.append(websocket)
# Clean up disconnected connections
for websocket in disconnected:
await self.disconnect(websocket)
async def broadcast_to_sensor(self, sensor_id: str, data: Dict[str, Any]):
"""Broadcast data to all clients subscribed to a specific sensor"""
if sensor_id not in self.sensor_subscriptions:
return
sensor_connections = self.sensor_subscriptions[sensor_id].copy()
if not sensor_connections:
return
message = json.dumps(data)
disconnected = []
for websocket in sensor_connections:
try:
await websocket.send_text(message)
# Update message count
if websocket in self.connection_metadata:
self.connection_metadata[websocket]["message_count"] += 1
except Exception as e:
logger.error(f"Error broadcasting to sensor {sensor_id}: {e}")
disconnected.append(websocket)
# Clean up disconnected connections
for websocket in disconnected:
await self.disconnect(websocket)
async def broadcast_sensor_data(self, sensor_reading: SensorReading):
"""Broadcast sensor reading data to appropriate subscribers"""
data = {
"type": "sensor_data",
"sensor_id": sensor_reading.sensor_id,
"room": sensor_reading.room,
"sensor_type": sensor_reading.sensor_type.value,
"timestamp": sensor_reading.timestamp,
"data": {
"energy": sensor_reading.energy,
"co2": sensor_reading.co2,
"temperature": sensor_reading.temperature,
"humidity": sensor_reading.humidity,
"motion": sensor_reading.motion,
"power": sensor_reading.power,
"voltage": sensor_reading.voltage,
"current": sensor_reading.current,
"generation": sensor_reading.generation
},
"metadata": sensor_reading.metadata
}
# Broadcast to all connections
await self.broadcast_to_all(data)
# Broadcast to room-specific subscribers
if sensor_reading.room:
await self.broadcast_to_room(sensor_reading.room, data)
# Broadcast to sensor-specific subscribers
await self.broadcast_to_sensor(sensor_reading.sensor_id, data)
async def broadcast_room_metrics(self, room: str, metrics: Dict[str, Any]):
"""Broadcast room-level metrics to subscribers"""
data = {
"type": "room_metrics",
"room": room,
"timestamp": asyncio.get_event_loop().time(),
"metrics": metrics
}
# Broadcast to all connections
await self.broadcast_to_all(data)
# Broadcast to room-specific subscribers
await self.broadcast_to_room(room, data)
async def broadcast_system_event(self, event: Dict[str, Any]):
"""Broadcast system events to all subscribers"""
data = {
"type": "system_event",
"timestamp": asyncio.get_event_loop().time(),
"event": event
}
await self.broadcast_to_all(data)
async def broadcast_raw_data(self, raw_data: str):
"""Broadcast raw data from Redis or other sources"""
try:
# Try to parse as JSON
data = json.loads(raw_data)
# Add type if not present
if "type" not in data:
data["type"] = "raw_data"
await self.broadcast_to_all(data)
except json.JSONDecodeError:
# Send as raw string if not JSON
data = {
"type": "raw_data",
"data": raw_data,
"timestamp": asyncio.get_event_loop().time()
}
await self.broadcast_to_all(data)
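    # Illustrative sketch: one way to feed broadcast_raw_data from a Redis pub/sub
    # channel. The channel name and the redis.asyncio client passed in are
    # assumptions, not part of this module.
    async def relay_redis_channel(self, redis_client, channel: str = "sensor-stream"):
        """Forward every pub/sub message on `channel` to connected WebSocket clients."""
        pubsub = redis_client.pubsub()
        await pubsub.subscribe(channel)
        async for message in pubsub.listen():
            if message.get("type") != "message":
                continue
            payload = message["data"]
            if isinstance(payload, bytes):
                payload = payload.decode("utf-8")
            await self.broadcast_raw_data(payload)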
def get_connection_stats(self) -> Dict[str, Any]:
"""Get statistics about current WebSocket connections"""
total_connections = len(self.active_connections)
room_stats = {room: len(connections) for room, connections in self.room_subscriptions.items()}
sensor_stats = {sensor: len(connections) for sensor, connections in self.sensor_subscriptions.items()}
# Calculate message statistics
total_messages = sum(
metadata.get("message_count", 0)
for metadata in self.connection_metadata.values()
)
return {
"total_connections": total_connections,
"room_subscriptions": room_stats,
"sensor_subscriptions": sensor_stats,
"total_messages_sent": total_messages,
"active_rooms": len([room for room, connections in self.room_subscriptions.items() if connections]),
"active_sensors": len([sensor for sensor, connections in self.sensor_subscriptions.items() if connections])
}
async def send_connection_stats(self):
"""Send connection statistics to all clients"""
stats = self.get_connection_stats()
data = {
"type": "connection_stats",
"timestamp": asyncio.get_event_loop().time(),
"stats": stats
}
await self.broadcast_to_all(data)
async def ping_all_connections(self):
"""Send ping to all connections to keep them alive"""
data = {
"type": "ping",
"timestamp": asyncio.get_event_loop().time()
}
await self.broadcast_to_all(data)
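# Illustrative sketch: one way this manager might be exposed over a FastAPI
# WebSocket route with a keep-alive loop. The router, route path and query
# parameters are assumptions about how the module is mounted, not code from
# this commit.
from fastapi import APIRouter

router = APIRouter()
manager = WebSocketManager()

@router.websocket("/ws/sensors")
async def sensor_stream(websocket: WebSocket, room: Optional[str] = None, sensor_id: Optional[str] = None):
    """Subscribe a client, optionally filtered by room and/or sensor."""
    await manager.connect(websocket, room=room, sensor_id=sensor_id)
    try:
        while True:
            # Keep the socket open; inbound client messages are ignored here.
            await websocket.receive_text()
    except WebSocketDisconnect:
        await manager.disconnect(websocket)

async def keepalive_loop(interval_seconds: int = 30):
    """Ping all clients periodically so idle connections stay alive."""
    while True:
        await manager.ping_all_connections()
        await asyncio.sleep(interval_seconds)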