modular monolithic
monolith/src/core/__init__.py (new file, +1)
@@ -0,0 +1 @@
"""Core infrastructure components for the modular monolith."""
monolith/src/core/config.py (new file, +56)
@@ -0,0 +1,56 @@
"""Centralized configuration management."""
import os
from typing import Dict, Optional
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings."""

    # Application
    app_name: str = "Energy Dashboard Monolith"
    app_version: str = "1.0.0"
    debug: bool = False
    host: str = "0.0.0.0"
    port: int = 8000

    # MongoDB
    mongo_url: str = os.getenv("MONGO_URL", "mongodb://admin:password123@localhost:27017/?authSource=admin")

    # Module-specific databases (preserving isolation)
    sensors_db_name: str = "energy_dashboard_sensors"
    demand_response_db_name: str = "energy_dashboard_demand_response"
    data_ingestion_db_name: str = "digitalmente_ingestion"
    main_db_name: str = "energy_dashboard"

    # Redis
    redis_url: str = os.getenv("REDIS_URL", "redis://localhost:6379")
    redis_enabled: bool = True  # Can be disabled for full monolith mode

    # FTP Configuration (for data ingestion)
    ftp_sa4cps_host: str = os.getenv("FTP_SA4CPS_HOST", "ftp.sa4cps.pt")
    ftp_sa4cps_port: int = int(os.getenv("FTP_SA4CPS_PORT", "21"))
    ftp_sa4cps_username: str = os.getenv("FTP_SA4CPS_USERNAME", "curvascarga@sa4cps.pt")
    ftp_sa4cps_password: str = os.getenv("FTP_SA4CPS_PASSWORD", "")
    ftp_sa4cps_remote_path: str = os.getenv("FTP_SA4CPS_REMOTE_PATH", "/SLGs/")
    ftp_check_interval: int = int(os.getenv("FTP_CHECK_INTERVAL", "21600"))  # 6 hours
    ftp_skip_initial_scan: bool = os.getenv("FTP_SKIP_INITIAL_SCAN", "true").lower() == "true"

    # CORS
    cors_origins: list = ["*"]
    cors_allow_credentials: bool = True
    cors_allow_methods: list = ["*"]
    cors_allow_headers: list = ["*"]

    # Background Tasks
    health_check_interval: int = 30
    event_scheduler_interval: int = 60
    auto_response_interval: int = 30

    class Config:
        env_file = ".env"
        case_sensitive = False


# Global settings instance
settings = Settings()
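
Usage note (not part of this diff): a minimal sketch of how these settings resolve, assuming pydantic-settings' case-insensitive environment mapping and that the package is importable under the layout shown above; the DEBUG/PORT overrides are purely illustrative.

# Hypothetical usage sketch: fields resolve from the environment (or .env),
# falling back to the defaults declared on Settings.
import os

os.environ["DEBUG"] = "true"   # case-insensitive match on Settings.debug
os.environ["PORT"] = "9000"

from monolith.src.core.config import settings  # module-level singleton, built at import time

print(settings.app_name)   # "Energy Dashboard Monolith"
print(settings.debug)      # True  (parsed from the DEBUG env var)
print(settings.port)       # 9000

Because `settings` is instantiated at import time, environment overrides must be in place before the module is first imported.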
monolith/src/core/database.py (new file, +85)
@@ -0,0 +1,85 @@
"""Database connection management for all modules."""
import logging
from typing import Optional, Dict
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from .config import settings

logger = logging.getLogger(__name__)


class DatabaseManager:
    """Manages MongoDB connections for all modules."""

    def __init__(self):
        self._client: Optional[AsyncIOMotorClient] = None
        self._databases: Dict[str, AsyncIOMotorDatabase] = {}

    async def connect(self):
        """Establish connection to MongoDB."""
        try:
            logger.info(f"Connecting to MongoDB: {settings.mongo_url}")
            self._client = AsyncIOMotorClient(settings.mongo_url)

            # Test connection
            await self._client.admin.command('ping')
            logger.info("Successfully connected to MongoDB")

            # Initialize database references
            self._databases = {
                "main": self._client[settings.main_db_name],
                "sensors": self._client[settings.sensors_db_name],
                "demand_response": self._client[settings.demand_response_db_name],
                "data_ingestion": self._client[settings.data_ingestion_db_name],
            }

        except Exception as e:
            logger.error(f"Failed to connect to MongoDB: {e}")
            raise

    async def disconnect(self):
        """Close MongoDB connection."""
        if self._client:
            self._client.close()
            logger.info("Disconnected from MongoDB")

    def get_database(self, name: str) -> AsyncIOMotorDatabase:
        """Get database by name."""
        if name not in self._databases:
            raise ValueError(f"Database '{name}' not configured")
        return self._databases[name]

    @property
    def client(self) -> AsyncIOMotorClient:
        """Get the MongoDB client."""
        if not self._client:
            raise RuntimeError("Database not connected. Call connect() first.")
        return self._client

    @property
    def main_db(self) -> AsyncIOMotorDatabase:
        """Get main database."""
        return self.get_database("main")

    @property
    def sensors_db(self) -> AsyncIOMotorDatabase:
        """Get sensors database."""
        return self.get_database("sensors")

    @property
    def demand_response_db(self) -> AsyncIOMotorDatabase:
        """Get demand response database."""
        return self.get_database("demand_response")

    @property
    def data_ingestion_db(self) -> AsyncIOMotorDatabase:
        """Get data ingestion database."""
        return self.get_database("data_ingestion")


# Global database manager instance
db_manager = DatabaseManager()


async def get_database(name: str = "main") -> AsyncIOMotorDatabase:
    """Dependency injection function for database access."""
    return db_manager.get_database(name)
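
Usage note (not part of this diff): a minimal sketch of how `db_manager` could be wired into the application lifecycle. The FastAPI lifespan hook and the import path are assumptions; the actual app entry point is not included in this commit.

# Hypothetical wiring sketch: connect on startup, disconnect on shutdown.
from contextlib import asynccontextmanager

from fastapi import FastAPI

from monolith.src.core.database import db_manager


@asynccontextmanager
async def lifespan(app: FastAPI):
    await db_manager.connect()         # open the shared Motor client and database handles
    try:
        yield                          # application serves requests here
    finally:
        await db_manager.disconnect()  # close the client on shutdown


app = FastAPI(lifespan=lifespan)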
monolith/src/core/dependencies.py (new file, +39)
@@ -0,0 +1,39 @@
"""FastAPI dependency injection utilities."""
from typing import Optional
from fastapi import Depends, HTTPException, status
from motor.motor_asyncio import AsyncIOMotorDatabase
import redis.asyncio as aioredis

from .database import db_manager
from .redis import redis_manager
from .events import event_bus, EventBus


async def get_main_db() -> AsyncIOMotorDatabase:
    """Get main database dependency."""
    return db_manager.main_db


async def get_sensors_db() -> AsyncIOMotorDatabase:
    """Get sensors database dependency."""
    return db_manager.sensors_db


async def get_demand_response_db() -> AsyncIOMotorDatabase:
    """Get demand response database dependency."""
    return db_manager.demand_response_db


async def get_data_ingestion_db() -> AsyncIOMotorDatabase:
    """Get data ingestion database dependency."""
    return db_manager.data_ingestion_db


async def get_redis() -> Optional[aioredis.Redis]:
    """Get Redis client dependency."""
    return redis_manager.client


def get_event_bus() -> EventBus:
    """Get event bus dependency."""
    return event_bus
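
Usage note (not part of this diff): a minimal sketch of consuming these dependencies from a module router. The route path and the "sensors" collection name are hypothetical, and the sketch assumes `db_manager.connect()` has already run at startup.

# Hypothetical route sketch using the sensors-database dependency.
from fastapi import APIRouter, Depends
from motor.motor_asyncio import AsyncIOMotorDatabase

from monolith.src.core.dependencies import get_sensors_db

router = APIRouter()


@router.get("/sensors")
async def list_sensors(db: AsyncIOMotorDatabase = Depends(get_sensors_db)):
    # Query the module's own database; the "sensors" collection name is illustrative.
    return await db["sensors"].find({}, {"_id": 0}).to_list(length=100)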
monolith/src/core/events.py (new file, +137)
@@ -0,0 +1,137 @@
"""In-process event bus for inter-module communication."""
import asyncio
import logging
from typing import Dict, List, Callable, Any, Set
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
import json

logger = logging.getLogger(__name__)


@dataclass
class Event:
    """Event data structure."""
    topic: str
    data: Any
    timestamp: datetime
    source: str = "system"

    def to_dict(self) -> dict:
        """Convert to dictionary."""
        return {
            "topic": self.topic,
            "data": self.data,
            "timestamp": self.timestamp.isoformat(),
            "source": self.source
        }


class EventBus:
    """
    In-process event bus replacing Redis pub/sub.
    Provides asynchronous event publishing and subscription.
    """

    def __init__(self):
        self._subscribers: Dict[str, List[Callable]] = defaultdict(list)
        self._event_history: List[Event] = []
        self._max_history: int = 1000
        self._lock = asyncio.Lock()

    async def publish(self, topic: str, data: Any, source: str = "system"):
        """
        Publish an event to a topic.

        Args:
            topic: Event topic/channel name
            data: Event data (will be JSON serialized if dict)
            source: Event source identifier
        """
        event = Event(
            topic=topic,
            data=data,
            timestamp=datetime.utcnow(),
            source=source
        )

        # Store in history
        async with self._lock:
            self._event_history.append(event)
            if len(self._event_history) > self._max_history:
                self._event_history.pop(0)

        # Notify subscribers
        if topic in self._subscribers:
            logger.debug(f"Publishing event to topic '{topic}': {len(self._subscribers[topic])} subscribers")

            # Create tasks for all subscribers
            tasks = []
            for callback in self._subscribers[topic]:
                tasks.append(self._call_subscriber(callback, event))

            # Execute all callbacks concurrently
            if tasks:
                await asyncio.gather(*tasks, return_exceptions=True)
        else:
            logger.debug(f"No subscribers for topic '{topic}'")

    async def _call_subscriber(self, callback: Callable, event: Event):
        """Call a subscriber callback with error handling."""
        try:
            if asyncio.iscoroutinefunction(callback):
                await callback(event.data)
            else:
                callback(event.data)
        except Exception as e:
            logger.error(f"Error in event subscriber: {e}", exc_info=True)

    def subscribe(self, topic: str, callback: Callable):
        """
        Subscribe to events on a topic.

        Args:
            topic: Event topic/channel name
            callback: Async or sync callback function that receives event data
        """
        self._subscribers[topic].append(callback)
        logger.info(f"Subscribed to topic '{topic}'. Total subscribers: {len(self._subscribers[topic])}")

    def unsubscribe(self, topic: str, callback: Callable):
        """Unsubscribe from a topic."""
        if topic in self._subscribers and callback in self._subscribers[topic]:
            self._subscribers[topic].remove(callback)
            logger.info(f"Unsubscribed from topic '{topic}'")

    def get_topics(self) -> List[str]:
        """Get list of all topics with subscribers."""
        return list(self._subscribers.keys())

    def get_subscriber_count(self, topic: str) -> int:
        """Get number of subscribers for a topic."""
        return len(self._subscribers.get(topic, []))

    async def get_event_history(self, topic: str = None, limit: int = 100) -> List[Event]:
        """Get event history, optionally filtered by topic."""
        async with self._lock:
            if topic:
                events = [e for e in self._event_history if e.topic == topic]
            else:
                events = self._event_history.copy()

        return events[-limit:]


# Global event bus instance
event_bus = EventBus()


# Common event topics (replaces Redis channels)
class EventTopics:
    """Standard event topic names."""
    ENERGY_DATA = "energy_data"
    DR_EVENTS = "dr_events"
    SENSOR_EVENTS = "sensor_events"
    SYSTEM_EVENTS = "system_events"
    DATA_INGESTION = "data_ingestion"
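
Usage note (not part of this diff): a minimal sketch of the publish/subscribe flow between modules. The subscriber and the payload shape are illustrative; subscribers receive the event payload (`event.data`), not the `Event` wrapper.

# Hypothetical usage sketch for the in-process event bus.
import asyncio

from monolith.src.core.events import event_bus, EventTopics


async def on_energy_reading(data):
    # Called for each published event on the ENERGY_DATA topic.
    print("new reading:", data)


async def main():
    event_bus.subscribe(EventTopics.ENERGY_DATA, on_energy_reading)
    await event_bus.publish(
        EventTopics.ENERGY_DATA,
        {"sensor_id": "s-42", "kwh": 1.7},  # illustrative payload
        source="sensors",
    )


asyncio.run(main())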
monolith/src/core/logging_config.py (new file, +25)
@@ -0,0 +1,25 @@
"""Logging configuration."""
import logging
import sys
from .config import settings


def setup_logging():
    """Configure application logging."""
    log_level = logging.DEBUG if settings.debug else logging.INFO

    logging.basicConfig(
        level=log_level,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        handlers=[
            logging.StreamHandler(sys.stdout)
        ]
    )

    # Set third-party loggers to WARNING
    logging.getLogger("uvicorn").setLevel(logging.WARNING)
    logging.getLogger("motor").setLevel(logging.WARNING)
    logging.getLogger("redis").setLevel(logging.WARNING)

    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured. Level: {log_level}")
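
Usage note (not part of this diff): setup_logging() is expected to be called once at process start, before the modules begin emitting log records; the import path in this sketch is an assumption.

# Hypothetical entry-point sketch: configure logging once, before anything else runs.
from monolith.src.core.logging_config import setup_logging

setup_logging()  # level follows settings.debug (DEBUG when true, INFO otherwise)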
monolith/src/core/redis.py (new file, +61)
@@ -0,0 +1,61 @@
"""Redis connection management (optional, for caching)."""
import logging
from typing import Optional
import redis.asyncio as aioredis
from .config import settings

logger = logging.getLogger(__name__)


class RedisManager:
    """Manages Redis connection for caching."""

    def __init__(self):
        self._client: Optional[aioredis.Redis] = None

    async def connect(self):
        """Establish connection to Redis."""
        if not settings.redis_enabled:
            logger.info("Redis is disabled in settings")
            return

        try:
            logger.info(f"Connecting to Redis: {settings.redis_url}")
            self._client = await aioredis.from_url(
                settings.redis_url,
                encoding="utf-8",
                decode_responses=True
            )

            # Test connection
            await self._client.ping()
            logger.info("Successfully connected to Redis")

        except Exception as e:
            logger.warning(f"Failed to connect to Redis: {e}. Continuing without Redis cache.")
            self._client = None

    async def disconnect(self):
        """Close Redis connection."""
        if self._client:
            await self._client.close()
            logger.info("Disconnected from Redis")

    @property
    def client(self) -> Optional[aioredis.Redis]:
        """Get the Redis client."""
        return self._client

    @property
    def is_available(self) -> bool:
        """Check if Redis is available."""
        return self._client is not None


# Global Redis manager instance
redis_manager = RedisManager()


async def get_redis() -> Optional[aioredis.Redis]:
    """Dependency injection function for Redis access."""
    return redis_manager.client
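
Usage note (not part of this diff): a minimal caching sketch that degrades gracefully when Redis is unavailable, matching the optional-cache intent above; the key naming, TTL, and helper functions are illustrative.

# Hypothetical caching sketch built on redis_manager.
from typing import Optional

from monolith.src.core.redis import redis_manager


async def get_cached_reading(sensor_id: str) -> Optional[str]:
    if not redis_manager.is_available:
        return None  # fall back to the database when Redis is absent
    return await redis_manager.client.get(f"reading:{sensor_id}")


async def cache_reading(sensor_id: str, value: str) -> None:
    if redis_manager.is_available:
        # decode_responses=True means values are stored and returned as str
        await redis_manager.client.set(f"reading:{sensor_id}", value, ex=60)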