From c3ba7c0dc0ba0971ff1c10f6766c525a8f21afff Mon Sep 17 00:00:00 2001
From: rafaeldpsilva
Date: Mon, 22 Sep 2025 15:12:24 +0100
Subject: [PATCH] Remove layered architecture files and related modules

---
 layers/__init__.py                                  |   1 -
 layers/__pycache__/__init__.cpython-312.pyc         | Bin 171 -> 0 bytes
 layers/__pycache__/__init__.cpython-39.pyc          | Bin 165 -> 0 bytes
 layers/business/__init__.py                         |   1 -
 .../__pycache__/__init__.cpython-39.pyc             | Bin 174 -> 0 bytes
 .../analytics_service.cpython-39.pyc                | Bin 8561 -> 0 bytes
 .../cleanup_service.cpython-39.pyc                  | Bin 6456 -> 0 bytes
 .../__pycache__/room_service.cpython-39.pyc         | Bin 6572 -> 0 bytes
 .../__pycache__/sensor_service.cpython-39.pyc       | Bin 8935 -> 0 bytes
 layers/business/analytics_service.py                | 300 -------------
 layers/business/cleanup_service.py                  | 234 ----------
 layers/business/room_service.py                     | 262 ------------
 layers/business/sensor_service.py                   | 328 --------------
 layers/infrastructure/__init__.py                   |   1 -
 .../__pycache__/__init__.cpython-312.pyc            | Bin 186 -> 0 bytes
 .../__pycache__/__init__.cpython-39.pyc             | Bin 180 -> 0 bytes
 .../database_connection.cpython-312.pyc             | Bin 5584 -> 0 bytes
 .../database_connection.cpython-39.pyc              | Bin 3133 -> 0 bytes
 .../redis_connection.cpython-39.pyc                 | Bin 3383 -> 0 bytes
 .../__pycache__/repositories.cpython-39.pyc         | Bin 14132 -> 0 bytes
 layers/infrastructure/database_connection.py        |  95 ----
 layers/infrastructure/redis_connection.py           |  80 ----
 layers/infrastructure/repositories.py               | 362 ----------------
 layers/presentation/__init__.py                     |   1 -
 .../__pycache__/__init__.cpython-39.pyc             | Bin 178 -> 0 bytes
 .../__pycache__/api_routes.cpython-39.pyc           | Bin 10672 -> 0 bytes
 .../redis_subscriber.cpython-39.pyc                 | Bin 4387 -> 0 bytes
 .../websocket_handler.cpython-39.pyc                | Bin 3740 -> 0 bytes
 layers/presentation/api_routes.py                   | 404 ------------------
 layers/presentation/redis_subscriber.py             | 128 ------
 layers/presentation/websocket_handler.py            |  97 -----
 main.py                                             | 202 ---------
 microservices_example.md                            |  84 ----
 test_structure.py                                   | 221 ----------
 34 files changed, 2801 deletions(-)
 delete mode 100644 layers/__init__.py
 delete mode 100644 layers/__pycache__/__init__.cpython-312.pyc
 delete mode 100644 layers/__pycache__/__init__.cpython-39.pyc
 delete mode 100644 layers/business/__init__.py
 delete mode 100644 layers/business/__pycache__/__init__.cpython-39.pyc
 delete mode 100644 layers/business/__pycache__/analytics_service.cpython-39.pyc
 delete mode 100644 layers/business/__pycache__/cleanup_service.cpython-39.pyc
 delete mode 100644 layers/business/__pycache__/room_service.cpython-39.pyc
 delete mode 100644 layers/business/__pycache__/sensor_service.cpython-39.pyc
 delete mode 100644 layers/business/analytics_service.py
 delete mode 100644 layers/business/cleanup_service.py
 delete mode 100644 layers/business/room_service.py
 delete mode 100644 layers/business/sensor_service.py
 delete mode 100644 layers/infrastructure/__init__.py
 delete mode 100644 layers/infrastructure/__pycache__/__init__.cpython-312.pyc
 delete mode 100644 layers/infrastructure/__pycache__/__init__.cpython-39.pyc
 delete mode 100644 layers/infrastructure/__pycache__/database_connection.cpython-312.pyc
 delete mode 100644 layers/infrastructure/__pycache__/database_connection.cpython-39.pyc
 delete mode 100644 layers/infrastructure/__pycache__/redis_connection.cpython-39.pyc
 delete mode 100644 layers/infrastructure/__pycache__/repositories.cpython-39.pyc
 delete mode 100644 layers/infrastructure/database_connection.py
 delete mode 100644 layers/infrastructure/redis_connection.py
 delete mode 100644 layers/infrastructure/repositories.py
 delete mode 100644 layers/presentation/__init__.py
 delete mode 100644 layers/presentation/__pycache__/__init__.cpython-39.pyc
 delete mode 100644 layers/presentation/__pycache__/api_routes.cpython-39.pyc
 delete mode 100644 layers/presentation/__pycache__/redis_subscriber.cpython-39.pyc
 delete mode 100644 layers/presentation/__pycache__/websocket_handler.cpython-39.pyc
 delete mode 100644 layers/presentation/api_routes.py
 delete mode 100644 layers/presentation/redis_subscriber.py
 delete mode 100644 layers/presentation/websocket_handler.py
 delete mode 100644 main.py
 delete mode 100644 microservices_example.md
 delete mode 100644 test_structure.py

diff --git a/layers/__init__.py b/layers/__init__.py
deleted file mode 100644
index e8a337c..0000000
--- a/layers/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file to make this a Python package
\ No newline at end of file
diff --git a/layers/__pycache__/__init__.cpython-312.pyc b/layers/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
Binary files a/layers/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/layers/__pycache__/__init__.cpython-39.pyc b/layers/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/__pycache__/__init__.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/__init__.py b/layers/business/__init__.py
deleted file mode 100644
index e8a337c..0000000
--- a/layers/business/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file to make this a Python package
\ No newline at end of file
diff --git a/layers/business/__pycache__/__init__.cpython-39.pyc b/layers/business/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/business/__pycache__/__init__.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/__pycache__/analytics_service.cpython-39.pyc b/layers/business/__pycache__/analytics_service.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/business/__pycache__/analytics_service.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/__pycache__/cleanup_service.cpython-39.pyc b/layers/business/__pycache__/cleanup_service.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/business/__pycache__/cleanup_service.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/__pycache__/room_service.cpython-39.pyc b/layers/business/__pycache__/room_service.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/business/__pycache__/room_service.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/__pycache__/sensor_service.cpython-39.pyc b/layers/business/__pycache__/sensor_service.cpython-39.pyc
deleted file mode 100644
Binary files a/layers/business/__pycache__/sensor_service.cpython-39.pyc and /dev/null differ
diff --git a/layers/business/analytics_service.py b/layers/business/analytics_service.py
deleted file mode 100644
index b23c7bf..0000000
--- a/layers/business/analytics_service.py
+++ /dev/null
@@ -1,300 +0,0 @@
-"""
-Analytics business logic service
-Business Layer - handles analytics calculations and data aggregations
-"""
-from datetime import datetime, timedelta
-from typing import Dict, Any, List, Optional
-import logging
-
-from ..infrastructure.repositories import SensorReadingRepository
-
-logger = logging.getLogger(__name__)
-
-class AnalyticsService:
-    """Service for analytics and reporting operations"""
-
-    def __init__(self):
-        self.sensor_reading_repo = SensorReadingRepository()
-
-    async def get_analytics_summary(self, hours: int = 24) -> Dict[str, Any]:
-        """Get comprehensive analytics summary for the specified time period"""
-        try:
-            start_time = datetime.utcnow() - timedelta(hours=hours)
-
-            # Sensor-level analytics pipeline
-            sensor_pipeline = [
-                {"$match": {"created_at": {"$gte": start_time}}},
-                {"$group": {
-                    "_id": {
-                        "sensor_id": "$sensor_id",
-                        "room": "$room",
-                        "sensor_type": "$sensor_type"
-                    },
-                    "reading_count": {"$sum": 1},
-                    "avg_energy": {"$avg": "$energy.value"},
-                    "total_energy": {"$sum": "$energy.value"},
-                    "avg_co2": {"$avg": "$co2.value"},
-                    "max_co2": {"$max": "$co2.value"},
-                    "avg_temperature": {"$avg": "$temperature.value"},
-                    "latest_timestamp": {"$max": "$timestamp"}
-                }},
-                {"$sort": {"total_energy": -1}}
-            ]
-
-            sensor_analytics = await self.sensor_reading_repo.aggregate(sensor_pipeline)
-
-            # Room-level analytics pipeline
-            room_pipeline = [
-                {"$match": {"created_at": {"$gte": start_time}, "room": {"$ne": None}}},
-                {"$group": {
-                    "_id": "$room",
-                    "sensor_count": {"$addToSet": "$sensor_id"},
-                    "total_energy": {"$sum": "$energy.value"},
-                    "avg_co2": {"$avg": "$co2.value"},
-                    "max_co2": {"$max": "$co2.value"},
-                    "reading_count": {"$sum": 1}
-                }},
-                {"$project": {
-                    "room": "$_id",
-                    "sensor_count": {"$size": "$sensor_count"},
-                    "total_energy": 1,
-                    "avg_co2": 1,
-                    "max_co2": 1,
-                    "reading_count": 1
-                }},
-                {"$sort": {"total_energy": -1}}
-            ]
-
-            room_analytics = await self.sensor_reading_repo.aggregate(room_pipeline)
-
-            # Calculate summary statistics
-            summary_stats = self._calculate_summary_stats(sensor_analytics, room_analytics)
-
-            return {
-                "period_hours": hours,
-                "start_time": start_time.isoformat(),
-                "sensor_analytics": sensor_analytics,
-                "room_analytics": room_analytics,
-                "summary": summary_stats
-            }
-
-        except Exception as e:
-            logger.error(f"Error getting analytics summary: {e}")
-            return {
-                "period_hours": hours,
-                "start_time": None,
-                "sensor_analytics": [],
-                "room_analytics": [],
-                "summary": {}
-            }
-
-    def _calculate_summary_stats(self, sensor_analytics: List[Dict],
-                                 room_analytics: List[Dict]) -> Dict[str, Any]:
-        """Calculate summary statistics from analytics data"""
-        total_readings = sum(item["reading_count"] for item in sensor_analytics)
-        total_energy = sum(item.get("total_energy", 0) or 0 for item in sensor_analytics)
-
-        # Energy consumption insights
-        energy_insights = {
-            "total_consumption_kwh": round(total_energy, 2),
-            "average_consumption_per_sensor": (
-                round(total_energy / len(sensor_analytics), 2)
-                if sensor_analytics else 0
-            ),
-            "top_energy_consumer": (
-                sensor_analytics[0]["_id"]["sensor_id"]
-                if sensor_analytics else None
-            )
-        }
-
-        # CO2 insights
-        co2_values = [item.get("avg_co2") for item in sensor_analytics if item.get("avg_co2")]
-        co2_insights = {
-            "average_co2_level": (
-                round(sum(co2_values) / len(co2_values), 1)
-                if co2_values else 0
-            ),
-            "sensors_with_high_co2": len([
-                co2 for co2 in co2_values if co2 and co2 > 1000
-            ]),
-            "sensors_with_critical_co2": len([
-                co2 for co2 in co2_values if co2 and co2 > 5000
-            ])
-        }
-
-        return {
-            "total_sensors_analyzed": len(sensor_analytics),
-            "total_rooms_analyzed": len(room_analytics),
-            "total_readings": total_readings,
-            "energy_insights": energy_insights,
-            "co2_insights": co2_insights
-        }
-
-    async def get_energy_trends(self, hours: int = 168) -> Dict[str, Any]:
-        """Get energy consumption trends (default: last week)"""
-        try:
-            start_time = datetime.utcnow() - timedelta(hours=hours)
-
-            # Hourly energy consumption pipeline
-            pipeline = [
-                {"$match": {
-                    "created_at": {"$gte": start_time},
-                    "energy.value": {"$exists": True}
-                }},
-                {"$group": {
-                    "_id": {
-                        "year": {"$year": "$created_at"},
-                        "month": {"$month": "$created_at"},
-                        "day": {"$dayOfMonth": "$created_at"},
-                        "hour": {"$hour": "$created_at"}
-                    },
-                    "total_energy": {"$sum": "$energy.value"},
-                    "sensor_count": {"$addToSet": "$sensor_id"},
-                    "reading_count": {"$sum": 1}
-                }},
-                {"$project": {
-                    "_id": 0,
-                    "timestamp": {
-                        "$dateFromParts": {
-                            "year": "$_id.year",
-                            "month": "$_id.month",
-                            "day": "$_id.day",
-                            "hour": "$_id.hour"
-                        }
-                    },
-                    "total_energy": {"$round": ["$total_energy", 2]},
-                    "sensor_count": {"$size": "$sensor_count"},
-                    "reading_count": 1
-                }},
-                {"$sort": {"timestamp": 1}}
-            ]
-
-            trends = await self.sensor_reading_repo.aggregate(pipeline)
-
-            # Calculate trend insights
-            insights = self._calculate_trend_insights(trends)
-
-            return {
-                "period_hours": hours,
-                "data_points": len(trends),
-                "trends": trends,
-                "insights": insights
-            }
-
-        except Exception as e:
-            logger.error(f"Error getting energy trends: {e}")
-            return {
-                "period_hours": hours,
-                "data_points": 0,
-                "trends": [],
-                "insights": {}
-            }
-
-    def _calculate_trend_insights(self, trends: List[Dict]) -> Dict[str, Any]:
-        """Calculate insights from trend data"""
-        if not trends:
-            return {}
-
-        energy_values = [item["total_energy"] for item in trends]
-
-        # Peak and low consumption
-        max_consumption = max(energy_values)
-        min_consumption = min(energy_values)
-        avg_consumption = sum(energy_values) / len(energy_values)
-
-        # Find peak time
-        peak_item = max(trends, key=lambda x: x["total_energy"])
-        peak_time = peak_item["timestamp"]
-
-        return {
-            "peak_consumption_kwh": max_consumption,
-            "lowest_consumption_kwh": min_consumption,
-            "average_consumption_kwh": round(avg_consumption, 2),
-            "peak_time": peak_time.isoformat() if hasattr(peak_time, 'isoformat') else str(peak_time),
-            "consumption_variance": round(max_consumption - min_consumption, 2)
-        }
-
-    async def get_room_comparison(self, hours: int = 24) -> Dict[str, Any]:
-        """Get room-by-room comparison analytics"""
-        try:
-            start_time = datetime.utcnow() - timedelta(hours=hours)
-
-            pipeline = [
-                {"$match": {
-                    "created_at": {"$gte": start_time},
-                    "room": {"$ne": None}
-                }},
-                {"$group": {
-                    "_id": "$room",
-                    "total_energy": {"$sum": "$energy.value"},
-                    "avg_energy": {"$avg": "$energy.value"},
-                    "avg_co2": {"$avg": "$co2.value"},
-                    "max_co2": {"$max": "$co2.value"},
-                    "avg_temperature": {"$avg": "$temperature.value"},
-                    "sensor_count": {"$addToSet": "$sensor_id"},
-                    "reading_count": {"$sum": 1}
-                }},
-                {"$project": {
-                    "room": "$_id",
-                    "_id": 0,
-                    "total_energy": {"$round": [{"$ifNull": ["$total_energy", 0]}, 2]},
-                    "avg_energy": {"$round": [{"$ifNull": ["$avg_energy", 0]}, 2]},
-                    "avg_co2": {"$round": [{"$ifNull": ["$avg_co2", 0]}, 1]},
-                    "max_co2": {"$round": [{"$ifNull": ["$max_co2", 0]}, 1]},
-                    "avg_temperature": {"$round": [{"$ifNull": ["$avg_temperature", 0]}, 1]},
-                    "sensor_count": {"$size": "$sensor_count"},
-                    "reading_count": 1
-                }},
-                {"$sort": {"total_energy": -1}}
-            ]
-
-            room_comparison = await self.sensor_reading_repo.aggregate(pipeline)
-
-            # Calculate comparison insights
-            insights = self._calculate_room_insights(room_comparison)
-
-            return {
-                "period_hours": hours,
-                "rooms_analyzed": len(room_comparison),
-                "comparison": room_comparison,
-                "insights": insights
-            }
-
-        except Exception as e:
-            logger.error(f"Error getting room comparison: {e}")
-            return {
-                "period_hours": hours,
-                "rooms_analyzed": 0,
-                "comparison": [],
-                "insights": {}
-            }
-
-    def _calculate_room_insights(self, room_data: List[Dict]) -> Dict[str, Any]:
-        """Calculate insights from room comparison data"""
-        if not room_data:
-            return {}
-
-        # Energy insights
-        total_energy = sum(room["total_energy"] for room in room_data)
-        highest_consumer = room_data[0] if room_data else None
-        lowest_consumer = min(room_data, key=lambda x: x["total_energy"]) if room_data else None
-
-        # CO2 insights
-        rooms_with_high_co2 = [
-            room for room in room_data
-            if room.get("avg_co2", 0) > 1000
-        ]
-
-        # Temperature insights
-        temp_values = [room.get("avg_temperature", 0) for room in room_data if room.get("avg_temperature")]
-        avg_building_temp = sum(temp_values) / len(temp_values) if temp_values else 0
-
-        return {
-            "total_building_energy_kwh": round(total_energy, 2),
-            "highest_energy_consumer": highest_consumer["room"] if highest_consumer else None,
-            "lowest_energy_consumer": lowest_consumer["room"] if lowest_consumer else None,
-            "rooms_with_high_co2": len(rooms_with_high_co2),
-            "high_co2_rooms": [room["room"] for room in rooms_with_high_co2],
-            "average_building_temperature": round(avg_building_temp, 1),
-            "total_active_sensors": sum(room["sensor_count"] for room in room_data)
-        }
\ No newline at end of file
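For context on what this commit removes: a minimal sketch of how the deleted AnalyticsService was typically driven. This is hypothetical usage, not part of the patch; it assumes the pre-removal import path layers.business.analytics_service and a reachable MongoDB instance behind SensorReadingRepository.

    import asyncio

    from layers.business.analytics_service import AnalyticsService  # pre-removal path

    async def print_daily_summary() -> None:
        service = AnalyticsService()
        # Aggregate the last 24 hours of readings (sensor-, room-, and building-level)
        summary = await service.get_analytics_summary(hours=24)
        energy = summary["summary"].get("energy_insights", {})
        print("total kWh:", energy.get("total_consumption_kwh"))
        print("top consumer:", energy.get("top_energy_consumer"))

    asyncio.run(print_daily_summary())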
diff --git a/layers/business/cleanup_service.py b/layers/business/cleanup_service.py
deleted file mode 100644
index 76219b4..0000000
--- a/layers/business/cleanup_service.py
+++ /dev/null
@@ -1,234 +0,0 @@
-"""
-Data cleanup and maintenance service
-Business Layer - handles data retention policies and system maintenance
-"""
-import asyncio
-from datetime import datetime, timedelta
-from typing import Dict, Any
-import logging
-
-from ..infrastructure.database_connection import database_connection
-from ..infrastructure.repositories import SensorReadingRepository
-
-logger = logging.getLogger(__name__)
-
-class CleanupService:
-    """Service for data cleanup and maintenance operations"""
-
-    def __init__(self):
-        self.sensor_reading_repo = SensorReadingRepository()
-        self.is_running = False
-        self.cleanup_task = None
-
-    async def start_scheduled_cleanup(self, interval_hours: int = 24) -> None:
-        """Start scheduled cleanup process"""
-        if self.is_running:
-            logger.warning("Cleanup service is already running")
-            return
-
-        self.is_running = True
-        self.cleanup_task = asyncio.create_task(self._cleanup_loop(interval_hours))
-        logger.info(f"Started scheduled cleanup service (interval: {interval_hours} hours)")
-
-    async def stop_scheduled_cleanup(self) -> None:
-        """Stop scheduled cleanup process"""
-        self.is_running = False
-        if self.cleanup_task:
-            self.cleanup_task.cancel()
-            try:
-                await self.cleanup_task
-            except asyncio.CancelledError:
-                pass
-        logger.info("Cleanup service stopped")
-
-    async def _cleanup_loop(self, interval_hours: int) -> None:
-        """Main cleanup loop"""
-        while self.is_running:
-            try:
-                await self.cleanup_old_data()
-                # Wait for next cleanup interval
-                await asyncio.sleep(interval_hours * 3600)  # Convert hours to seconds
-            except Exception as e:
-                logger.error(f"Error in scheduled cleanup: {e}")
-                # Wait 1 hour before retrying on error
-                await asyncio.sleep(3600)
-
-    async def cleanup_old_data(self) -> Dict[str, int]:
-        """Perform data cleanup based on retention policies"""
-        try:
-            cleanup_results = {}
-            db = await database_connection.get_database()
-
-            # Delete sensor readings older than 90 days
-            sensor_retention_date = datetime.utcnow() - timedelta(days=90)
-            sensor_result = await db.sensor_readings.delete_many({
-                "created_at": {"$lt": sensor_retention_date}
-            })
-            cleanup_results["sensor_readings_deleted"] = sensor_result.deleted_count
-
-            if sensor_result.deleted_count > 0:
-                logger.info(f"Deleted {sensor_result.deleted_count} old sensor readings")
-
-            # Delete room metrics older than 30 days
-            room_retention_date = datetime.utcnow() - timedelta(days=30)
-            room_result = await db.room_metrics.delete_many({
-                "created_at": {"$lt": room_retention_date}
-            })
-            cleanup_results["room_metrics_deleted"] = room_result.deleted_count
-
-            if room_result.deleted_count > 0:
-                logger.info(f"Deleted {room_result.deleted_count} old room metrics")
-
-            # Delete system events older than 60 days
-            events_retention_date = datetime.utcnow() - timedelta(days=60)
-            events_result = await db.system_events.delete_many({
-                "created_at": {"$lt": events_retention_date}
-            })
-            cleanup_results["system_events_deleted"] = events_result.deleted_count
-
-            if events_result.deleted_count > 0:
-                logger.info(f"Deleted {events_result.deleted_count} old system events")
-
-            # Clean up orphaned sensor metadata (sensors with no recent readings)
-            orphaned_retention_date = datetime.utcnow() - timedelta(days=30)
-
-            # Find sensors with no recent readings
-            active_sensors = await db.sensor_readings.distinct("sensor_id", {
-                "created_at": {"$gte": orphaned_retention_date}
-            })
-
-            orphaned_result = await db.sensor_metadata.delete_many({
-                "sensor_id": {"$nin": active_sensors},
-                "last_seen": {"$lt": orphaned_retention_date}
-            })
-            cleanup_results["orphaned_metadata_deleted"] = orphaned_result.deleted_count
-
-            if orphaned_result.deleted_count > 0:
-                logger.info(f"Deleted {orphaned_result.deleted_count} orphaned sensor metadata records")
-
-            return cleanup_results
-
-        except Exception as e:
-            logger.error(f"Error during data cleanup: {e}")
-            return {"error": str(e)}
-
-    async def get_storage_statistics(self) -> Dict[str, Any]:
-        """Get storage statistics for different collections"""
-        try:
-            db = await database_connection.get_database()
-
-            stats = {}
-
-            # Sensor readings statistics
-            sensor_stats = await db.command("collStats", "sensor_readings")
-            stats["sensor_readings"] = {
-                "count": sensor_stats.get("count", 0),
-                "size_bytes": sensor_stats.get("size", 0),
-                "avg_obj_size": sensor_stats.get("avgObjSize", 0),
-                "storage_size": sensor_stats.get("storageSize", 0)
-            }
-
-            # Room metrics statistics
-            room_stats = await db.command("collStats", "room_metrics")
-            stats["room_metrics"] = {
-                "count": room_stats.get("count", 0),
-                "size_bytes": room_stats.get("size", 0),
-                "avg_obj_size": room_stats.get("avgObjSize", 0),
-                "storage_size": room_stats.get("storageSize", 0)
-            }
-
-            # System events statistics
-            events_stats = await db.command("collStats", "system_events")
-            stats["system_events"] = {
-                "count": events_stats.get("count", 0),
-                "size_bytes": events_stats.get("size", 0),
-                "avg_obj_size": events_stats.get("avgObjSize", 0),
-                "storage_size": events_stats.get("storageSize", 0)
-            }
-
-            # Sensor metadata statistics
-            metadata_stats = await db.command("collStats", "sensor_metadata")
-            stats["sensor_metadata"] = {
-                "count": metadata_stats.get("count", 0),
-                "size_bytes": metadata_stats.get("size", 0),
-                "avg_obj_size": metadata_stats.get("avgObjSize", 0),
-                "storage_size": metadata_stats.get("storageSize", 0)
-            }
-
-            # Calculate totals
-            total_documents = sum(collection["count"] for collection in stats.values())
-            total_size = sum(collection["size_bytes"] for collection in stats.values())
-            total_storage = sum(collection["storage_size"] for collection in stats.values())
-
-            stats["totals"] = {
-                "total_documents": total_documents,
-                "total_size_bytes": total_size,
-                "total_storage_bytes": total_storage,
-                "total_size_mb": round(total_size / (1024 * 1024), 2),
-                "total_storage_mb": round(total_storage / (1024 * 1024), 2)
-            }
-
-            return stats
-
-        except Exception as e:
-            logger.error(f"Error getting storage statistics: {e}")
-            return {"error": str(e)}
-
-    async def get_data_retention_info(self) -> Dict[str, Any]:
-        """Get information about data retention policies and old data"""
-        try:
-            db = await database_connection.get_database()
-
-            # Current date references
-            now = datetime.utcnow()
-            sensor_cutoff = now - timedelta(days=90)
-            room_cutoff = now - timedelta(days=30)
-            events_cutoff = now - timedelta(days=60)
-
-            retention_info = {}
-
-            # Sensor readings retention info
-            old_sensor_count = await db.sensor_readings.count_documents({
-                "created_at": {"$lt": sensor_cutoff}
-            })
-            retention_info["sensor_readings"] = {
-                "retention_days": 90,
-                "cutoff_date": sensor_cutoff.isoformat(),
-                "old_records_count": old_sensor_count
-            }
-
-            # Room metrics retention info
-            old_room_count = await db.room_metrics.count_documents({
-                "created_at": {"$lt": room_cutoff}
-            })
-            retention_info["room_metrics"] = {
-                "retention_days": 30,
-                "cutoff_date": room_cutoff.isoformat(),
-                "old_records_count": old_room_count
-            }
-
-            # System events retention info
-            old_events_count = await db.system_events.count_documents({
-                "created_at": {"$lt": events_cutoff}
-            })
-            retention_info["system_events"] = {
-                "retention_days": 60,
-                "cutoff_date": events_cutoff.isoformat(),
-                "old_records_count": old_events_count
-            }
-
-            return retention_info
-
-        except Exception as e:
-            logger.error(f"Error getting retention info: {e}")
-            return {"error": str(e)}
-
-    def is_cleanup_running(self) -> bool:
-        """Check if cleanup service is currently running"""
-        return self.is_running and (
-            self.cleanup_task is not None and
-            not self.cleanup_task.done()
-        )
-
-# Global cleanup service instance
-cleanup_service = CleanupService()
\ No newline at end of file
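Likewise, a hedged sketch of the lifecycle the module-level cleanup_service singleton above was built around: start the daily retention loop, run the application, stop it on shutdown. Hypothetical wiring, assuming the pre-removal package layout and a live database connection.

    import asyncio

    from layers.business.cleanup_service import cleanup_service  # pre-removal path

    async def run_with_retention() -> None:
        # Kick off the 24-hour retention loop (90/60/30-day policies above)
        await cleanup_service.start_scheduled_cleanup(interval_hours=24)
        try:
            # Inspect how much data currently falls outside the retention windows
            print(await cleanup_service.get_data_retention_info())
            await asyncio.sleep(3600)  # stand-in for the application's main work
        finally:
            await cleanup_service.stop_scheduled_cleanup()

    asyncio.run(run_with_retention())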
diff --git a/layers/business/room_service.py b/layers/business/room_service.py
deleted file mode 100644
index b2cc062..0000000
--- a/layers/business/room_service.py
+++ /dev/null
@@ -1,262 +0,0 @@
-"""
-Room metrics business logic service
-Business Layer - handles room-related aggregations and business operations
-"""
-from datetime import datetime, timedelta
-from typing import Dict, Any, List, Optional
-import logging
-
-from models import RoomMetrics, CO2Status, OccupancyLevel
-from ..infrastructure.repositories import (
-    SensorReadingRepository, RoomMetricsRepository, RedisRepository
-)
-
-logger = logging.getLogger(__name__)
-
-class RoomService:
-    """Service for room-related business operations"""
-
-    def __init__(self):
-        self.sensor_reading_repo = SensorReadingRepository()
-        self.room_metrics_repo = RoomMetricsRepository()
-        self.redis_repo = RedisRepository()
-
-    async def update_room_metrics(self, room: str) -> bool:
-        """Calculate and store room-level metrics"""
-        if not room:
-            return False
-
-        try:
-            # Get recent readings for this room (last 5 minutes)
-            recent_readings = await self.sensor_reading_repo.get_recent_by_room(
-                room=room,
-                minutes=5
-            )
-
-            if not recent_readings:
-                return False
-
-            # Calculate aggregated metrics
-            metrics = await self._calculate_room_metrics(room, recent_readings)
-
-            # Store in MongoDB
-            stored = await self.room_metrics_repo.create(metrics)
-
-            # Cache in Redis
-            if stored:
-                await self.redis_repo.set_room_metrics(room, metrics.dict())
-                logger.debug(f"Updated room metrics for {room}")
-
-            return stored
-
-        except Exception as e:
-            logger.error(f"Error updating room metrics for {room}: {e}")
-            return False
-
-    async def _calculate_room_metrics(self, room: str, readings: List[Dict]) -> RoomMetrics:
-        """Calculate aggregated metrics for a room based on recent readings"""
-
-        # Group readings by sensor
-        sensors_data = {}
-        for reading in readings:
-            sensor_id = reading["sensor_id"]
-            if sensor_id not in sensors_data:
-                sensors_data[sensor_id] = []
-            sensors_data[sensor_id].append(reading)
-
-        # Initialize value arrays
-        energy_values = []
-        co2_values = []
-        temperature_values = []
-        humidity_values = []
-        motion_detected = False
-
-        # Extract values from readings
-        for sensor_readings in sensors_data.values():
-            for reading in sensor_readings:
-                if reading.get("energy"):
-                    energy_values.append(reading["energy"]["value"])
-                if reading.get("co2"):
-                    co2_values.append(reading["co2"]["value"])
-                if reading.get("temperature"):
-                    temperature_values.append(reading["temperature"]["value"])
-                if reading.get("humidity"):
-                    humidity_values.append(reading["humidity"]["value"])
-                if reading.get("motion") and reading["motion"].get("value") == "Detected":
-                    motion_detected = True
-
-        # Get sensor types present
-        sensor_types = list(set(
-            reading.get("sensor_type")
-            for reading in readings
-            if reading.get("sensor_type")
-        ))
-
-        # Initialize metrics object
-        metrics = RoomMetrics(
-            room=room,
-            timestamp=int(datetime.utcnow().timestamp()),
-            sensor_count=len(sensors_data),
-            active_sensors=list(sensors_data.keys()),
-            sensor_types=sensor_types,
-            motion_detected=motion_detected
-        )
-
-        # Calculate energy metrics
-        if energy_values:
-            metrics.energy = self._calculate_energy_metrics(energy_values)
-
-        # Calculate CO2 metrics and occupancy
-        if co2_values:
-            metrics.co2 = self._calculate_co2_metrics(co2_values)
-            metrics.occupancy_estimate = self._estimate_occupancy_from_co2(
-                metrics.co2["average"]
-            )
-
-        # Calculate temperature metrics
-        if temperature_values:
-            metrics.temperature = self._calculate_temperature_metrics(temperature_values)
-
-        # Calculate humidity metrics
-        if humidity_values:
-            metrics.humidity = self._calculate_humidity_metrics(humidity_values)
-
-        # Set last activity time if motion detected
-        if motion_detected:
-            metrics.last_activity = datetime.utcnow()
-
-        return metrics
-
-    def _calculate_energy_metrics(self, values: List[float]) -> Dict[str, Any]:
-        """Calculate energy consumption metrics"""
-        return {
-            "current": sum(values),
-            "average": sum(values) / len(values),
-            "total": sum(values),
-            "peak": max(values),
-            "unit": "kWh"
-        }
-
-    def _calculate_co2_metrics(self, values: List[float]) -> Dict[str, Any]:
-        """Calculate CO2 level metrics"""
-        avg_co2 = sum(values) / len(values)
-        return {
-            "current": avg_co2,
-            "average": avg_co2,
-            "max": max(values),
-            "min": min(values),
-            "status": self._get_co2_status(avg_co2).value,
-            "unit": "ppm"
-        }
-
-    def _calculate_temperature_metrics(self, values: List[float]) -> Dict[str, Any]:
-        """Calculate temperature metrics"""
-        avg_temp = sum(values) / len(values)
-        return {
-            "current": avg_temp,
-            "average": avg_temp,
-            "max": max(values),
-            "min": min(values),
-            "unit": "°C"
-        }
-
-    def _calculate_humidity_metrics(self, values: List[float]) -> Dict[str, Any]:
-        """Calculate humidity metrics"""
-        avg_humidity = sum(values) / len(values)
-        return {
-            "current": avg_humidity,
-            "average": avg_humidity,
-            "max": max(values),
-            "min": min(values),
-            "unit": "%"
-        }
-
-    def _get_co2_status(self, co2_level: float) -> CO2Status:
-        """Determine CO2 status based on level"""
-        if co2_level < 400:
-            return CO2Status.GOOD
-        elif co2_level < 1000:
-            return CO2Status.MODERATE
-        elif co2_level < 5000:
-            return CO2Status.POOR
-        else:
-            return CO2Status.CRITICAL
-
-    def _estimate_occupancy_from_co2(self, co2_level: float) -> OccupancyLevel:
-        """Estimate occupancy level based on CO2 levels"""
-        if co2_level < 600:
-            return OccupancyLevel.LOW
-        elif co2_level < 1200:
-            return OccupancyLevel.MEDIUM
-        else:
-            return OccupancyLevel.HIGH
-
-    async def get_all_rooms(self) -> Dict[str, Any]:
-        """Get list of all rooms with sensor counts and latest metrics"""
-        try:
-            rooms = await self.sensor_reading_repo.get_distinct_rooms()
-
-            room_data = []
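The CO2 classification being deleted here is a plain threshold ladder; the self-contained sketch below restates it for reference. The thresholds are copied from _get_co2_status and _estimate_occupancy_from_co2 above; the standalone function names and string return values are illustrative only.

    def co2_status(ppm: float) -> str:
        # Thresholds mirror RoomService._get_co2_status
        if ppm < 400:
            return "good"
        if ppm < 1000:
            return "moderate"
        if ppm < 5000:
            return "poor"
        return "critical"

    def occupancy_estimate(ppm: float) -> str:
        # Thresholds mirror RoomService._estimate_occupancy_from_co2
        if ppm < 600:
            return "low"
        return "medium" if ppm < 1200 else "high"

    assert co2_status(850) == "moderate"
    assert occupancy_estimate(850) == "medium"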
-            for room in rooms:
-                # Get sensor count for each room
-                sensor_ids = await self.sensor_reading_repo.get_distinct_sensor_ids_by_room(room)
-                sensor_count = len(sensor_ids)
-
-                # Get latest room metrics from cache
-                room_metrics = await self.redis_repo.get_room_metrics(room)
-
-                room_data.append({
-                    "room": room,
-                    "sensor_count": sensor_count,
-                    "sensor_ids": sensor_ids,
-                    "latest_metrics": room_metrics
-                })
-
-            return {
-                "rooms": room_data,
-                "count": len(room_data)
-            }
-
-        except Exception as e:
-            logger.error(f"Error getting rooms: {e}")
-            return {"rooms": [], "count": 0}
-
-    async def get_room_data(self, room_name: str, start_time: Optional[int] = None,
-                            end_time: Optional[int] = None, limit: int = 100) -> Dict[str, Any]:
-        """Get historical data for a specific room"""
-        try:
-            # Build query for time range
-            query = {"room": room_name}
-
-            if start_time or end_time:
-                time_query = {}
-                if start_time:
-                    time_query["$gte"] = datetime.fromtimestamp(start_time)
-                if end_time:
-                    time_query["$lte"] = datetime.fromtimestamp(end_time)
-                query["created_at"] = time_query
-
-            # Get room metrics
-            room_metrics = await self.room_metrics_repo.get_by_room(room_name, limit)
-
-            # Get sensor readings for the room
-            sensor_readings = await self.sensor_reading_repo.get_by_query(
-                query=query,
-                sort_by="timestamp",
-                sort_order="desc",
-                limit=limit
-            )
-
-            return {
-                "room": room_name,
-                "room_metrics": room_metrics,
-                "sensor_readings": sensor_readings
-            }
-
-        except Exception as e:
-            logger.error(f"Error getting room data for {room_name}: {e}")
-            return {
-                "room": room_name,
-                "room_metrics": [],
-                "sensor_readings": []
-            }
\ No newline at end of file
diff --git a/layers/business/sensor_service.py b/layers/business/sensor_service.py
deleted file mode 100644
index 12a140a..0000000
--- a/layers/business/sensor_service.py
+++ /dev/null
@@ -1,328 +0,0 @@
-"""
-Sensor business logic service
-Business Layer - handles sensor-related business operations and rules
-"""
-import json
-from datetime import datetime, timedelta
-from typing import Dict, Any, List, Optional
-import logging
-import uuid
-
-from models import (
-    SensorReading, LegacySensorReading, SensorMetadata,
-    SensorType, SensorStatus, CO2Status, OccupancyLevel
-)
-from ..infrastructure.repositories import (
-    SensorReadingRepository, SensorMetadataRepository,
-    SystemEventRepository, RedisRepository
-)
-
-logger = logging.getLogger(__name__)
-
-class SensorService:
-    """Service for sensor-related business operations"""
-
-    def __init__(self):
-        self.sensor_reading_repo = SensorReadingRepository()
-        self.sensor_metadata_repo = SensorMetadataRepository()
-        self.system_event_repo = SystemEventRepository()
-        self.redis_repo = RedisRepository()
-
-    async def process_sensor_message(self, message_data: str) -> bool:
-        """Process incoming sensor message and handle business logic"""
-        try:
-            # Parse the message
-            data = json.loads(message_data)
-            logger.debug(f"Processing sensor message: {data}")
-
-            # Convert to standard format
-            sensor_reading = await self._parse_sensor_data(data)
-
-            # Validate business rules
-            validation_result = await self._validate_sensor_reading(sensor_reading)
-            if not validation_result["valid"]:
-                logger.warning(f"Sensor reading validation failed: {validation_result['errors']}")
-                return False
-
-            # Store the reading
-            stored = await self.sensor_reading_repo.create(sensor_reading)
-            if not stored:
-                return False
-
-            # Update caches and metadata
-            await self._update_caches(sensor_reading)
-            await self._update_sensor_metadata(sensor_reading)
-
-            # Check for alerts
-            await self._check_sensor_alerts(sensor_reading)
-
-            return True
-
-        except Exception as e:
-            logger.error(f"Error processing sensor message: {e}")
-            await self._log_processing_error(str(e), message_data)
-            return False
-
-    async def _parse_sensor_data(self, data: dict) -> SensorReading:
-        """Parse and convert sensor data to standard format"""
-        # Check if legacy format
-        if self._is_legacy_format(data):
-            return await self._convert_legacy_data(data)
-        else:
-            return SensorReading(**data)
-
-    def _is_legacy_format(self, data: dict) -> bool:
-        """Check if data is in legacy format"""
-        legacy_keys = {"sensorId", "timestamp", "value", "unit"}
-        return legacy_keys.issubset(data.keys()) and "energy" not in data
-
-    async def _convert_legacy_data(self, data: dict) -> SensorReading:
-        """Convert legacy format to new sensor reading format"""
-        legacy_reading = LegacySensorReading(**data)
-
-        return SensorReading(
-            sensor_id=legacy_reading.sensor_id,
-            sensor_type=SensorType.ENERGY,
-            timestamp=legacy_reading.timestamp,
-            created_at=legacy_reading.created_at,
-            energy={
-                "value": legacy_reading.value,
-                "unit": legacy_reading.unit
-            }
-        )
-
-    async def _validate_sensor_reading(self, reading: SensorReading) -> Dict[str, Any]:
-        """Validate sensor reading against business rules"""
-        errors = []
-
-        # Check timestamp is not too far in the future
-        future_threshold = datetime.utcnow().timestamp() + 3600  # 1 hour
-        if reading.timestamp > future_threshold:
-            errors.append("Timestamp is too far in the future")
-
-        # Check timestamp is not too old
-        past_threshold = datetime.utcnow().timestamp() - 86400  # 24 hours
-        if reading.timestamp < past_threshold:
-            errors.append("Timestamp is too old")
-
-        # Validate sensor values
-        if reading.energy:
-            energy_value = reading.energy.get("value", 0)
-            if energy_value < 0 or energy_value > 1000:  # Reasonable energy range
-                errors.append("Energy value is out of acceptable range")
-
-        if reading.co2:
-            co2_value = reading.co2.get("value", 0)
-            if co2_value < 0 or co2_value > 50000:  # Reasonable CO2 range
-                errors.append("CO2 value is out of acceptable range")
-
-        if reading.temperature:
-            temp_value = reading.temperature.get("value", 0)
-            if temp_value < -50 or temp_value > 100:  # Reasonable temperature range
-                errors.append("Temperature value is out of acceptable range")
-
-        return {
-            "valid": len(errors) == 0,
-            "errors": errors
-        }
-
-    async def _update_caches(self, reading: SensorReading) -> None:
-        """Update Redis caches with latest sensor data"""
-        # Cache latest sensor reading
-        await self.redis_repo.set_sensor_data(
-            reading.sensor_id,
-            reading.dict(),
-            expire_seconds=3600
-        )
-
-        # Update sensor status
-        status_data = {
-            "status": "online",
-            "last_seen": reading.timestamp,
-            "room": reading.room
-        }
-        await self.redis_repo.set_sensor_status(
-            reading.sensor_id,
-            status_data,
-            expire_seconds=1800
-        )
-
-    async def _update_sensor_metadata(self, reading: SensorReading) -> None:
-        """Update or create sensor metadata"""
-        existing = await self.sensor_metadata_repo.get_by_sensor_id(reading.sensor_id)
-
-        if existing:
-            # Update existing metadata
-            updates = {
-                "last_seen": datetime.utcnow(),
-                "status": SensorStatus.ONLINE.value
-            }
-
-            # Add sensor type to monitoring capabilities if not present
-            capabilities = existing.get("monitoring_capabilities", [])
-            if reading.sensor_type.value not in capabilities:
-                capabilities.append(reading.sensor_type.value)
-                updates["monitoring_capabilities"] = capabilities
-
-            await self.sensor_metadata_repo.update(reading.sensor_id, updates)
-        else:
-            # Create new sensor metadata
-            metadata = SensorMetadata(
-                sensor_id=reading.sensor_id,
-                name=f"Sensor {reading.sensor_id}",
-                sensor_type=reading.sensor_type,
-                room=reading.room,
-                status=SensorStatus.ONLINE,
-                last_seen=datetime.utcnow(),
-                monitoring_capabilities=[reading.sensor_type.value]
-            )
-
-            await self.sensor_metadata_repo.create(metadata)
-            logger.info(f"Created metadata for new sensor: {reading.sensor_id}")
-
-    async def _check_sensor_alerts(self, reading: SensorReading) -> None:
-        """Check for alert conditions in sensor data"""
-        alerts = []
-
-        # CO2 level alerts
-        if reading.co2:
-            co2_level = reading.co2.get("value", 0)
-            if co2_level > 5000:
-                alerts.append({
-                    "event_type": "co2_critical",
-                    "severity": "critical",
-                    "title": "Critical CO2 Level",
-                    "description": f"CO2 level ({co2_level} ppm) exceeds critical threshold in {reading.room or 'unknown room'}"
-                })
-            elif co2_level > 1000:
-                alerts.append({
-                    "event_type": "co2_high",
-                    "severity": "warning",
-                    "title": "High CO2 Level",
-                    "description": f"CO2 level ({co2_level} ppm) is above recommended levels in {reading.room or 'unknown room'}"
-                })
-
-        # Energy consumption alerts
-        if reading.energy:
-            energy_value = reading.energy.get("value", 0)
-            if energy_value > 10:
-                alerts.append({
-                    "event_type": "energy_high",
-                    "severity": "warning",
-                    "title": "High Energy Consumption",
-                    "description": f"Energy consumption ({energy_value} kWh) is unusually high for sensor {reading.sensor_id}"
-                })
-
-        # Temperature alerts
-        if reading.temperature:
-            temp_value = reading.temperature.get("value", 0)
-            if temp_value > 30 or temp_value < 15:
-                alerts.append({
-                    "event_type": "temperature_extreme",
-                    "severity": "warning",
-                    "title": "Extreme Temperature",
-                    "description": f"Temperature ({temp_value}°C) is outside normal range in {reading.room or 'unknown room'}"
-                })
-
-        # Log alerts as system events
-        for alert in alerts:
-            await self._log_alert_event(reading, **alert)
-
-    async def _log_alert_event(self, reading: SensorReading, event_type: str, severity: str,
-                               title: str, description: str) -> None:
-        """Log an alert as a system event"""
-        from models import SystemEvent
-
-        event = SystemEvent(
-            event_id=str(uuid.uuid4()),
-            event_type=event_type,
-            severity=severity,
-            timestamp=int(datetime.utcnow().timestamp()),
-            title=title,
-            description=description,
-            sensor_id=reading.sensor_id,
-            room=reading.room,
-            source="sensor_service",
-            data=reading.dict()
-        )
-
-        await self.system_event_repo.create(event)
-
-    async def _log_processing_error(self, error_message: str, raw_data: str) -> None:
-        """Log data processing error"""
-        from models import SystemEvent
-
-        event = SystemEvent(
-            event_id=str(uuid.uuid4()),
-            event_type="data_processing_error",
-            severity="error",
-            timestamp=int(datetime.utcnow().timestamp()),
-            title="Sensor Data Processing Failed",
-            description=f"Failed to process sensor message: {error_message}",
-            source="sensor_service",
-            data={"raw_message": raw_data}
-        )
-
-        await self.system_event_repo.create(event)
-
-    async def get_sensor_details(self, sensor_id: str) -> Optional[Dict[str, Any]]:
-        """Get complete sensor details including metadata and recent readings"""
-        # Get metadata
-        metadata = await self.sensor_metadata_repo.get_by_sensor_id(sensor_id)
-        if not metadata:
-            return None
-
-        # Get recent readings
-        recent_readings = await self.sensor_reading_repo.get_recent_by_sensor(
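The removed entry point process_sensor_message accepted both the new SensorReading JSON and the legacy {"sensorId", "timestamp", "value", "unit"} shape tested by _is_legacy_format above. A hypothetical invocation under the old layout; how LegacySensorReading maps the camelCase field is assumed from its usage here, and a running MongoDB/Redis backend is presumed.

    import asyncio
    import json
    from datetime import datetime

    from layers.business.sensor_service import SensorService  # pre-removal path

    async def ingest_legacy_reading() -> None:
        service = SensorService()
        # Legacy payload: converted to an energy-type SensorReading before storage
        message = json.dumps({
            "sensorId": "sensor-42",
            "timestamp": int(datetime.utcnow().timestamp()),
            "value": 1.25,
            "unit": "kWh",
        })
        ok = await service.process_sensor_message(message)
        print("stored:", ok)

    asyncio.run(ingest_legacy_reading())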
self.sensor_reading_repo.get_recent_by_sensor(
-            sensor_id=sensor_id,
-            limit=100,
-            minutes=1440  # 24 hours
-        )
-
-        # Get latest reading from cache
-        latest_reading = await self.redis_repo.get_sensor_data(sensor_id)
-
-        return {
-            "sensor": metadata,
-            "latest_reading": latest_reading,
-            "recent_readings_count": len(recent_readings),
-            "recent_readings": recent_readings[:10]  # Return only 10 most recent
-        }
-
-    async def update_sensor_metadata(self, sensor_id: str, metadata_updates: Dict[str, Any]) -> bool:
-        """Update sensor metadata with business validation"""
-        # Validate updates
-        if "sensor_id" in metadata_updates:
-            del metadata_updates["sensor_id"]  # Cannot change sensor ID
-
-        # Update timestamp
-        metadata_updates["updated_at"] = datetime.utcnow()
-
-        return await self.sensor_metadata_repo.update(sensor_id, metadata_updates)
-
-    async def delete_sensor(self, sensor_id: str) -> Dict[str, Any]:
-        """Delete a sensor and all its associated data"""
-        # Delete readings
-        readings_deleted = await self.sensor_reading_repo.delete_by_sensor_id(sensor_id)
-
-        # Delete metadata
-        metadata_deleted = await self.sensor_metadata_repo.delete(sensor_id)
-
-        # Clear cache
-        await self.redis_repo.delete_sensor_cache(sensor_id)
-
-        return {
-            "sensor_id": sensor_id,
-            "readings_deleted": readings_deleted,
-            "metadata_deleted": metadata_deleted
-        }
-
-    async def get_all_sensors(self, filters: Dict[str, Any] = None) -> Dict[str, Any]:
-        """Get all sensors with optional filtering"""
-        sensors = await self.sensor_metadata_repo.get_all(filters)
-
-        return {
-            "sensors": sensors,
-            "count": len(sensors),
-            "filters": filters or {}
-        }
\ No newline at end of file
diff --git a/layers/infrastructure/__init__.py b/layers/infrastructure/__init__.py
deleted file mode 100644
index e8a337c..0000000
--- a/layers/infrastructure/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file to make this a Python package
\ No newline at end of file
diff --git a/layers/infrastructure/__pycache__/__init__.cpython-312.pyc b/layers/infrastructure/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
index 898b81d21c2ba9fb6452a5bdea387311af1b2ecb..0000000000000000000000000000000000000000
diff --git a/layers/infrastructure/__pycache__/__init__.cpython-39.pyc b/layers/infrastructure/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
index 35add973ade2aa01fdd7925b94b5524f3ac15a0c..0000000000000000000000000000000000000000
diff --git a/layers/infrastructure/__pycache__/database_connection.cpython-39.pyc b/layers/infrastructure/__pycache__/database_connection.cpython-39.pyc
deleted file mode 100644
index 4178cb6594722301b0da3ece0ac1293efc097b6e..0000000000000000000000000000000000000000
diff --git a/layers/infrastructure/__pycache__/redis_connection.cpython-39.pyc b/layers/infrastructure/__pycache__/redis_connection.cpython-39.pyc
deleted file mode 100644
index 6a0953f2578d92fd6f16146bc13c09dfaa1e1c16..0000000000000000000000000000000000000000
zg7T;!uu}*-1n>*I>x`V3*3Jz-hYl2()>d}svE8MZ#F80WTfAFjEx>OYO%}qHYoN99 zV!aYstk`7Y+jvh!WS^y@RVv1S7w6gl3vqIkHy9{^BUo7&$N!mll6-@_Ma6&$UaWnI zj+g)+jD95*Tk@#5C5wt65G$iW6KkZrh?{cb$d0v-wdgcGWsEqKxQdE|Y3l85kVVd4 z@q!x`_RG4d7mak8KViqL)>p~)$FOi=+zq^;PY()E)T(nc;qeePc#n#6R4_6!C{l7x z<={y5Br%!Hh=Mh>(uzk8HE=F*6hg0c1(f+HeQP$U<8wgk>{{_ zXM7z(!TvRsq%()Kf(I>&mrl)*ofs*vRv0_>T@V?VC?Bzj(iTuVJg*pKhZ0n27J&xt zVqaS3B_CH*6m&_oyi|KTd|Hu9nuAQ}L-zu&ALb)-%kTHFA#A%l@Z}s{j52jWgF@N{ zPs(sA@~P%C`JG?K4RSG_3id8h-NvNcQ`b{Y3WF$I~HGz`{ zn1H$$v(I4w2Y3G;XZ_^fKscRth02fTC>FUChgdZCaWI_VRW^nKU`#mE6L0%WZDQC4Mrz|r>%zQ2a8ny;A1>^GHtgbmRxo_ykBpBI!(ru@oI*tZ z7=g|9gHE&MA#HA4ctP^SGwVyKUsSwIBcr`=uY8DU`cK@dg%UeZg{WL?+R3vTirV>(_wO~qR zVuA)W5fn+Y;-myEAD&(xZzqXuis>2c5^2Wk1`$BqhC!Jw5S4>oyR+;F8B=Efy28I|kets|Wvp6%b z;rN9k8B%=}*l?V4kE%ohV> sw~d_3YFo!Ecw^0XQJAI!`&hxZtUCxz%le8@G%DZK%*xfuTIKBj00krm;{X5v diff --git a/layers/infrastructure/database_connection.py b/layers/infrastructure/database_connection.py deleted file mode 100644 index cc024b5..0000000 --- a/layers/infrastructure/database_connection.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -Database connection management for MongoDB -Infrastructure Layer - handles low-level database connectivity -""" -import os -from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase -from pymongo import IndexModel, ASCENDING, DESCENDING -from typing import Optional -import logging - -logger = logging.getLogger(__name__) - -class DatabaseConnection: - """Manages MongoDB connection and database operations""" - - def __init__(self): - self.client: Optional[AsyncIOMotorClient] = None - self.database: Optional[AsyncIOMotorDatabase] = None - self._mongodb_url = os.getenv("MONGODB_URL", "mongodb://localhost:27017") - self._database_name = os.getenv("DATABASE_NAME", "energy_monitoring") - - async def connect(self) -> None: - """Establish connection to MongoDB""" - try: - logger.info(f"Connecting to MongoDB at: {self._mongodb_url}") - - self.client = AsyncIOMotorClient(self._mongodb_url) - await self.client.admin.command('ping') - - self.database = self.client[self._database_name] - await self._create_indexes() - - logger.info("Successfully connected to MongoDB") - - except Exception as e: - logger.error(f"Error connecting to MongoDB: {e}") - raise - - async def disconnect(self) -> None: - """Close MongoDB connection""" - if self.client: - self.client.close() - logger.info("Disconnected from MongoDB") - - async def get_database(self) -> AsyncIOMotorDatabase: - """Get database instance""" - if not self.database: - await self.connect() - return self.database - - async def _create_indexes(self) -> None: - """Create database indexes for optimal performance""" - try: - # Sensor readings collection indexes - sensor_readings_indexes = [ - IndexModel([("sensor_id", ASCENDING), ("timestamp", DESCENDING)]), - IndexModel([("timestamp", DESCENDING)]), - IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]), - IndexModel([("sensor_type", ASCENDING), ("timestamp", DESCENDING)]), - IndexModel([("created_at", DESCENDING)]), - ] - await self.database.sensor_readings.create_indexes(sensor_readings_indexes) - - # Room metrics collection indexes - room_metrics_indexes = [ - IndexModel([("room", ASCENDING), ("timestamp", DESCENDING)]), - IndexModel([("timestamp", DESCENDING)]), - IndexModel([("created_at", DESCENDING)]), - ] - await self.database.room_metrics.create_indexes(room_metrics_indexes) - - # Sensor metadata collection indexes - sensor_metadata_indexes = [ - IndexModel([("sensor_id", ASCENDING)], unique=True), - 
IndexModel([("room", ASCENDING)]), - IndexModel([("sensor_type", ASCENDING)]), - IndexModel([("status", ASCENDING)]), - ] - await self.database.sensor_metadata.create_indexes(sensor_metadata_indexes) - - # System events collection indexes - system_events_indexes = [ - IndexModel([("timestamp", DESCENDING)]), - IndexModel([("event_type", ASCENDING), ("timestamp", DESCENDING)]), - IndexModel([("severity", ASCENDING), ("timestamp", DESCENDING)]), - ] - await self.database.system_events.create_indexes(system_events_indexes) - - logger.info("Database indexes created successfully") - - except Exception as e: - logger.error(f"Error creating indexes: {e}") - -# Global database connection instance -database_connection = DatabaseConnection() \ No newline at end of file diff --git a/layers/infrastructure/redis_connection.py b/layers/infrastructure/redis_connection.py deleted file mode 100644 index 574414f..0000000 --- a/layers/infrastructure/redis_connection.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -Redis connection management and operations -Infrastructure Layer - handles Redis connectivity and low-level operations -""" -import os -import json -from typing import Optional, Dict, Any -import logging -import redis.asyncio as redis - -logger = logging.getLogger(__name__) - -class RedisConnection: - """Manages Redis connection and basic operations""" - - def __init__(self): - self.redis_client: Optional[redis.Redis] = None - self._host = os.getenv("REDIS_HOST", "localhost") - self._port = int(os.getenv("REDIS_PORT", "6379")) - self._db = int(os.getenv("REDIS_DB", "0")) - - async def connect(self) -> None: - """Connect to Redis""" - try: - self.redis_client = redis.Redis( - host=self._host, - port=self._port, - db=self._db, - decode_responses=True - ) - await self.redis_client.ping() - logger.info("Successfully connected to Redis") - except Exception as e: - logger.error(f"Error connecting to Redis: {e}") - raise - - async def disconnect(self) -> None: - """Disconnect from Redis""" - if self.redis_client: - await self.redis_client.close() - logger.info("Disconnected from Redis") - - async def get_client(self) -> redis.Redis: - """Get Redis client instance""" - if not self.redis_client: - await self.connect() - return self.redis_client - - async def set_with_expiry(self, key: str, value: str, expire_seconds: int = 3600) -> None: - """Set a key-value pair with expiration""" - client = await self.get_client() - await client.setex(key, expire_seconds, value) - - async def get(self, key: str) -> Optional[str]: - """Get value by key""" - client = await self.get_client() - return await client.get(key) - - async def delete(self, key: str) -> None: - """Delete a key""" - client = await self.get_client() - await client.delete(key) - - async def get_keys_by_pattern(self, pattern: str) -> list: - """Get keys matching a pattern""" - client = await self.get_client() - return await client.keys(pattern) - - async def publish(self, channel: str, message: str) -> None: - """Publish message to a channel""" - client = await self.get_client() - await client.publish(channel, message) - - async def create_pubsub(self) -> redis.client.PubSub: - """Create a pub/sub instance""" - client = await self.get_client() - return client.pubsub() - -# Global Redis connection instance -redis_connection = RedisConnection() \ No newline at end of file diff --git a/layers/infrastructure/repositories.py b/layers/infrastructure/repositories.py deleted file mode 100644 index c9c2945..0000000 --- a/layers/infrastructure/repositories.py +++ /dev/null 
@@ -1,362 +0,0 @@ -""" -Repository classes for data access -Infrastructure Layer - handles database operations and queries -""" -import json -from datetime import datetime, timedelta -from typing import List, Dict, Any, Optional -from pymongo import ASCENDING, DESCENDING -from pymongo.errors import DuplicateKeyError -import logging - -from .database_connection import database_connection -from .redis_connection import redis_connection -from models import SensorReading, SensorMetadata, RoomMetrics, SystemEvent - -logger = logging.getLogger(__name__) - -class SensorReadingRepository: - """Repository for sensor reading data operations""" - - async def create(self, reading: SensorReading) -> bool: - """Store sensor reading in MongoDB""" - try: - db = await database_connection.get_database() - reading_dict = reading.dict() - - # Add document ID for deduplication - reading_dict["_id"] = f"{reading.sensor_id}_{reading.timestamp}" - - await db.sensor_readings.insert_one(reading_dict) - logger.debug(f"Stored sensor reading for {reading.sensor_id}") - return True - - except DuplicateKeyError: - logger.debug(f"Duplicate reading ignored for {reading.sensor_id} at {reading.timestamp}") - return True - except Exception as e: - logger.error(f"Error storing sensor reading: {e}") - return False - - async def get_recent_by_sensor(self, sensor_id: str, limit: int = 100, minutes: int = 60) -> List[Dict]: - """Get recent readings for a specific sensor""" - try: - db = await database_connection.get_database() - query = { - "sensor_id": sensor_id, - "created_at": {"$gte": datetime.utcnow() - timedelta(minutes=minutes)} - } - - cursor = db.sensor_readings.find(query).sort("created_at", -1).limit(limit) - readings = await cursor.to_list(length=limit) - - # Convert ObjectId to string - for reading in readings: - reading["_id"] = str(reading["_id"]) - - return readings - - except Exception as e: - logger.error(f"Error getting recent readings for {sensor_id}: {e}") - return [] - - async def get_recent_by_room(self, room: str, minutes: int = 5) -> List[Dict]: - """Get recent readings for a specific room""" - try: - db = await database_connection.get_database() - recent_time = datetime.utcnow() - timedelta(minutes=minutes) - - cursor = db.sensor_readings.find({ - "room": room, - "created_at": {"$gte": recent_time} - }) - - readings = await cursor.to_list(length=None) - return readings - - except Exception as e: - logger.error(f"Error getting recent readings for room {room}: {e}") - return [] - - async def get_by_query(self, query: Dict[str, Any], sort_by: str = "timestamp", - sort_order: str = "desc", limit: int = 100, offset: int = 0) -> List[Dict]: - """Get readings by complex query""" - try: - db = await database_connection.get_database() - - sort_direction = DESCENDING if sort_order == "desc" else ASCENDING - cursor = db.sensor_readings.find(query).sort(sort_by, sort_direction).skip(offset).limit(limit) - - readings = await cursor.to_list(length=limit) - - # Convert ObjectId to string - for reading in readings: - reading["_id"] = str(reading["_id"]) - - return readings - - except Exception as e: - logger.error(f"Error querying sensor readings: {e}") - return [] - - async def count_by_query(self, query: Dict[str, Any]) -> int: - """Count readings matching query""" - try: - db = await database_connection.get_database() - return await db.sensor_readings.count_documents(query) - except Exception as e: - logger.error(f"Error counting sensor readings: {e}") - return 0 - - async def get_distinct_rooms(self) -> List[str]: - 
"""Get list of distinct rooms""" - try: - db = await database_connection.get_database() - return await db.sensor_readings.distinct("room", {"room": {"$ne": None}}) - except Exception as e: - logger.error(f"Error getting distinct rooms: {e}") - return [] - - async def get_distinct_sensor_ids_by_room(self, room: str) -> List[str]: - """Get distinct sensor IDs for a room""" - try: - db = await database_connection.get_database() - return await db.sensor_readings.distinct("sensor_id", {"room": room}) - except Exception as e: - logger.error(f"Error getting distinct sensor IDs for room {room}: {e}") - return [] - - async def delete_by_sensor_id(self, sensor_id: str) -> int: - """Delete all readings for a sensor""" - try: - db = await database_connection.get_database() - result = await db.sensor_readings.delete_many({"sensor_id": sensor_id}) - return result.deleted_count - except Exception as e: - logger.error(f"Error deleting readings for sensor {sensor_id}: {e}") - return 0 - - async def aggregate(self, pipeline: List[Dict]) -> List[Dict]: - """Execute aggregation pipeline""" - try: - db = await database_connection.get_database() - cursor = db.sensor_readings.aggregate(pipeline) - return await cursor.to_list(length=None) - except Exception as e: - logger.error(f"Error executing aggregation: {e}") - return [] - -class SensorMetadataRepository: - """Repository for sensor metadata operations""" - - async def create(self, metadata: SensorMetadata) -> bool: - """Create sensor metadata""" - try: - db = await database_connection.get_database() - await db.sensor_metadata.insert_one(metadata.dict()) - logger.info(f"Created metadata for sensor: {metadata.sensor_id}") - return True - except Exception as e: - logger.error(f"Error creating sensor metadata: {e}") - return False - - async def update(self, sensor_id: str, updates: Dict[str, Any]) -> bool: - """Update sensor metadata""" - try: - db = await database_connection.get_database() - updates["updated_at"] = datetime.utcnow() - - result = await db.sensor_metadata.update_one( - {"sensor_id": sensor_id}, - {"$set": updates} - ) - return result.modified_count > 0 - except Exception as e: - logger.error(f"Error updating sensor metadata: {e}") - return False - - async def get_by_sensor_id(self, sensor_id: str) -> Optional[Dict]: - """Get sensor metadata by ID""" - try: - db = await database_connection.get_database() - metadata = await db.sensor_metadata.find_one({"sensor_id": sensor_id}) - if metadata: - metadata["_id"] = str(metadata["_id"]) - return metadata - except Exception as e: - logger.error(f"Error getting sensor metadata: {e}") - return None - - async def get_all(self, filters: Dict[str, Any] = None) -> List[Dict]: - """Get all sensor metadata with optional filters""" - try: - db = await database_connection.get_database() - query = filters or {} - - cursor = db.sensor_metadata.find(query).sort("created_at", DESCENDING) - metadata_list = await cursor.to_list(length=None) - - # Convert ObjectId to string - for metadata in metadata_list: - metadata["_id"] = str(metadata["_id"]) - - return metadata_list - except Exception as e: - logger.error(f"Error getting sensor metadata: {e}") - return [] - - async def delete(self, sensor_id: str) -> bool: - """Delete sensor metadata""" - try: - db = await database_connection.get_database() - result = await db.sensor_metadata.delete_one({"sensor_id": sensor_id}) - return result.deleted_count > 0 - except Exception as e: - logger.error(f"Error deleting sensor metadata: {e}") - return False - -class 
RoomMetricsRepository: - """Repository for room metrics operations""" - - async def create(self, metrics: RoomMetrics) -> bool: - """Store room metrics""" - try: - db = await database_connection.get_database() - await db.room_metrics.insert_one(metrics.dict()) - logger.debug(f"Stored room metrics for {metrics.room}") - return True - except Exception as e: - logger.error(f"Error storing room metrics: {e}") - return False - - async def get_by_room(self, room: str, limit: int = 100) -> List[Dict]: - """Get room metrics by room name""" - try: - db = await database_connection.get_database() - cursor = db.room_metrics.find({"room": room}).sort("timestamp", DESCENDING).limit(limit) - metrics = await cursor.to_list(length=limit) - - # Convert ObjectId to string - for metric in metrics: - metric["_id"] = str(metric["_id"]) - - return metrics - except Exception as e: - logger.error(f"Error getting room metrics for {room}: {e}") - return [] - -class SystemEventRepository: - """Repository for system events operations""" - - async def create(self, event: SystemEvent) -> bool: - """Create system event""" - try: - db = await database_connection.get_database() - await db.system_events.insert_one(event.dict()) - logger.info(f"System event logged: {event.event_type} - {event.title}") - return True - except Exception as e: - logger.error(f"Error logging system event: {e}") - return False - - async def get_recent(self, hours: int = 24, limit: int = 50, - filters: Dict[str, Any] = None) -> List[Dict]: - """Get recent system events""" - try: - db = await database_connection.get_database() - start_time = datetime.utcnow() - timedelta(hours=hours) - - query = {"created_at": {"$gte": start_time}} - if filters: - query.update(filters) - - cursor = db.system_events.find(query).sort("timestamp", DESCENDING).limit(limit) - events = await cursor.to_list(length=limit) - - # Convert ObjectId to string - for event in events: - event["_id"] = str(event["_id"]) - - return events - except Exception as e: - logger.error(f"Error getting recent events: {e}") - return [] - -class RedisRepository: - """Repository for Redis cache operations""" - - async def set_sensor_data(self, sensor_id: str, data: Dict[str, Any], expire_seconds: int = 3600) -> bool: - """Store latest sensor data in Redis cache""" - try: - key = f"sensor:latest:{sensor_id}" - json_data = json.dumps(data) - await redis_connection.set_with_expiry(key, json_data, expire_seconds) - return True - except Exception as e: - logger.error(f"Error caching sensor data: {e}") - return False - - async def get_sensor_data(self, sensor_id: str) -> Optional[Dict[str, Any]]: - """Get latest sensor data from Redis cache""" - try: - key = f"sensor:latest:{sensor_id}" - data = await redis_connection.get(key) - if data: - return json.loads(data) - return None - except Exception as e: - logger.error(f"Error getting cached sensor data: {e}") - return None - - async def set_sensor_status(self, sensor_id: str, status_data: Dict[str, Any], expire_seconds: int = 1800) -> bool: - """Set sensor status in Redis""" - try: - key = f"sensor:status:{sensor_id}" - json_data = json.dumps(status_data) - await redis_connection.set_with_expiry(key, json_data, expire_seconds) - return True - except Exception as e: - logger.error(f"Error setting sensor status: {e}") - return False - - async def set_room_metrics(self, room: str, metrics: Dict[str, Any], expire_seconds: int = 1800) -> bool: - """Store room metrics in Redis cache""" - try: - key = f"room:metrics:{room}" - json_data = json.dumps(metrics) - 
await redis_connection.set_with_expiry(key, json_data, expire_seconds)
-            return True
-        except Exception as e:
-            logger.error(f"Error caching room metrics: {e}")
-            return False
-
-    async def get_room_metrics(self, room: str) -> Optional[Dict[str, Any]]:
-        """Get room metrics from Redis cache"""
-        try:
-            key = f"room:metrics:{room}"
-            data = await redis_connection.get(key)
-            if data:
-                return json.loads(data)
-            return None
-        except Exception as e:
-            logger.error(f"Error getting cached room metrics: {e}")
-            return None
-
-    async def get_active_sensors(self) -> List[str]:
-        """Get list of active sensors from Redis"""
-        try:
-            keys = await redis_connection.get_keys_by_pattern("sensor:latest:*")
-            return [key.replace("sensor:latest:", "") for key in keys]
-        except Exception as e:
-            logger.error(f"Error getting active sensors: {e}")
-            return []
-
-    async def delete_sensor_cache(self, sensor_id: str) -> bool:
-        """Delete all cached data for a sensor"""
-        try:
-            await redis_connection.delete(f"sensor:latest:{sensor_id}")
-            await redis_connection.delete(f"sensor:status:{sensor_id}")
-            return True
-        except Exception as e:
-            logger.error(f"Error deleting sensor cache: {e}")
-            return False
\ No newline at end of file
diff --git a/layers/presentation/__init__.py b/layers/presentation/__init__.py
deleted file mode 100644
index e8a337c..0000000
--- a/layers/presentation/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file to make this a Python package
\ No newline at end of file
diff --git a/layers/presentation/__pycache__/__init__.cpython-39.pyc b/layers/presentation/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
index ce43cbddc694938dac907434bd390ee27b493bc1..0000000000000000000000000000000000000000
diff --git a/layers/presentation/__pycache__/api_routes.cpython-39.pyc b/layers/presentation/__pycache__/api_routes.cpython-39.pyc
deleted file mode 100644
index 373d25c25731a9b26f36fd79caab7f5fd5a93ab1..0000000000000000000000000000000000000000
diff --git a/layers/presentation/__pycache__/redis_subscriber.cpython-39.pyc b/layers/presentation/__pycache__/redis_subscriber.cpython-39.pyc
deleted file mode 100644
index 9a6d5a3f503c44a22333aa8ae017ba428e82dc1d..0000000000000000000000000000000000000000
zJ5^DgxuSMVJN2&!uW;u%;m#53Y>15`(%IzfE@@V?LH)isyqkuPg!aNTNkpilH1YaD z!eb%px1~@b(Shl_6^w=Su6TnW33@_#>t-qsW_nTt@f95n1gtq2jFKocE7X64Ymj!6 z=Ju^f=_bo-i#75yi|UO?l}8D2{OV&v<2t~>pAJzqQY%ndC9oL ztIzF@vuL}#2JNbFO;7!$#W(mS^wfk4GqzyHHa`XJx~Q0*28=k(cc5oOZ1OWNDc|Mq zKx?aPo#pRBYn$5uG#m}Uj;f`CtKm?! zhBAE!rdBN;sQzIZNZvXOfCLF|#b{+5v9x^asW?=IBYsJYz2P`_eLqSf?fa+jn0Q`7 zv-58Pu{R&pMsqI~${i{6NG73~hjZbpw0Qe4jsdaeV9C{~$w-QCJA+!2HH ziCBw8_VE`541D2#YmjI+D{PzuDRfO88bhZYp`2ju{fHH7&yRI=R(n*lJ#bWWQ4Bss8&rsF%5OhGNPOBw-U z6KhVI37OdwL!bNP8=%nC`p!z~#qU1ZpOfm!T`;&;p2yRvr-mYox>2|m%y#B|DP<}> zkWUW&?G3@H(gZkFt^k~>2Yw=9ywJ)C`XF+@oqIpH&ba@so)wByry$f{%L{7z< z5Wizqns4Hln|uH>jDQfv=q}x+4Vqm#f%w{~O(xBL2J=-KqHPiLBV7C6P`scIVH0${ z?AZ_G2ij#uj9u)X(PMH0XgFn>J+vka#W|vXEv+-PdSq$?Rjng>gFN{4w`^LOI^fif zhDarEPV89~SD94K6Ljy#&;#SPr_eidCsoro1PfmU3p?X~f`whM@I_sl)wwfq=N2yQ z>VD@MyTb3nuGQ?*-_K=uZW=Uh5QFy@@lf!&4zyoWX#@qG7#XjS*QnAD8dC~~0}8$% z$Lw$HF~kRBD^zV?1zq7AxJv_pCbGT#yuK82&<}&V5fS4@Mmp-(Vu+X!O@{|pa6e?e z!`T0_y%XkKgq6K(aT*4(I)KW2a(=qdU0=-UrtW3C1y!TOTg(M|ZWMlQhbf4CHBjRu zjMCic1-cK4R}%?@6d7od*CQ1rN(V_O)nzg%C4} zwd(A2gD4h!HJs%%!VrPq<*#tL3s@LDJggZ|YK!Aufc+Y@?)V^AF>`W6f< zk{6bJa^m3YX1pc>$PwJZM4&EIFzgm;2-;=>bfJL{4yG)-2+@g|goc@fhR#3}QN`|) z$LD}4_V`}G9bK6@+`=#4z?Xe^G1&V=S0|9}03LSs$J-cIR&)w6KKP+| zGogTW&Xnt{z^)2^e$zI%pbxiM9eEp;P#6IVueO0+3|`Ecc7SYa?6q!{<|#~7lVBix zKd<}#AmyVN>zls+cof9tm#XjcH1vJB0sBd`tK7u`_s=b*rM!sE%UB@$b1O=;L@}28 zSS&BJV4TEc7X3&eF None: - """Start Redis subscription in background task""" - if self.is_running: - logger.warning("Redis subscriber is already running") - return - - self.is_running = True - self.subscription_task = asyncio.create_task(self._subscribe_loop(channel)) - logger.info(f"Started Redis subscriber for channel: {channel}") - - async def stop_subscription(self) -> None: - """Stop Redis subscription""" - self.is_running = False - if self.subscription_task: - self.subscription_task.cancel() - try: - await self.subscription_task - except asyncio.CancelledError: - pass - logger.info("Redis subscriber stopped") - - async def _subscribe_loop(self, channel: str) -> None: - """Main subscription loop""" - logger.info("Starting Redis subscriber...") - - try: - # Get Redis client and create pubsub - redis_client = await redis_connection.get_client() - pubsub = await redis_connection.create_pubsub() - - # Subscribe to channel - await pubsub.subscribe(channel) - logger.info(f"Subscribed to Redis channel: '{channel}'") - - while self.is_running: - try: - # Get message with timeout - message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0) - - if message and message.get('data'): - await self._process_message(message['data']) - - except Exception as e: - logger.error(f"Error in Redis subscriber loop: {e}") - # Add delay to prevent rapid-fire errors - await asyncio.sleep(5) - - except Exception as e: - logger.error(f"Could not connect to Redis for subscription: {e}") - finally: - # Clean up pubsub connection - try: - await pubsub.unsubscribe(channel) - await pubsub.close() - except Exception as e: - logger.error(f"Error closing pubsub connection: {e}") - - async def _process_message(self, message_data: str) -> None: - """Process incoming Redis message""" - try: - logger.debug(f"Received from Redis: {message_data}") - - # Process sensor data through business layer - processing_success = await self.sensor_service.process_sensor_message(message_data) - - if processing_success: - # Extract room from message for room metrics update - import json - try: - data = json.loads(message_data) - room = data.get('room') - if room: - 
# Update room metrics asynchronously - asyncio.create_task(self.room_service.update_room_metrics(room)) - except json.JSONDecodeError: - logger.warning("Could not parse message for room extraction") - - # Broadcast to WebSocket clients - await websocket_manager.broadcast(message_data) - else: - logger.warning("Sensor data processing failed, skipping broadcast") - - except Exception as e: - logger.error(f"Error processing Redis message: {e}") - - def is_subscriber_running(self) -> bool: - """Check if subscriber is currently running""" - return self.is_running and ( - self.subscription_task is not None and - not self.subscription_task.done() - ) - - async def get_subscriber_status(self) -> dict: - """Get subscriber status information""" - return { - "is_running": self.is_running, - "task_status": ( - "running" if self.subscription_task and not self.subscription_task.done() - else "stopped" - ), - "active_websocket_connections": websocket_manager.get_connection_count() - } - -# Global Redis subscriber instance -redis_subscriber = RedisSubscriber() \ No newline at end of file diff --git a/layers/presentation/websocket_handler.py b/layers/presentation/websocket_handler.py deleted file mode 100644 index cb565ad..0000000 --- a/layers/presentation/websocket_handler.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -WebSocket connection handler -Presentation Layer - manages WebSocket connections and real-time communication -""" -import asyncio -from typing import List -from fastapi import WebSocket, WebSocketDisconnect -import logging - -logger = logging.getLogger(__name__) - -class WebSocketManager: - """Manages WebSocket connections and broadcasting""" - - def __init__(self): - self.active_connections: List[WebSocket] = [] - - async def connect(self, websocket: WebSocket) -> None: - """Accept and store new WebSocket connection""" - await websocket.accept() - self.active_connections.append(websocket) - logger.info(f"New client connected. Total clients: {len(self.active_connections)}") - - def disconnect(self, websocket: WebSocket) -> None: - """Remove WebSocket connection""" - if websocket in self.active_connections: - self.active_connections.remove(websocket) - logger.info(f"Client disconnected. 
Total clients: {len(self.active_connections)}") - - async def send_personal_message(self, message: str, websocket: WebSocket) -> None: - """Send message to specific WebSocket connection""" - try: - await websocket.send_text(message) - except Exception as e: - logger.error(f"Error sending personal message: {e}") - self.disconnect(websocket) - - async def broadcast(self, message: str) -> None: - """Broadcast message to all connected clients""" - if not self.active_connections: - return - - try: - # Send to all connections concurrently - tasks = [ - self._safe_send_message(connection, message) - for connection in self.active_connections.copy() - ] - - # Execute all sends concurrently and handle exceptions - results = await asyncio.gather(*tasks, return_exceptions=True) - - # Remove failed connections - failed_connections = [] - for i, result in enumerate(results): - if isinstance(result, Exception): - failed_connections.append(self.active_connections[i]) - - for connection in failed_connections: - self.disconnect(connection) - - except Exception as e: - logger.error(f"Error in broadcast: {e}") - - async def _safe_send_message(self, websocket: WebSocket, message: str) -> None: - """Safely send message to WebSocket with error handling""" - try: - await websocket.send_text(message) - except WebSocketDisconnect: - # Connection was closed - raise - except Exception as e: - logger.error(f"Error sending message to client: {e}") - raise - - def get_connection_count(self) -> int: - """Get number of active connections""" - return len(self.active_connections) - - async def ping_all_connections(self) -> int: - """Ping all connections to check health, return number of healthy connections""" - if not self.active_connections: - return 0 - - healthy_connections = [] - for connection in self.active_connections.copy(): - try: - await connection.ping() - healthy_connections.append(connection) - except Exception: - logger.debug("Removing unhealthy connection") - - self.active_connections = healthy_connections - return len(healthy_connections) - -# Global WebSocket manager instance -websocket_manager = WebSocketManager() \ No newline at end of file diff --git a/main.py b/main.py deleted file mode 100644 index e0e6580..0000000 --- a/main.py +++ /dev/null @@ -1,202 +0,0 @@ - -import asyncio -import json -import redis.asyncio as redis -import time -import os -from fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException, Depends, Query -from fastapi.middleware.cors import CORSMiddleware -from typing import List, Optional -import logging -from contextlib import asynccontextmanager - -# Import our custom modules -from database import connect_to_mongo, close_mongo_connection, redis_manager, schedule_cleanup -from persistence import persistence_service -from models import DataQuery, DataResponse, HealthCheck -from api import router as api_router - -# Configure logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -# Application startup time for uptime calculation -app_start_time = time.time() - -@asynccontextmanager -async def lifespan(app: FastAPI): - """Application lifespan manager""" - # Startup - logger.info("Application starting up...") - - # Connect to databases - await connect_to_mongo() - await persistence_service.initialize() - - # Start background tasks - asyncio.create_task(redis_subscriber()) - asyncio.create_task(schedule_cleanup()) - - logger.info("Application startup complete") - - yield - - # Shutdown - logger.info("Application shutting down...") - await 
close_mongo_connection() - await redis_manager.disconnect() - logger.info("Application shutdown complete") - -app = FastAPI( - title="Energy Monitoring Dashboard API", - description="Real-time energy monitoring and IoT sensor data management system", - version="1.0.0", - lifespan=lifespan -) - -# Add CORS middleware -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], # Configure appropriately for production - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -# Include API router -app.include_router(api_router, prefix="/api/v1") - -# In-memory store for active WebSocket connections -active_connections: List[WebSocket] = [] - -# Redis channel to subscribe to -REDIS_CHANNEL = "energy_data" - - -@app.websocket("/ws") -async def websocket_endpoint(websocket: WebSocket): - """ - WebSocket endpoint that connects a client, adds them to the active pool, - and removes them on disconnection. - """ - await websocket.accept() - active_connections.append(websocket) - logger.info(f"New client connected. Total clients: {len(active_connections)}") - try: - while True: - # Keep the connection alive - await websocket.receive_text() - except WebSocketDisconnect: - active_connections.remove(websocket) - logger.info(f"Client disconnected. Total clients: {len(active_connections)}") - - -async def redis_subscriber(): - """ - Connects to Redis, subscribes to the specified channel, and broadcasts - messages to all active WebSocket clients. Also persists data to MongoDB. - """ - logger.info("Starting Redis subscriber...") - try: - r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True) - await r.ping() - logger.info("Successfully connected to Redis for subscription.") - except Exception as e: - logger.error(f"Could not connect to Redis for subscription: {e}") - return - - pubsub = r.pubsub() - await pubsub.subscribe(REDIS_CHANNEL) - - logger.info(f"Subscribed to Redis channel: '{REDIS_CHANNEL}'") - while True: - try: - message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0) - if message: - message_data = message['data'] - logger.debug(f"Received from Redis: {message_data}") - - # Process and persist the data - await persistence_service.process_sensor_message(message_data) - - # Broadcast message to all connected WebSocket clients - if active_connections: - await asyncio.gather( - *[connection.send_text(message_data) for connection in active_connections], - return_exceptions=True - ) - - except Exception as e: - logger.error(f"Error in Redis subscriber loop: {e}") - # Add a delay to prevent rapid-fire errors - await asyncio.sleep(5) - - -@app.get("/") -async def read_root(): - """Root endpoint with basic system information""" - return { - "message": "Energy Monitoring Dashboard Backend", - "version": "1.0.0", - "status": "running", - "uptime_seconds": time.time() - app_start_time - } - - -@app.get("/health", response_model=HealthCheck) -async def health_check(): - """Health check endpoint""" - try: - # Check database connections - mongodb_connected = True - redis_connected = True - - try: - await persistence_service.db.command("ping") - except: - mongodb_connected = False - - try: - await redis_manager.redis_client.ping() - except: - redis_connected = False - - # Get system statistics - stats = await persistence_service.get_sensor_statistics() - - # Determine overall status - status = "healthy" - if not mongodb_connected or not redis_connected: - status = "degraded" - - return HealthCheck( - status=status, - 
mongodb_connected=mongodb_connected,
-            redis_connected=redis_connected,
-            total_sensors=stats.get("total_sensors", 0),
-            active_sensors=stats.get("active_sensors", 0),
-            total_readings=stats.get("total_readings", 0),
-            uptime_seconds=time.time() - app_start_time
-        )
-
-    except Exception as e:
-        logger.error(f"Health check failed: {e}")
-        raise HTTPException(status_code=503, detail="Service Unavailable")
-
-
-@app.get("/status")
-async def system_status():
-    """Detailed system status endpoint"""
-    try:
-        stats = await persistence_service.get_sensor_statistics()
-
-        return {
-            "timestamp": time.time(),
-            "uptime_seconds": time.time() - app_start_time,
-            "active_websocket_connections": len(active_connections),
-            "database_stats": stats
-        }
-
-    except Exception as e:
-        logger.error(f"Status check failed: {e}")
-        raise HTTPException(status_code=500, detail="Internal Server Error")
diff --git a/microservices_example.md b/microservices_example.md
deleted file mode 100644
index 6e6c3c4..0000000
--- a/microservices_example.md
+++ /dev/null
@@ -1,84 +0,0 @@
-# Microservices Architecture Example
-
-## Service Decomposition
-
-### 1. Sensor Data Service
-**Responsibility**: Sensor data ingestion, validation, and storage
-```
-Port: 8001
-Database: sensor_db (MongoDB)
-Endpoints:
-- POST /sensors/data          # Ingest sensor readings
-- GET /sensors/{id}/data      # Get sensor history
-- GET /sensors                # List sensors
-```
-
-### 2. Room Management Service
-**Responsibility**: Room metrics, aggregations, and space management
-```
-Port: 8002
-Database: room_db (MongoDB)
-Endpoints:
-- GET /rooms                  # List rooms
-- GET /rooms/{id}/metrics     # Current room metrics
-- GET /rooms/{id}/history     # Historical room data
-```
-
-### 3. Analytics Service
-**Responsibility**: Data analysis, reporting, and insights
-```
-Port: 8003
-Database: analytics_db (PostgreSQL/ClickHouse)
-Endpoints:
-- GET /analytics/summary      # Dashboard summary
-- GET /analytics/trends       # Trend analysis
-- GET /analytics/reports/{id} # Generated reports
-```
-
-### 4. Notification Service
-**Responsibility**: Alerts, events, and real-time notifications
-```
-Port: 8004
-Database: events_db (MongoDB)
-Message Queue: RabbitMQ/Kafka
-Endpoints:
-- POST /notifications/send    # Send notification
-- GET /events                 # System events
-- WebSocket: /ws/notifications # Real-time alerts
-```
-
-### 5. API Gateway
-**Responsibility**: Request routing, authentication, rate limiting
-```
-Port: 8000
-Routes all requests to appropriate services
-Handles CORS, authentication, logging
-```
-
-## Inter-Service Communication
-
-### Synchronous (HTTP/REST)
-```python
-# Analytics Service calling Sensor Service
-import httpx
-
-async def get_sensor_data(sensor_id: str):
-    async with httpx.AsyncClient() as client:
-        response = await client.get(f"http://sensor-service:8001/sensors/{sensor_id}/data")
-        return response.json()
-```
-
-### Asynchronous (Message Queue)
-```python
-# Sensor Service publishes event
-await message_queue.publish("sensor.data.received", {
-    "sensor_id": "sensor_001",
-    "timestamp": datetime.utcnow(),
-    "data": sensor_reading
-})
-
-# Room Service subscribes to event
-@message_queue.subscribe("sensor.data.received")
-async def handle_sensor_data(message):
-    await room_service.update_room_metrics(message.data)
-```
\ No newline at end of file
diff --git a/test_structure.py b/test_structure.py
deleted file mode 100644
index 27e4f76..0000000
--- a/test_structure.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-"""
-Test script to validate the layered architecture structure
-This script checks the structure without requiring all dependencies to be installed
-"""
-import os
-import sys
-from pathlib import Path
-
-def check_file_structure():
-    """Check if all expected files exist in the layered structure"""
-    expected_structure = {
-        "layers/__init__.py": "Layers package init",
-        "layers/infrastructure/__init__.py": "Infrastructure layer init",
-        "layers/infrastructure/database_connection.py": "Database connection management",
-        "layers/infrastructure/redis_connection.py": "Redis connection management",
-        "layers/infrastructure/repositories.py": "Data access layer",
-        "layers/business/__init__.py": "Business layer init",
-        "layers/business/sensor_service.py": "Sensor business logic",
-        "layers/business/room_service.py": "Room business logic",
-        "layers/business/analytics_service.py": "Analytics business logic",
-        "layers/business/cleanup_service.py": "Cleanup business logic",
-        "layers/presentation/__init__.py": "Presentation layer init",
-        "layers/presentation/websocket_handler.py": "WebSocket management",
-        "layers/presentation/redis_subscriber.py": "Redis pub/sub handling",
-        "layers/presentation/api_routes.py": "API route definitions",
-        "main_layered.py": "Main application with layered architecture",
-        "models.py": "Data models (existing)",
-    }
-
-    print("šŸ” Checking layered architecture file structure...")
-    print("=" * 60)
-
-    all_files_exist = True
-
-    for file_path, description in expected_structure.items():
-        full_path = Path(file_path)
-
-        if full_path.exists():
-            size = full_path.stat().st_size
-            print(f"āœ… {file_path:<40} ({size:,} bytes) - {description}")
-        else:
-            print(f"āŒ {file_path:<40} MISSING - {description}")
-            all_files_exist = False
-
-    print("=" * 60)
-
-    if all_files_exist:
-        print("šŸŽ‰ All files in layered structure exist!")
-        return True
-    else:
-        print("āŒ Some files are missing from the layered structure")
-        return False
-
-def check_import_structure():
-    """Check the logical structure of imports (without actually importing)"""
-    print("\nšŸ“‹ Analyzing import dependencies...")
-    print("=" * 60)
-
-    # Define expected dependencies by layer
-    layer_dependencies = {
-        "Infrastructure Layer": {
-            "files": [
-                "layers/infrastructure/database_connection.py",
-                "layers/infrastructure/redis_connection.py",
-                "layers/infrastructure/repositories.py"
-            ],
-            "can_import_from": ["models", "external libraries"],
-            "should_not_import_from": ["business", "presentation"]
-        },
-        "Business Layer": {
-            "files": [
-                "layers/business/sensor_service.py",
-                "layers/business/room_service.py",
-                "layers/business/analytics_service.py",
-                "layers/business/cleanup_service.py"
-            ],
-            "can_import_from": ["models", "infrastructure", "external libraries"],
-            "should_not_import_from": ["presentation"]
-        },
-        "Presentation Layer": {
-            "files": [
-                "layers/presentation/websocket_handler.py",
-                "layers/presentation/redis_subscriber.py",
-                "layers/presentation/api_routes.py"
-            ],
-            "can_import_from": ["models", "business", "infrastructure", "external libraries"],
-            "should_not_import_from": []
-        }
-    }
-
-    violations = []
-
-    for layer_name, layer_info in layer_dependencies.items():
-        print(f"\n{layer_name}:")
-
-        for file_path in layer_info["files"]:
-            if Path(file_path).exists():
-                try:
-                    with open(file_path, 'r') as f:
-                        content = f.read()
-
-                    # Check for violations
-                    for forbidden in layer_info["should_not_import_from"]:
-                        if forbidden == "business" and "from ..business" in content:
-                            violations.append(f"{file_path} imports from business layer (violation)")
-                        elif forbidden == "presentation" and "from ..presentation" in content:
-                            violations.append(f"{file_path} imports from presentation layer (violation)")
-
-                    print(f"  āœ… {Path(file_path).name}")
-
-                except Exception as e:
-                    print(f"  āš ļø {Path(file_path).name} - Could not analyze: {e}")
-
-    if violations:
-        print(f"\nāŒ Found {len(violations)} layering violations:")
-        for violation in violations:
-            print(f"  - {violation}")
-        return False
-    else:
-        print("\nāœ… No layering violations detected!")
-        return True

-def analyze_code_separation():
-    """Analyze how well the code has been separated by responsibility"""
-    print("\nšŸ“Š Analyzing code separation...")
-    print("=" * 60)
-
-    analysis = {
-        "Infrastructure Layer": {
-            "responsibilities": ["Database connections", "Redis connections", "Data repositories"],
-            "file_count": 0,
-            "total_lines": 0
-        },
-        "Business Layer": {
-            "responsibilities": ["Business logic", "Data processing", "Analytics", "Cleanup"],
-            "file_count": 0,
-            "total_lines": 0
-        },
-        "Presentation Layer": {
-            "responsibilities": ["HTTP endpoints", "WebSocket handling", "Request/Response"],
-            "file_count": 0,
-            "total_lines": 0
-        }
-    }
-
-    layer_paths = {
-        "Infrastructure Layer": "layers/infrastructure/",
-        "Business Layer": "layers/business/",
-        "Presentation Layer": "layers/presentation/"
-    }
-
-    for layer_name, layer_path in layer_paths.items():
-        layer_dir = Path(layer_path)
-        if layer_dir.exists():
-            py_files = list(layer_dir.glob("*.py"))
-            py_files = [f for f in py_files if f.name != "__init__.py"]
-
-            total_lines = 0
-            for py_file in py_files:
-                try:
-                    with open(py_file, 'r') as f:
-                        lines = len(f.readlines())
-                        total_lines += lines
-                except:
-                    pass
-
-            analysis[layer_name]["file_count"] = len(py_files)
-            analysis[layer_name]["total_lines"] = total_lines
-
-    for layer_name, info in analysis.items():
-        print(f"\n{layer_name}:")
-        print(f"  Files: {info['file_count']}")
-        print(f"  Lines of Code: {info['total_lines']:,}")
-        print(f"  Responsibilities: {', '.join(info['responsibilities'])}")
-
-    total_files = sum(info["file_count"] for info in analysis.values())
-    total_lines = sum(info["total_lines"] for info in analysis.values())
-
-    print(f"\nšŸ“ˆ Total Separation Metrics:")
-    print(f"  Total Files: {total_files}")
-    print(f"  Total Lines: {total_lines:,}")
-    print(f"  Layers: 3 (Infrastructure, Business, Presentation)")
-
-    return True
-
-def main():
-    """Main test function"""
-    print("šŸ—ļø LAYERED ARCHITECTURE VALIDATION")
-    print("=" * 60)
-
-    success = True
-
-    # Check file structure
-    if not check_file_structure():
-        success = False
-
-    # Check import structure
-    if not check_import_structure():
-        success = False
-
-    # Analyze code separation
-    if not analyze_code_separation():
-        success = False
-
-    print("\n" + "=" * 60)
-    if success:
-        print("šŸŽ‰ VALIDATION SUCCESSFUL - Layered architecture is properly structured!")
-        print("\n✨ Key Benefits Achieved:")
-        print("  • Clear separation of concerns")
-        print("  • Infrastructure isolated from business logic")
-        print("  • Business logic separated from presentation")
-        print("  • Easy to test individual layers")
-        print("  • Maintainable and scalable structure")
-    else:
-        print("āŒ VALIDATION FAILED - Issues found in layered architecture")
-
-    return success
-
-if __name__ == "__main__":
-    sys.exit(0 if main() else 1)
\ No newline at end of file
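Note: the asynchronous pattern that microservices_example.md sketches with a hypothetical `message_queue` helper corresponds to the `redis.asyncio` pub/sub primitives the deleted `main.py` was built on. The following is a minimal, self-contained sketch of that mapping — the channel name, payload fields, and local Redis address are illustrative assumptions, not taken from the removed code:

```python
import asyncio
import contextlib
import json

import redis.asyncio as redis

# Assumed event channel, following the naming used in microservices_example.md.
CHANNEL = "sensor.data.received"

async def publish_reading(client: redis.Redis) -> None:
    # Publisher side: serialize an event and publish it on the channel.
    event = {"sensor_id": "sensor_001", "power_watts": 42.0}  # hypothetical payload
    await client.publish(CHANNEL, json.dumps(event))

async def consume_readings(client: redis.Redis) -> None:
    # Subscriber side: poll the channel and hand each message to a handler,
    # mirroring the get_message() loop the removed subscriber used.
    pubsub = client.pubsub()
    await pubsub.subscribe(CHANNEL)
    while True:
        message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
        if message and message.get("data"):
            data = json.loads(message["data"])
            print(f"would update room metrics for {data['sensor_id']}")

async def main() -> None:
    client = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
    consumer = asyncio.create_task(consume_readings(client))
    await asyncio.sleep(0.1)  # give the subscriber a moment to attach
    await publish_reading(client)
    await asyncio.sleep(1.0)  # let the message arrive before shutting down
    consumer.cancel()
    with contextlib.suppress(asyncio.CancelledError):
        await consumer
    await client.close()

if __name__ == "__main__":
    asyncio.run(main())
```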