From 6e3625a606e806d9cd386654237b065c32d06bb6 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Sat, 17 Aug 2024 11:02:53 +1000
Subject: [PATCH 01/38] Update Polish translation

---
 .../solcast_solar/translations/pl.json        | 67 ++++++++++++++++---
 1 file changed, 57 insertions(+), 10 deletions(-)

diff --git a/custom_components/solcast_solar/translations/pl.json b/custom_components/solcast_solar/translations/pl.json
index 32aa4014..70df5acb 100644
--- a/custom_components/solcast_solar/translations/pl.json
+++ b/custom_components/solcast_solar/translations/pl.json
@@ -1,10 +1,13 @@
 {
   "config": {
+    "abort": {
+      "single_instance_allowed": "Dozwolona tylko jedna instancja Solcast"
+    },
     "step": {
       "user": {
         "data": {
           "api_key": "Klucz API (wielokrotne wartości oddzielane przecinkiem)",
-          "api_quota": "Limit interfejsu API (wielokrotne wartości oddzielane przecinkiem)"
+          "api_quota": "Limit API (opcjonalnie wielokrotne wartości oddzielane przecinkiem)"
         },
         "description": "Dane konta Solcast API"
       }
@@ -14,14 +17,14 @@
     "step": {
       "init": {
         "data": {
-          "api_key": "Działanie"
+          "solcast_config_action": "Działanie"
         },
         "description": "Opcje konfiguracji Solcast"
       },
       "api": {
         "data": {
           "api_key": "Klucz API (wielokrotne wartości oddzielane przecinkiem)",
-          "api_quota": "Limit interfejsu API (wielokrotne wartości oddzielane przecinkiem)"
+          "api_quota": "Limit API (opcjonalnie wielokrotne wartości oddzielane przecinkiem)"
         },
         "description": "Dane konta Solcast API"
       },
@@ -71,12 +74,26 @@
         },
         "description": "Wybierz atrybuty czujnika, które będą dostępne"
       }
+    },
+    "error": {
+      "unknown": "Nieznany błąd",
+      "incorrect_options_action": "Wybrano nieprawidłowe działanie"
+    }
+  },
+  "selector": {
+    "solcast_config_action": {
+      "options": {
+        "configure_api": "Konfiguruj klucz Solcast API",
+        "configure_dampening": "Konfiguruj tłumienie",
+        "configure_customsensor": "Konfiguruj niestandardowy czujnik godzin",
+        "configure_attributes": "Konfiguruj dostępne atrybuty"
+      }
     }
   },
   "system_health": {
     "info": {
       "can_reach_server": "Połączenie z serwerem Solcast",
-      "used_requests": "Wykorzystane zapytania API",
+      "used_requests": "Wykorzystane żądania API",
       "rooftop_site_count": "Liczba połaci"
     }
   },
@@ -86,22 +103,46 @@
       "description": "Pobierz najnowsze dane prognoz Solcast."
     },
     "clear_all_solcast_data": {
-      "name": "Wyczyść dane Solcast",
-      "description": "Usunięte zostaną wszystkie przechowywane dane Solcast. Plik solcast.json zostanie usunięty."
+      "name": "Wyczyść wszystkie zapisane dane Solcast",
+      "description": "Usuwa plik solcast.json, aby usunąć wszystkie aktualne dane witryny Solcast."
     },
     "query_forecast_data": {
       "name": "Pobierz dane prognoz",
-      "description": "Pobierz aktualne dane prognoz.",
+      "description": "Zwraca zestaw danych lub wartość dla podanego zapytania.",
       "fields": {
         "start_date_time": {
           "name": "Data i godzina rozpoczęcia",
-          "description": "Czas rozpoczęcia danych prognozowych."
+          "description": "Pobierz dane prognoz od określonej daty i godziny."
         },
         "end_date_time": {
           "name": "Data i godzina zakończenia",
-          "description": "Czas zakończenia danych prognozowych."
+          "description": "Pobierz dane prognoz do określonej daty i godziny."
         }
       }
+    },
+    "set_dampening": {
+      "name": "Ustaw tłumienie prognoz",
+      "description": "Ustaw godzinowy współczynnik tłumienia prognoz.",
+      "fields": {
+        "damp_factor": {
+          "name": "Ciąg tłumienia",
+          "description": "Ciąg wartości współczynnika tłumienia godzinowego, oddzielany przecinkiem."
+        }
+      }
+    },
+    "set_hard_limit": {
+      "name": "Ustaw twardy limit prognoz inwertera",
+      "description": "Zabrania wartości prognoz przekraczających maksymalną moc inwertera.",
+      "fields": {
+        "hard_limit": {
+          "name": "Wartość limitu w watach",
+          "description": "Ustaw maksymalną wartość w watach, jaką może wyprodukować inwerter."
+        }
+      }
+    },
+    "remove_hard_limit": {
+      "name": "Usuń twardy limit prognoz inwertera",
+      "description": "Usuń ustawiony limit."
     }
   },
   "entity": {
     "sensor": {
@@ -114,6 +155,7 @@
       "forecast_this_hour": {"name": "Prognoza na bieżącą godzinę"},
       "get_remaining_today": {"name": "Pozostała prognoza na dziś"},
       "forecast_next_hour": {"name": "Prognoza na następną godzinę"},
+      "forecast_custom_hours": {"name": "Prognoza na następne X godzin"},
       "total_kwh_forecast_tomorrow": {"name": "Prognoza na jutro"},
       "peak_w_tomorrow": {"name": "Szczytowa moc jutro"},
       "peak_w_time_tomorrow": {"name": "Czas szczytowej mocy jutro"},
@@ -125,7 +167,12 @@
       "total_kwh_forecast_d3": {"name": "Prognoza na dzień 3"},
       "total_kwh_forecast_d4": {"name": "Prognoza na dzień 4"},
       "total_kwh_forecast_d5": {"name": "Prognoza na dzień 5"},
       "total_kwh_forecast_d6": {"name": "Prognoza na dzień 6"},
       "total_kwh_forecast_d7": {"name": "Prognoza na dzień 7"},
-      "power_now": {"name": "Aktualna moc"}
+      "power_now": {"name": "Aktualna moc"},
+      "weather_description": {"name": "Pogoda"},
+      "hard_limit": {"name": "Ustawiony twardy limit"}
+    },
+    "select": {
+      "estimate_mode" : {"name": "Użyj pola prognozy"}
     }
   }
 }
\ No newline at end of file

From 1469688ff4f357097b0ca477fa324f1f8ad45c38 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Sat, 17 Aug 2024 12:59:32 +1000
Subject: [PATCH 02/38] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 1c041a1a..8f067c39 100644
--- a/README.md
+++ b/README.md
@@ -243,7 +243,7 @@ mode: single
 >
 > Log capture instructions are in the Bug Issue Template - you will see them if you start creating a new issue - make sure you include these logs if you want the assistance of the repository contributors.
 >
-> An example of busy messages and a successful retry are shown below (with debug logging enabled). In this case there is no issue, as the retry succeeds. Should five consecutive attempts fail, then the forecast retrieval will end with an `ERROR`. If that happens, manually trigger another `solcast_solar.update_forecasts` service call, or wait for your next scheduled automation run.
+> An example of busy messages and a successful retry are shown below (with debug logging enabled). In this case there is no issue, as the retry succeeds. Should ten consecutive attempts fail, then the forecast retrieval will end with an `ERROR`. If that happens, manually trigger another `solcast_solar.update_forecasts` service call, or wait for your next scheduled automation run.
 >
 > Should the load of sites data on integration startup be the call that has failed with 429/Too busy, then the integration cannot start correctly, and it will retry continuously.
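The hunk above comes from a README section whose example automation calls the `solcast_solar.update_forecasts` service and ends with `mode: single`. For readers without that context, a minimal sketch of such an automation follows; only the service name and `mode: single` come from the README itself, while the alias and the trigger time are illustrative assumptions, not part of the patch.

```yaml
# Minimal sketch of a scheduled forecast update, assuming an arbitrary
# daily trigger time. Only solcast_solar.update_forecasts and
# `mode: single` are taken from the README quoted above.
alias: Update Solcast forecasts
trigger:
  - platform: time
    at: "10:00:00"
action:
  - service: solcast_solar.update_forecasts
    data: {}
mode: single
```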
From 66331bb39ba80f4003ab47cabc0d3f427adf59e6 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Sat, 17 Aug 2024 14:14:51 +1000
Subject: [PATCH 03/38] Update README.md

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 8f067c39..d8b4bf7a 100644
--- a/README.md
+++ b/README.md
@@ -585,9 +585,9 @@ v4.1
 * @autoSteve is welcomed as a CodeOwner
 * It is now apparent that it is unlikely that this repo will be added as a default repo in HACS until HACS 2.0 is out, so the installation instructions make it clear that adding via the Manual Repository flow is the preferred approach, and new instructions have been added to show how to do this.
 
-Release Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.31...v4.1
+Release Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.31...v4.1.0
 
-Most Recent Changes: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.43...v4.1
+Most Recent Changes: https://github.com/BJReplay/ha-solcast-solar/compare/v4.0.43...v4.1.0
 
 ### Prior changes
 Click here for changes back to v3.0

From 4282fe3085aa620df92b159ae9236715e9e0809c Mon Sep 17 00:00:00 2001
From: BJReplay <37993507+BJReplay@users.noreply.github.com>
Date: Mon, 19 Aug 2024 10:31:25 +1000
Subject: [PATCH 04/38] Update hacs.json

Fixes #140

Signed-off-by: BJReplay <37993507+BJReplay@users.noreply.github.com>
---
 hacs.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hacs.json b/hacs.json
index de4f6c76..e4b5991c 100644
--- a/hacs.json
+++ b/hacs.json
@@ -1,5 +1,5 @@
 {
-  "name": "Solcast PV Solar",
+  "name": "Solcast PV Forecast",
   "render_readme": true,
   "homeassistant": "2023.7",
   "zip_release": true,

From bfffe5b0bbebffacadbf033bb9df888dd0c98744 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Mon, 19 Aug 2024 19:11:37 +1000
Subject: [PATCH 05/38] Reduce aiofiles version requirement to >=23.2.0

---
 custom_components/solcast_solar/manifest.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/custom_components/solcast_solar/manifest.json b/custom_components/solcast_solar/manifest.json
index 03de771b..21b325be 100644
--- a/custom_components/solcast_solar/manifest.json
+++ b/custom_components/solcast_solar/manifest.json
@@ -9,6 +9,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "issue_tracker": "https://github.com/BJReplay/ha-solcast-solar/issues",
-  "requirements": ["aiohttp>=3.8.5", "aiofiles>=24.1.0", "datetime>=4.3", "isodate>=0.6.1"],
+  "requirements": ["aiohttp>=3.8.5", "aiofiles>=23.2.0", "datetime>=4.3", "isodate>=0.6.1"],
   "version": "4.1.3"
 }

From 2fc738133293bba3c46fcc983cd359dcfd04eab3 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Tue, 20 Aug 2024 15:33:54 +1000
Subject: [PATCH 06/38] Configuration dialog improvements

---
 .github/SCREENSHOTS/damp.png                  | Bin 58112 -> 58771 bytes
 README.md                                     |  4 ++--
 custom_components/solcast_solar/strings.json  | 16 ++++++++--------
 .../solcast_solar/translations/de.json        | 17 +++++++++++++++--
 .../solcast_solar/translations/en.json        | 16 ++++++++--------
 .../solcast_solar/translations/fr.json        | 10 ++++++----
 .../solcast_solar/translations/pl.json        |  6 +++---
 .../solcast_solar/translations/sk.json        |  6 +++---
 .../solcast_solar/translations/ur.json        | 10 ++++++----
 9 files changed, 51 insertions(+), 34 deletions(-)

diff --git a/.github/SCREENSHOTS/damp.png b/.github/SCREENSHOTS/damp.png
index 3a27fc2ac0e57e248f78a27781d7df7ba1ef5c32..745ea3bc80978d0ea37f2f765877b8958787f5ca 100644
GIT binary patch
[literal 58771: base85-encoded PNG data for the damp.png screenshot omitted]
z7f&YQH9a~GJjqxf4FSNz47%)DYXN$8h}S~7+spZVFt&Qcovu*lRZ^D_tJm8wyvH$v<*s1{kk$3Y9fn#W*(q%z zK*P{jT|zWnoS;GDT5f-&O7W9T8|JeAtIbio$gezNs4BDxYtREz6_U30mDzDS5O{oY z7@&|T8~#p(n};h|e7Bp$peKP3#7jsO=nXsO8X8wzZ2_&sXR1s^AvxX}Py%2RFU}2E z^!xnktg<%S0h#?0tc%AsGH$Zu;vkHfH z;3l53PnwBbxlkR*(Xrgc`KT6bA`IQ-uSG7df@sYARA{=eMSO~x%=Iv~#iBwDi z0NJ#<2<%bt&B~=wat-WO2=*P`Rf8f^@1sz!RaA~y#(0Nif2%K7^XH8m2L#wLfKtmB zK&IMvbOt^h8p7j*eP;^uOw)m-Q!k>M;X;E(?N5~{O;S(d}`Xf^T9b%SdG*SvN! zQ@e!cyyvmoMcYr~Zl0*C;HmmS*R!%)q-R%$5YsiYjrmmZdKND^eSiu%aj$FKUnvkL z`YICa85Mf*UJie2lv5_^!J(|9TzukXYO%&UShda3bSYj>RL#IKr5ip;RJ=%-sp_f| z3-jyq(=0|9l}PTRCYq`0Kfl}flvJy)<$YfMO|EsWR7ekVo4&GOu1xF z9@FBHBm&3PuS!tvt{_h&2RPdDvS;p@z|wd3$+$G{bk`@fc<+ z)2yTVvA)>+apHOV!$TVN!vLI(*Ohe3Z$VSj{mDu>1&(?D7~$pM9oe^apshw5*gu}F zgf6zYy8@%J0O(_ln`LY*;E#o9ghi9UWip=M{ot@AIq^;f(7#ggry%~%g#hcPZ1yt} z%r8!g81looT8cF(^j}-|Ceh2fUj5c|qZf4Sj_LVyf)|Fk(7BrAIelPf4FcJKn(o)P ztzfsqAJiKUy2iDVW!=NQI(o-Qc3elC8L)k`rO<>IJ(ixJH<2)KuJ$Vsy%?hMm`044LRD3rhK&j3O9>J| z|NgafK%^>PTB)Np&}_T>&b-74^sLgB1zoXNHN(+h+wH#OvRl9mg}3<0RqBS;9<(B_ zn8KCNahs7>dhcjLP{E_DQ)hBAdT0rKeMs3I?#&3G9HX5t*v*(RP4W=*aZ+$g3u$Vi zJi8nSq&R@a^?C=@hHKckXIm6#8+N!2U}MC-S~1W}FZucO{2&Svw2Hn!o{hdU|We2X(vNNKOFJj7H*{@uA5+Q0=3Fc4eB{ z3Kc^c6w3Ut?4=x*r)SnKj&??0G#d$)XTB^>8FDr8*J&Ib#sxY+9&^Rkk%(q$6*v9y z7JHW}7Wai}H{AEe18;^mS{=3|(+*mEmpzhDg?6R}{PC4#p%fRu?a$#E$~{;v+OiWt zZ03w_RB9hz44^yevs*H>bs&G}L6gsC^TM1^7U^n^xYM;m)~SR$FXpIULf!mw&?D)} zJkXtv>X%W@SMw~>Ws^X_-w79r3fD|bgaD1muSOSPj+N`pM|i9wF`qgNdX-6Frg&Rk zGr?76gwSb2IN^m%2*&~=OEbRZ0x{0+n&R9Oole?AcGv@bt(fF(XilB-f5)F>0?2N% z%YPKV_f&ZA{o8K<$P$QN^!fb6Sxq0Mv;0$w4VX} ze8M?vHaQ8uk~`0yyqBKQ19jRYMW`U96m7nPhu_x0?xCJ`P zOPsamj{9u(&zjS?8TQ)+&scYBkdGJ_dk6{`Y-IZty}% z1FAxlg_}C?>;AadgQ|W{9U;9O7mY{tNr^mS{aE(M^dO~xKX=Zb* zleTSs4b1_EF#S>DQYwT?ml)y`NRf z7#j`eClN}cU9&J|DO8**U!8v!6G&I7n3Nhr*1Y!U8|$0rdln~ec@g=O!mp;jyrDc7 zjLEo@NUhn4E;U0SY{x1gY>!GW9}!7j43O_+>oB)8Q&o_D7B$vUX*csq%JK6085~ue zYUJLS(8QwMuhcr)4H0Y^K|P6KgGg$4Mh52{%*ZELl(J>CDuE%}c~av&a^Kt3l!ZT+ z_6F4+3(eFT6>s|-zQHlEkMiueFFuibgloUQo)Ejxj~XIvU(u2<;o zw}Ob8=!I~V<;Am2>#`1vYrFOBZ-nUduIY>&KhBFYE2TcfNZZdFs&Js%%@t!%_BV&b zpur^v2PNAl9--Iq>r>GphO>5?+nD32K0Y7Ooc)ZGU&)0$^z&+ZE`$cdTolFz-7j%j zu(x-+$f7rd$WY~Df7>agMPKGpl<&)y20v42M};CtPW}wYDK2GwZmk1SdJI02+QC0L z`w{BOs*kx{^!0Lkd%rqmx+~~IEM{(R1E<`CInVDEMhE$BR#>XCHa1QOyX}X`@^|iv zj2H%0RpScg8;_A(VB15&g?vZ$2>>a3_=h0HFSOo1A^ftRztmd;@4s z?Y;}k@n_QS$;&XA{CWjqk0oL$-eH1}C=}@gwX;u1;n>?ehD|3wYLu|zj{>LMD!#$T zuf0R5uaL$ab=*vAph=va=x|#^x_XtWvg6PjMDth(E1h+S=7YHMR740H(W2O_$VYTi zeEFD+-SkAW@xiA2cym)7Exw>r)!yN=Oovcywd$$#9ccy7LDS0q+0gmavwk$R@Q*U#Sn=*5zyC1k-_P&!WQld)q_EkCl7 z>WTa!*OmWGMt^Qq`GYIP%y7ed>Q!bFo1!R!ZT4SZ$`}4R7-I@4avOUY64Qu+`NYsu z2liv4>*LpJg{pndJC!omsEe~gWfY?#b3KM5LZamw3e*($^Wt{-#G#^yR`9xSsB2A1 zZh=fRxbey_+?kzvHeC2NfW%S}n#>|UuH_Rh0(viwIjJ?ei>GATac?iRxelz8eu*N(XZfzkba|9;%KU! 
z-Q)5>d`f^Xtix6&?uOlQ;MR}nky-I~p}|F|Aw00$>en<%WT z00&N72QTgbN0dgKZYD$2uoefd_#J_*oT|m=j)zr1**U46m|iyjzSsD}2V!S5YnZaK zLU<#?p7%oWuiZ8xBO@0&Y2jG#AhwDx*CJ$>cOFUJKar;G?c!PhoY~_WBWPD4OEbXn zJ$$JzepH6w$2vYW3EBi*MZ8u6zN)qtpraR|TD|42PE=5%u8?;%>9VXP^y|g75zKR{ z{6Ks(nf6A;k@g)ZfkRq#;karIB{^n`>NOHxGtZkOIGiU_Z~=M(PuD(^Ys`KpkT z{3kfU@h=T+>AdZEFhb$|?Y>*d?yPmNeq`>D>&%}+HN3}ke=;q1WtHd$j@l9&>yP$W z`FG#1@sQCzcW#W=1cnj+;{||ELH{#lN-k{=J_^vHh4Tq)04Vt$uZ>^@=uTgQ8pa7q$j~EoU9hK$Zljn{V z4bhn1-k?a1A>K6spIiA?`MJ&$0`F{6t}i9qO(iM#~l;U54&DZL2Kctr!Kc2#i8Y?#&R`&ob4PtJ(Z` z`<3q2S96J14On$gn++!?b?rY|zkz$wTAmb*hjqdYdQEBH_U#${iBc`k8L5xi_Sj1G zDi$y23LTy86rk6ZY+DN$d={|v-?~oJuQeqmvxmR5DAnl@KNq zt1J|vSIY@C?;qe=%#GPJab94HGrMfBkBL?1;=EwiI>|0R44X@e?BRg)tqG=5!5yvS zHD+kl;H=hmiR2BXTt0;kv6%nf^Y9Kl7zuVLq$HzUjt zsOhP}OCz7RX0ze|b-pPo^8~$|=UHdp(?Wf}4Rnc@Bw`CZwl6(A=2A)+@n&=H8NVd@;!vwf*vOM7T|?Xn=><%3KZ zJ7V&qvuO54`{JwmYyrwkqy>!%falEdX zCsOQs@q(-^QMKKD^PaL`Tq>o27BDY9#4I!RZf*FVYCf`j@*Y&s?3Bh{HwM3q#BgF+ zjXaknX)TaH6iRxm-*h@MTWQbah3IzWG1XnaROxeSU$h>2UiQB%@|JJjBCGw0<$(4a zQIl^*Y4VBJ#12oJ`^?x9M;{7;zd631jNOllK=z?1^BGOXZ9DutWx_oHPy=xjD{um~ z)twj7qaT=^5&)V0kfzJ3!OTg3m>jq}0&rD*>%=fYFD++zz*F}!J&l=6oK7V>(U!>o& zG~UrCA|1at{T9F0EUgrgINE6U32}0qrm<&%(&sNz=!yI*rI9iNd*HmN1u8e@MNorb z{NE55XI^)*Wlr}bcA@lwvN8UI5XsWRLjl0CT=k2?+hKNKL=yW>dBsrMwZ;lTJYwd@v5 zDTcQgwpm8_t6&=?^vpvP6CF;GO6aOHpj4Z8kR$1mJN)pjt)-9iR~a+;8pas+#pBK{ zgMogi5qyK`EsWEpiNPH*85zesI&SKnlB3CD*(kW`DFHXEG`xuqB$1X@1<{61N8#ibEiYdm+SM{UEt`Lk!6vb zW$f4*55Cxbm=wXMv`-x&XodMae}A5r%e3U=C!?dkv{t`ZxmfQoA_F(WXT49EahU_r z=;nhV4QKwZrjQP;o#e!jKHQVI8KE&LQu zW^qfK*9)$;_%4ovx#wbT34_FFd9G~oRa!cTn=8+rC;!UTHR^Qx#&^0+sFEIP8J=|g z#lmx(Kau87eT^Eb0*f-dbTR;**@Kzp1ncqK=TLWtIT@W+-1ELIOzs5{g1*=}gNJICMwG6M=h+QHx(&SL_clH8VO%D-SpG%o|Kt)7i;}H<{ zw6`LAVFZT3hZeu2{Jz?;T}yj=P$B7UX@ZsQc5h9B6t$t>^gz@q>6)x?{n4TYSiXwjSxtzHK=)}FRNk^d=W{f=HfvF=cMgok6L&zkb1)$P3= z(z5>jHYo59VSBl^(95br$FRVjqnYw2>iy18ULw4FJ}9lhE1FZM8a%S~ahGxEKaZ;l z^DG#vyQuNtXYtSKE3Kv28oU2IUz8s3VAR3 zNz++KFTwQos}V4;vATt8_o znJzZEvb!e{%{5e$7Y3V*{^v)S;71QXc}{twViB=uzz(3J4YIwmX6E0hQ~7tvXm%q4 zVgSE{b0=J2KM|Fm2w7hRj`i@HE+@a=@%7j7=?l74a=CjvYkg6lI;mBM0{FT+F_(S)o5tS%o-D|tvP$xM z40Gotl2U}=Vvc`@>LYS=HIF`;jX!9OH~p0AS;>!u&;(f@rRc_A7-~Fk%)YI0K-CW zgD$Aq;ZGvw;ws_rGEUynHCkUzn4n&jJeFL~+%Rp7T2jQsc3**IX?65An^ zQH!$FF%^Y~<$c~PTlX>0=xguYK`RxBVzjoystYqq;;}a$G(|3Zt|JtTq*Hd>W$^HF zYE`&fP_0!^>8#khE94_;xWEoeVAQ{V8%G_y7Rl=1vY(Ni9ztFP_1}*igabA8MJfE> zU;ip^pn@xmPrM5L8XWLXW+)^K3+TVE7EWdc^Aov=g#QPY`*Q)MR6YDZqx$QAdGw-5 zCCg>fnEtikz&(@s4+;N!59-(a9x^e7#U#Izv)>=p7k8)*r+-%zWTLZC9>TuYt8Ml9 zYu3R(kudiEx#ah>{@(}uGhhF|9>LBJnPhAx-JppPR8fv>mJg7WaEm4J@<251GB|v$ z&WfN1MH@uw1S;ehAJiS!E^=|7GbsEUB9nc-!egUu%G)>wrAz1{pl0SB_yS~%9IAlXy#)N09swQp_N$P`eVL1Q z6`(?YKD~$i_d^H00r5rPXNAzR?sh@dBZ#E)JKJ+z%eCcS?JGg8dB35T_)8)uCRIc6 z#oK>pNfYL&B|!weJ)XH5)$s@FSfy}?hMCSPtXFs+;aP8LU%fPi?SJ><9x$(JJ3LU! 
zfibm;ZMp#d@!wOP%SD=C=uO|Lm*z10Zvr3#cFNjO>-4AODhl(_Ym-2ptj#Ur>cqi| zo+dce5rZ+!ukxRSxF*W|iU> zRt{iLRnS%%9JnsYMovr?Atmu)2 z>v5dZ*~B5V(grY>a`Og9BfiW*SAa?3*5o(cUD>uXWluPR%8Cz=gn#B}*c8_Sc+F;2 z;R0BST7YT2R8iz&rU=SM*F1LVJ2wCmATX~#?6ibn*}%P-l^lJE0|M)YM2HqA{94vh z#L4_r4~#ap0g5`a%{p5+-7`Lx_XVp_3j1DQLl$i$v!HT1*IUMCV~7Yq{7ayUpkMUR z%@}cc8`S9#AWFq)*8Kf+YhK8iT?Ka*GzwB@dW68_&2+&9YBf-)#^95Doc3vc#Bg>17+UMM#*>UE3{vJmW^D=no4tO*(&Kh|q7kbz{8_M_#F)Eu)a5xtCV+Iiy8ug%AT(!&uNsG`?sMD>=w^hk zHV4A;s28xnvqLzcD%0vaV=_5|94if)h+YgKdE8$&-3Kn~LQq`67yC%PJ8PhU+#XJn z{gmD}@1)xugQaAzN5&Zrp}#UJ4=r*jKB5SE=yx!y!S{^#1!zfpg)>Px;kCyfOjTSL zEZxYfpXn?rV^HLnB>}IpU;BdJVa=i^^6>hwJq#}IPhfCDKew_g>eu1^D5{lR;SF}x((d?O!LI11!*Nn&rVRA#_aayD)C0!MasRMPW&kn4VQoJ zJ!2l?i7i?Kku*f(NrR(RScuk#H9!an(s0BXPC7i#Xu8|nSH-a!WHZ^FO-eXr$`p6L=3%#Zrg|e@Q>Yie-9q`!>I9AmG{xp9Zgt1CY#$QsV}J7Y0VQ z4)}}%^C5Wm8E1UfcCnWJqdEYFJ1LT#H`qWyw?Q8Dhu-6=1_&~$b+p>E1Wk0okAbmS zeRB=A#x+I4a}-b1sNxP-e_o;F9}APOHsVKC;NH1&nUOzyT|#hq1b@w9>vmR_v|IDe z=y)5@H7tRJ#%Vd%f=^2IyW`W0L4Pf`!ibyQMOPA`Ag6RgNFTMdK%%m;lgH<nv;ted9K)&WXI_*(8wMEGH*74I_HKZMQW2fwRbzywby3LWA-K$DI@gP;1V; zA!Mh+NWM^t0NussZKIN)L)^irtYn9P<0oaXhOys$JpwHnYbZiKI|M*@FG$#lspxVE z%*1P64KXgsJEgXUmQAFIKCDBk$X<9iKVw9QQv{xpUuNx~n95WZmp;V$!`HyIBqL!2 z?uXm~FLjO*&+9418-NQyr%6316{jL&mAufn-BMV2P&6Te+L+F(N4${qL$bKz=-k9@ z=VcVJWg#1?*i_UGjmG8s&_&HCpX)`Sz(atvP{z=J@uB$9-3Zby?tw>)Xk^>yW~~9v z!ldFpw3RFldhDS)y1IRJ;AT+IVWhuR*|vJWj-aT=ZwgOfU1c_kfT2*}dHm-wXC7*Axqpxq;K=+-TZ4H%Nmt5ZZaPjVE5vWbpl)YoWCIBNu7yP5+u zt>DbL3G_7s_^HrUI@p;Q-%q&n<8=R&RG&joC1&d+bQF* zFhBPOGmVdBD{2w$ipPVPb+km7k&!0dg%t~U7LptCXd^n7H*_wpkV@vt<=jCsOv?9t zuDQU?*$?}Y1KUCxfsZx z06y-|9JU6;L_TpQ_VP;sLtoxQ8bI}M2n)G>Bq_lJM}wj~GR=mvUZ;oR9-Wfn=8!v} zNscSIKGMu;w!MCh4Tv6;`G7m45)i7LA_DirbVKChw8UIrEu*4Avd{J?zmF5H7qw~j zM4~Y6iGyb&@bx`sjZ~tCfoY9mw<_qzJACP<5_AoZU7!mLjF3SLn$W4oK4%il?29JR zLW`G~WsVj?qc81#erTyrDSY>3kB>c!pV&Wr6-w!YU78srJJn%ERE@%G2>U`C<>9X9 zKEM+o)!(Jm^7v2^3QeEzRbkxtPr}`z5VmneQY2}8>uO1>g4Z`Jfgahdb2(p09zDso z0BW7o8FkK(DSB6A94OS153C_t1!>OpMCMl@d6 z0n`2XM#jqVTTFR@%6_|e1g^OyPMKhbtEN={BC3gga{N{J%7}aX;A^dRGfh9(MBNQk z1^i)NmgGGKdUCf%Nmi!xPh(c+vFCC|dCN}+62Ke`^weR4mje%=QFpFg07V*>SJJ1e} z)fq5=LtCLq!aiG52kzg9H#>ZhW^JcT#iHpnDGBEN9*AD4Ro$lb?|8&?2L2fjVoj9y zz6s5;Z!^!r4|PMbGTO9yg2~3jXL1dDDk$_ij4OgKj#;=qHyf^!cP@sxCO>j+M-;Z` zbPx={glF%MpZOUbAr3nJelW4={$e2s<*kUMZ5t3ZEoPVI5j2IQO@&6id$!4M%Q`Jd zi$&4+<&t(Bp7$(f(9+7pMzv(;MZk!Ym0RsS zWtqIrxIyx%C)#392eXE&vMGDVhqd#J!Ohr99{G;wLoNLPY){Y0BtUtUWTL^|bFR)P zSr@-j@%xT>NZ==+I_ZFk`b{i9#)&T>?yBIzJ}u$eI{oae_EH_F)p(eL3XzTQ*A_eV zWHcKdRVeS;bx-WAmVjT-{?ZK zzWXxgwD_DO&3DuA1wPfJn7L+$uvs?M0f40yJhlE`c8 z7pHH-zwK*5PI$`XV9kE-{*)wT!-Qw(K7PHk=Sl-1!r7acG_g8YTlp_ik>i$Mu|U~5 zF6Dlu&G6QHFl>m{(}&2Sf}*MH&nIkabIgPIVAeyq#F_h@(xOn{rFVqE!xOoF z%^wA(GqGmpqCE5kd_mtpM5Mg8S%E5x(oPuaI8SUZgVQ@XbedAo4K#9hZ%QcjK@e_9 zL~zI|4i1wj%sof#{uD0`OJe_Q((c*Ydd7PQ=*E}?S)7w!J35~9EDwj=JNWkd*j5zKKWd29luHh!QEag6=f?-MP}&kH051dV&p)O*B(Oy%^VD$%)BP@ovNTp6n} zJf3d5rF+OUHl`MfUE>gEcyMih!YIHbx?HYlH_%L1GSe-9F)qV-(duimDQS~=%7e4I zUKW#4L2CJg^sVs^JT;ornH`Hg)(1@#q!rV&RVo&Dxm}U+fKH9H`rwejL;B34BQvvUx_X9C{vzP+yA(buV+ru}tP4}!^`GrfHzX&a|@~}u|x`%I7 zDIH41jfa-S5N4EYvv(+_xS*Xpc5LN=F@WM7S@%cq}?3uk@uA`5xY1I5R`T zNfX`vd?y;`hb-EQ)1J&Xh`<{>Ktq>b{6xbAFj*|T*ocnG2%X8rsc`cZu`_UW(7H8I zbOkZ2KS^mwY`-NmTJ`06WvQqq8 zopop$Bd!%T@8(x-%;!|tiT4MNIM zgKLl0d8TDWmY_ZE(lI*y7^nl&v~NjezUlzlxta3@(XBs#z_uTD!+E72StMT`dK(mre4y0Eha9|k%~?@6h0%N8FQCXQ_7i{e;L;BEc+rw` z8S`yvxyAhkG*}KWp=C5AAEVt}Lu8cPycX4OUUu|*1*e7b2si%gTYn{=y8-;4%DA7) z+(|szt5ullUO#^bmxi+Eu;Jv&$SD;=E9U2?eTGERDHw|I5Ux^{LZAx;4fiDcIlD!= zMFwBY4}os9)u8{=-gn1S*~b419qVL-kVwehdpl-iMuhA*w#eR&y(#3_$|{*DqwJX- 
zvgOE3WR^X?*Xem$-``)qzrT;aytwak?)zHzb${NAn#fN%)zEIDB{#1<%H|x8 z;Iw&GOTlwVo&!G#Z90cf4~L`iV9y!A0iCME7r}afxt`iAXUa#QNGCDnG?FVh-KgW1 z+gs_d#Q!w=l-`fe*#ym>QI%){q+6a4TIfnTBcpb0uv>h=cF&C2!2KvHQCg=>k03q% z$&ULBeM~zZ3G>@w7<62r?ZPWo>j9;)DDpk(Iiyd8{$7XgMEk%$Z^IS-jp$M)<#X%| znHAZAMI&04IMX4%lnk!hItJKo`52?FunILBpX3=E2D>|JpaS8%*_rc*dAE+Aaskj^ zqE{92>`X}{wIcWu=meRc!|@{EOSsNMy=)uLi<|wImD$Xr&Iw>SLJ$(E%$)W!eMO(W z+x&HdtQ4VRq!3+PYDfaKT#OfD*6T_q|87meBZow%RR|S7i_L|H=3c!;30#+CZp`78CfhCTE3*5{>vnKl#!{O;rL=gU7D2jx z?UJ0utP#-O5Z~T}!+PA7t*U1GilGF1&Kv|}E!56I1eEJC3WM6k_wxL#`UpeVRx=a0 z(nf|3P9D-EAcUUgsFD$ZfgN$Ji@Jsx_>%1-bpcaa%0h_TImk&qkioS_ODx-|1#hbv zCqNvj0Jh5ka!wQrQVaJ9%-?9+?VZ|JbBj5YEu-7_89w6H1nDzlyuCf}s#EmJLH>1c zVGXM>y`V3MTv+WANv?7`%0yfb&ac~gm7;L&M%q?L=9&`payLZw#E3{^%(=@{Gip`M zIR}NRm8&O0w7AjhQhs3IApsN^4X2)%i9E9X>@2brnz41Etji3@*|C!1+r`WX0_rh^ z80AVVsmY-CbVkCZ&L!V0-w@#jGL4$%xHzvM*i%2%>?(F(Vep0UwvH+B2#a|zI$AAZ z^M*~a_-P8g<2D{MMdh|1Mxj=29A@K?u`cC#s!n)JPt@pQJMxd|>nc`G8PrR&R%bR? zS4q+`14Jo@4cjv(N%;KDEJd8`0}aAE2;U}QEJi;33!`sXBo>cBfnmmOH>4&Ai%6n8 z|13wm#$LnW5k@l#`dB+^;n+svycEctm{;x4x5`XLEn1;hh94DD9uz4Hep3H(`Ld5i zpAntL8ZKKuwT2NOrL4*58&NyUPAH6VO}_6QZuRv!kr2UlqG@r%L+t@F*2_DOg$-@h zCBJ{r;{|kkju&DPIr5#A9N3$GxbtB)V5o!8yo|!r^_9 zLf~t?cTiWL9BVtj;6&0gMw611)a?Pn4x(jdMr>v5K?|R_^F5BMzypnk?o}>4*$hfvjwi+~x zA3{8x*f} zbCVr(`9XyFr;Ce}U$uEFA0cMF;^T^loZAaLjtcVQbin8)<=ICSM1y zXZB;`JU^Qp-FF}212Oac{np8UHXRx|&v^;XPyTwDn&}Vh6>&(Sne(A+px*q?^KU-} z=f5<%*ZON9fZdg*G`QZw@3nXSYJqOh#J8*L?2*4OKM?!N4FC@}O6^7c(ZGzF`r!O- z_t*=6!q~7Auo3?8{prl z{@Ezs89<|$Wc#%Lns4ZZkOPfH|JSJhHR{h11?KR7BlZ7d`rlji=o+b+W7RIeHU{?Y z7tp@9=(Jg+s0-YFdVg!a6|E0^{O^iazTM5VUw4^_}Q+7|ym5dZ%G zh~JUp`g}}pIi%OX3(YGRW0_+dK`1?mLo0Y3q8xHK;k#dChQ1S7+&Ka$&t>2sSy~Pd z!!QqE{%+PzerL^F@{))v_Bz<-JB{evX}9+VdU?N2@=Cf@KujkAV%0k#AR6G+$~HPT zW)sxvMQP8_O5=pD8tlwK7*5e3^-L-gRqkB3%_5_qt2xl*&YQ5e0&jp!t| zD~}a>#(q^q<1Uc-SCUlDN$^YMsls7ix`g9Ny(H^dk^eDG($g&{t4Ww>xp zzz%r(Mq@4l{`rN%EN@%S?=MVfW{#JH_twf*aW0A_(E9+IhmB2w6tWJW&UK$52hBwO zJRGk-Xi4fAfz+5!<*kGSXcL6bVXyC|hGZAMrN zI{^7ulsG5%y@%6VK)CQwtCfhfz?aJVzVp;ur-uMXFMv_?6TbYxt`3F)X5jf}5}bY1 zj(5IVtahgic3XXU1K?wye7XWPv>O3?JG7?)jr6qwk#nCh*69c0wlR_$S4aS2UiivJ z52d`^OLxh+1?CR2rlOo-5v*%K4DRql$5D(QJUy7Zx+L+d$PUj6*$1vEXQ1^7a2!BAg8XMXAdvZug4AK=PA z#|xa%ttt88s@d~ay+{(hvB=b%&`So-22K$|w@U{0sd<2?c%RzD6mcG6+i>(_kCLN? z`JG1QHD2+WfUesBH5ZPYL%UeC!{sl#0@^6v&MTy7}MWBVY z1Zb#UWFbNBskjdR;)Pva3Cn=BicWJu5Z-0r37j*oW>;0dV+y0oyZY{>u9Y(;f!;NB z9>Y(d0)5st(0V}Rw+-wbxOYkX^1m|$HYsM90>CdDbMC3MoF~9y%Obhq(miWH#~pP< z682SZ99zXC$;~o&6ygq*=Yj+?+w9++T5}Cy^ zPx~QqYavzUNYkK*U-BCFXy5DR4S+jxtlJ)l^D1C_?|eJDi#pUjmfWNnD{AKJE+Cp( zIn$kT1*C{hpO+&$N>7_>V-Isiw)c%(U(|u<6^X~8;xo)rTan{QjC^+;?l5=G54EE> zr=Lw$fN5Ej^1@Es(slL9VXFSD&2WLB5#t`W-%XB3=v{SAKN{BGJq5XhKFK^8{Fi3w4Jz_J%bK{5b!ZHt zKP^K7Z{s7*N6qeu|i(YT$-9Gi-I3r@T2Q%eNv4BT?nZ99)+}{|k`VT0`S!DWdckzn0pjiXswEkFVEc3|1TXk8h6J&E<2DUo&i$6RZR)Qc0usPClJW{5%vyh_~an0BosA27o#}#OO z*Z#X3QWtSPN*q{fVq=A1G|cTcQpBcW2Th-Wt7?a;L|h1(P9--h|Azz6LI*Gq6>geQ zW1p{0;)>EK7E zDpT4=UWIB&f& z>-iI;K`!OJ4sm%=s)d?(VvoElg;~0>6F0goFFp1lac&WOaP@k1nQtmuZ9j05UFCw> z^ic{3-d9@;xQabkh*CY3S~y%uoR625!pEO{gwi;``c5Ug0LFwpnG|$wg}O~LDEzC! 
zJ*H^BXq!*N&MWujC*!9aZGG@#mc=GI+$b*GYW~i{uFOG01ld>hrD+U(%A=66dH0hU zfnSqok_vUex7@4kM}?m173x1qnn)CQsTEthbxPWG7a!SwpSoxOy%10rxkGOcfaJT2 zM5zKcL$GkgTTR+qIs~g%j=|86qe|_FqoKyZl6bOcBUHg@ayXPS7x2vj$BrzcPun4@ zYC8$}p(vsNEh4K5|4?(FAgsR}(kzIj7K6Bk^5P|?eMbqs3Ot;gVQsU)VpPW_54x@+ z^iy;VVUVI1HQCJrcN(^nMRMERc%QWp%h!xj4pFs&V9$XVd*Cbb z5h)tA>tPGcQC@<*RkG{x!(N&F1*QTI6_RZ2@(;*_nM>$9^LO=0~) z@KqjpEYvU*s|B%9M=YW+-yoyva^{@99r#3lK8rr?f)sYy&XL&hX6rl(1FjpTe<__) znC6C1^9T2rAZ68kyaR=-t6(C>6Hppd{o=Z=d>BO~ej4V?y(4+Q7jwv?x7=C7g|P9) z_zn*iEuc9N?G*0ogZe^~FIpsFMnUmW97#^eg+|b(oL;}syPr0ptZ_H`DE*+{^q~v` zR_eD9sm$=4abSiqcc<-U>Q>0!6zbGnV^%U)3ZiZE^)lrAM?T0S=H=bfW1ovVIA@26ek-n4tQjjPpJ(U2j&-j}reg>#|om)AuO^f{!MuT88pC3yp0-1@jXBo8{N?JhJ!QkX~9 zNla%MMz^3W1$}f)%Xzuk31RCl6#f7}rHyEE)$%TdEqjAi&+rx*cMR<~fE^j>}1!b8`@3-`0X<6?s z8HuJj&|)FJWs6}Y^4&{_-LiPw8&2(!ery_ekBX8+7(qU-IoBa{rW}0dB4*P0@yf}4 zQ+Gzk z)Eqi}l}SmMWxPC3H`I{yAl+ZXQOw5V#fHR40j=SMXoxW6;W)_CHJqkt(=2KMh)&N_(SX>9uP%{B0dP*~NI%=RYadm>%q zet1`a;vl#5gEUl`YeH%K+4xQ{U6#|_4BwR>CKWobz6pr_AR0iikuGf+Il~Ez zM&kT^fe6;oOyAxf$YA@u^pmWY&yx>c2;Wt#j#mj5nl6o)XEm_prwUCIglN^gAbAmt z@|(e3g1ZMJs5B1ntOuyE#6mDid3sss9*I15pL>JM3R@od#9!*azs~<|*>gR*87V5i zHY?Ru-#KjGK$<(Kge05NKkgUYGB3Y5g>&(FF~95;%%`|xMRh(pY~p=wuZFBxMQE!_ zj-lk1^lZ+YV!#{bo_I7@-t(EF2IhjAqyisXo~bnW+Wy?PRIB~|^ zidAdV~nD^b^{+3qwzOVNt+0@u) zao)=kT?OjhRk;CTYk|3P4A=g^XUz=3XBb6{ zM?NKC4_^I<2ErfD$wnW3CHqMcBi=rBrRA0V>jsPT)5{o~} zodVz=LOZv!WLTQtT0CyKh!S|$e>M9I#8U#s0)kt*RO{e&(}6qTcdM+V`TmaCY|02S zN~6)Z(?g~J31n81DR&e3kZc+dKBaw25vxB$nXy>23v@S;p! zqfrEpaV&Ie;^wS(?t`!gK65?w#&u&5-~kqoJb}I6wOYBaCl)qAHeCbUqs?bGXz9bD zK5gS@Niael3tdA;y!iq24@)SkX?$bFAESIJaUuMAb;fOBhZj#Pau|ap;92i-MogGE z_?UQ!)p)NPYFv)td&f!r`2m!lIVoP{yna*I@FNVmlKbqBi4Gn1SpD?r7SOVJH3$=nTI0Tb3B}8V~49b?aYPpz%wbw z6Ax4nLu_jCDS_CEe277Y+ASSxA7^Lzm(p7n-hka6omv>>K>bBXuqd867VWt-9UJd6 zqreqF=)Wg%t8=qQJ#6!)qq$|N{FRg{(!lP3!&aLbF0)=}Qi;z)h*rB0UwO-S2t%^t zph2V{OIA-WmaeK-nsjM$uZ&H(%6rM@aO(PZH6i))JKvp6Hov7di$_oLiZE%WGtYYE z8by$C)GzM|8|3i@s8(ioxrQZ=B+jMJox5=QiIz)T_&Vuqmp?z|`6rJJv6U_F!jI#I zie)BFkA>hAUN_?(zNxttK;!yehBud@ll+Jy+vdd}s?HpBOT9W)zm!kKu4Yy?xf;BN zUi0TXQgfy``M9@|)>E2-qix8&XJW9*YQmkZEulKuugZ|xqn86bHX2XJKBGQg))xIy zG5onFlgIF%?DC|~eEBucL2gSu^I+C=^0p2FlgRL%lY-f9WO8Hlx`912IeYN!Hsw=1 z?%=>zBD9#{h0|WG0qNfX^OQBy&i_m^0G`lCm&CmEgOpb^{s96hNAo0x$BZ|iVaVT} zAKv6cvjkjP5;{!pGG6n~X2cBSF*7;6q1wB?F4@NWmD8p3=Hhn0{~?iZaZ)5_qqOCb zZBl*oJ+sugI;5G3MvINGBd^z98XRZsKxjdl`LrhgdPT24Bn|pySPrqkmMonzV92oA zy`Uhn=)3n86&udSkCe9t{B4i%msL0&f)w|z5WDqmCa|(a79Ah753r_B7U?gQG3rKl z=54UX3Oir)st7$YnH#po$ZV6zJ$W9dK66k~>8@lfa15BL8~Q!C50k3}a--;!93$73 zPto!3^Swr=`^Q!<&&(|8DW6m##dfpP^eV!Y+22%$VDSf?1==Vyw^(BQ&FO}!)J5xp9sGMar*U|o$*5c~~ z$IQKn=9n1=16LXYFdu=8$TlgBL4bQseZJY&6mkw%gWEaSY_=aPyZEb$3s%NqtA`{v z|8*3JQNnH*GKMHYodDMY<7(TL=CxuqnadV0A5{dJ1uS21EvYdl@$~^Ylb!9S$dA_^ zV$esmb9yZw0gFK?&0CVeEODHNqZArTD}tR{KG*cJ&Kb-gWj@W&d1e%L+nCiI^KLTk zP_7$UY|7^FImv$`rqiHS!VbKKA@1yEp@>^8a8YbFN=bq=h#rq`x`ddWLqHFHMZ+E^ zXjQPxOKqXsG_#r6F7MUFv1Tzzkk{z!8b5!iuMylVw1>4P5o|oYsugob-)E>wV3S5k z^MS-lg)@OX-Mm3c{Dw?h3+7fx1XH`q=@X3gdo{Otr>b=x?_+pK$ivdwmA00C975yeU z*6e+=i#XRJYu;Giet7vU@AkKazF*ATpuFoNdzmz8v4zt%#64lf7VNw z9m3&tPnx}%pP^$|2l5P%H*T_GX`k-vO7SBtHQRFH&Pn>oyq#5(#mrJIY(gn6ZZD)D zSVwzJQ|5xe zQ{w4t-rz)Roq!l^s))o_TQ0mP)MDuqvS$aS4+^xP79o6uxZreSY z=GJ1WMjFhs10!b>o@Z9cyHA_Lb9R#O)59;lE>sUE;TWtx2Sqyzj=evikAtoh+ z)1Gr(iQ2#yrg;w5c$0&e#gN%Nl=(TXD)zatUW}?JM;S@zX*;7Nu;&q5W47h97#SdZ7+<*h%4(!njevyLFpNjO z2E2^*@HIMroo+mqT%e`qkgH?RGaKqf1%4=e^&>(XG>;PRTXkrrDhi~ZnwDB`b`sP{NTf(H8s7pFADR6q)^S(saN9cL@AfFzRax$t0+k!Q^tv=)ymB$Kla;x zxBmsE^YGzOGNRi388%a|>;s;M);qqQ=F3Eu8Us_$X@yCGf}`iK0dpirCmO@?vW1>` 
zEKyLYGZ8%;BZb0Hf#-Ort10gn2LSPglrphB<4R_@=Jz#?gDTxE{^rVvBwugsAR0W2 z>oYm2f<%XJx#q}mfn|+a-8kY1Vu^L4atM4a$3D@sOYIb%I;eD9Q$_V;UVmWPxb9`oWH0OvY@UjH%^5c z4TO+v4H2B5+A;ktj?ES3$*fv#_7<}>puK~Y6PemY$jx>B5}x*MfKyvPwBiyfbn-M; z8~kx8-4~s%AN)s%_>X1sdrbUC-T9BY^B*7TKa$!1VdfrBm4C|GpTtGi;3k^k<&Z{6 z3FhhKdg+6?>GDr=xvd2%agiGY-*zaEiHLd$iF%Qw>6^Rconhq6acmC#uujQu$J0)` zCzXd8DmFdpl;pZuB4Wj_KR+wqui4%nj$D=y5m)tE&)+R~NC7@^$rNj6SK^M^{VH2X z`IonOC*PXz3UTOajomNo#gDJs9CF?;^s?7;N2K&L06(%%FFWCd^lAPm$68*q5`{y= zw5#%nxF$lYUaCUj*Pbe82%B~QvY1`(2;c9LtZXSx>0Fj{Ft$B@=Cn%k(JLwFT~g@Wsv~{G@qDFJ8k$JQC=+ z6bcp!i|_vHho@yv3Wuh{vm4wI2f8`E*)Q40y!?~EM*83V?`K*BMWypDZAv|{zwchp zoJF%Lc5$Xp+L<1csr!EC&Zp(Q-OP`ljaAavZ4?W{)f?v9%U2t-i1$BX)x1Q{ANf*Qy&$oh8QeJ?p1vZI7=Lm?}SP$!*;ern+jJ z-Ip{w&iQ7co+pA?yNJ=qhJbHbSAlP|9Z{$O-+uCtBJe-C(d0yo-r4esR>1)EP2Vk@fKOU(D4e9+Dc1$(~_i6Yr6l z^6xLXBT3ULCuMV6tj1_4BHEjf4%gK0vBYd0*4SJ0*!k`nT#L4w@JX&PE8|#VmLTbvfSTW4^4ls*D4@ZKejul->UuAXNF)d*XpQlbzXeAuS}?n zU}>yp=grR!0qylhd3J?^Qfh+!UeAp#AIp|NQzzqI3}~T4c6%se56-JXhjzdp+AhSg zI&bdKy%ytD$!vI9rC(CdWRhEX%rH66~SlK z-!tqUtKovK?fgqB28{8w)C;rPLd9nm&NOzJ;tal0J+j{$3&T91%0d-3`qOx2;gyt+ zTq-{_haX$P_qBK9O44ex>+j_w+AnSiu=lC>7%nX-7|dHO<#REJkGo(g43BLw*?dat zd36JPr_ZiR=D2JvE!%GvvnDBK*}GAkgQHPW?@=x9D`FxM?qB`Q2VO-+tGurt~a}RwkiJjp%s0LFJ{<{KB%N(@9 zYg&|uZ#WjLVT~yz(tNk8PR(;$OT^XIym;?D@O*p=>cOd7hg#Yg=zM(_VoMYh{H-hR zMu#5hbxy@plOu~puL{98o)_d_ot^x+L9`9g5qEq2uVoB?bjTZl3YYOX&Q7SALXO<^ zd3-%%Q%1khxt_kaPu{zmMU^&AHfkoLmp9fi@rI?d&em7NEAhO!ip_w{NgiRAho~g) z^&_iF7RoJ-WwlHi3QJYH>^k@7gJzjt1ls*hkn2zER_j%LzLBtibQS9tP8xsSb0Z#) z-q}=Z<@{_utE zkg(KUq(^kU?-1broNQA_|Eey4>10ZK&M;rELg_KCH(I3Ip} zE8z0vP~}pYv$oK15DA4=mB^gq1l`$dm3m690DE=1*3p9H0o-h3D?>eIQvN)ahS?_* zpWg0Zu014WuJiWgC~FKLzIKpvd{h~8H^9@>y^InWywu+L?T)%XYDvGi>y3UoQuO9y zbL}AylsJQCf#e}>5}I#SX&~{ZM`FYE%~}rrORJGr9`q3)_DM!1Kk2{=?ccyOc)a}2 zkg9GO6$hARQyi6}C;Urcclm|Oh#uF(e42D?^PZm*V>;OBkwT^5T@2c$o|w|TLfT{d zs&R1lCU=nf}yDax#0F`HXyX!u$eouFp11^tsqeXh8 zS++fA!fXD$7j?rQ_U&D3?U*2?{X@qM;!~)1@2cE})d>^_HC^APCDgjq+f49&LQSbd zZUyp+8csYXI zmu&JX@-ahc085hi{$BzZP>m_Q_*uhFfQ$uAF1SN&_LnpPd=%aL>``lwOcrpwrCH?^ zrS!-2M1RXG6^vNKsI|p~^Z3lFseJi^UXB16`}O_c*Y|t~gn(ir<|5!h*wxBv{CRok ze@8UIF6Wvc_&^Mr|4Hj*oiG1nEBro80kbVDhxf43AAH3O@OwMS+>rXK31FCNB-V;| zp)8sO>X~2CSkyl4(*1Qn4#~MuDVe)a3oLvxd|-br4q@v&`EzXrUv~MTwXU|nbxlA8 z%tBke2p0F!{O?0ZU@AGRX%z+7_~$=9=;7>3TIT& zbl+lNTH^@FvyVQ4J()!ZgY+VKbCbm+&<0h%TmKmq&|XeH(XrFd=}i-4H}v`Xx@2_z zhb8c@4uZwv=1{#SEHUW848>n>Jo7iIH@N=wuVbF!oilv<9{N@9E@o&HaOk^y2YEld zX8F&-YbwPA;ySED-Af!0vZ3fuGbF#2`dc@jtiOf=&s7o_bqcqhqP6 zQT{%;V$)oIwN!5>_Iph0AV?QpD+e%RliCX1VM?oBOJc?=u>cvJN4 z-i*%;+?(ltx4ElqU`0Uny`i^knl&{*RXCKIr0zZOr+$r*2NRdarDeQvYSaxOi*@T( zN1p537aSUYcFG7{lV0N#Kl4gAL?K60#NQgecN6LN=agwWmI>tb7rv(+b-Uw|uyjUrS*928Nr)w0(o@OXr^Zb%#6!^5j*+CyOCASkde$F-4)Ek z3}v6kyb4m+%0Goshk?b^Pl%JKY%k%Gs`+$XfMOWW4dGl=Q zZOVvz5DP}mA{GhR`CAb|baO5D_k0X%h*RF+J{w2wOG^7aSS!la~2O;Tga%^8*98a;M93Ba{Kvhty)8WpUc^J z|F#}BE}87CcYQ(6;v>#6XQ}Mr71x_vs=gAmm98bgiaWK&v09e##>q%s!ny9FOA0sE z+}{@0p_aveUOs57b`qXE&5VWzg})mj%NQ28{mrNt@%v9v40JG7b(31{-#w?nO$YNj i [!NOTE] > Solcast have altered their API limits for new account creators > -> Solcast now only offer new account creators a quota of 10 API calls per day (used to be 50). +> Solcast now only offer new account creators a limit of 10 API calls per day (used to be 50). > Old account users still have 50 API calls. > > The integration currently no longer includes auto API polling. Users now need to create their own automations to call the update solcast service to poll for new data. Keep in mind your API poll limit. 
@@ -127,7 +127,7 @@ You probably **do not** want to do this! Use the HACS method above unless you kn [](https://github.com/BJReplay/ha-solcast-solar/blob/main/.github/SCREENSHOTS/Setupanewintegration.png) -1. Enter your `Solcast API Key`, `API quota` and click `Submit`. If you have more than one Solcast account because you have more than two rooftop setups, enter both account API keys separated by a comma `xxxxxxxx-xxxxx-xxxx,yyyyyyyy-yyyyy-yyyy` (_NB: this goes against Solcast T&C's by having more than one account_). If the API quota is the same for multiple accounts then enter a single value, or both values separated by a comma. +1. Enter your `Solcast API Key`, `API limit` and click `Submit`. If you have more than one Solcast account because you have more than two rooftop setups, enter both account API keys separated by a comma `xxxxxxxx-xxxxx-xxxx,yyyyyyyy-yyyyy-yyyy` (_NB: this goes against Solcast T&C's by having more than one account_). If the API limit is the same for multiple accounts then enter a single value, or both values separated by a comma. 1. Create your own automation to call the service `solcast_solar.update_forecasts` at the times you would like to update the solar forecast. 1. Set up HA Energy Dashboard settings. 1. To change other configuration options after installation, select the integration in `Devices & services` then `CONFIGURE`. diff --git a/custom_components/solcast_solar/strings.json b/custom_components/solcast_solar/strings.json index 2c6af6c3..8bd9a841 100755 --- a/custom_components/solcast_solar/strings.json +++ b/custom_components/solcast_solar/strings.json @@ -7,9 +7,9 @@ "user": { "data": { "api_key": "API key (comma separate multiple values)", - "api_quota": "API quota (optionally comma separate multiple values per key)" + "api_quota": "API limit (optionally comma separate multiple values per key)" }, - "description": "Solcast API Account Details" + "description": "Solcast Account Details" } } }, @@ -24,9 +24,9 @@ "api": { "data": { "api_key": "API key (comma separate multiple values)", - "api_quota": "API quota (optionally comma separate multiple values per key)" + "api_quota": "API limit (optionally comma separate multiple values per key)" }, - "description": "Solcast API Account Details" + "description": "Solcast Account Details" }, "dampen": { "data": { @@ -83,10 +83,10 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "Solcast API key", - "configure_dampening": "Configure Dampening", - "configure_customsensor": "Configure Custom Hour Sensor", - "configure_attributes": "Configure Available Attributes" + "configure_api": "Solcast account details", + "configure_dampening": "Configure dampening", + "configure_customsensor": "Configure custom hours sensor", + "configure_attributes": "Configure available attributes" } } }, diff --git a/custom_components/solcast_solar/translations/de.json b/custom_components/solcast_solar/translations/de.json index 3c5f323d..c64c6332 100644 --- a/custom_components/solcast_solar/translations/de.json +++ b/custom_components/solcast_solar/translations/de.json @@ -1,12 +1,15 @@ { "config": { + "abort": { + "single_instance_allowed": "Es ist nur eine Solcast-Instanz zulässig" + }, "step": { "user": { "data": { "api_key": "API-Schlüssel (mehrere Werte durch Kommas trennen)", "api_quota": "API-Kontingent (mehrere Werte durch Kommas trennen)" }, - "description": "Details zum Solcast API-Konto" + "description": "Solcast-Kontodaten" } } }, @@ -23,7 +26,7 @@ "api_key": "API-Schlüssel (mehrere Werte durch 
Kommas trennen)", "api_quota": "API-Kontingent (mehrere Werte durch Kommas trennen)" }, - "description": "Details zum Solcast API-Konto" + "description": "Solcast-Kontodaten" }, "dampen": { "data": { @@ -73,6 +76,16 @@ } } }, + "selector": { + "solcast_config_action": { + "options": { + "configure_api": "Solcast-Kontodaten", + "configure_dampening": "Dämpfung konfigurieren", + "configure_customsensor": "Konfigurieren Sie einen benutzerdefinierten Stundensensor", + "configure_attributes": "Konfigurieren Sie verfügbare Attribute" + } + } + }, "system_health": { "info": { "can_reach_server": "Verbindung zum Solcast-Server", diff --git a/custom_components/solcast_solar/translations/en.json b/custom_components/solcast_solar/translations/en.json index 7b7841e4..ab52dd31 100644 --- a/custom_components/solcast_solar/translations/en.json +++ b/custom_components/solcast_solar/translations/en.json @@ -7,9 +7,9 @@ "user": { "data": { "api_key": "API key (comma separate multiple values)", - "api_quota": "API quota (optionally comma separate multiple values per key)" + "api_quota": "API limit (optionally comma separate multiple values per key)" }, - "description": "Solcast API Account Details" + "description": "Solcast Account Details" } } }, @@ -24,9 +24,9 @@ "api": { "data": { "api_key": "API key (comma separate multiple values)", - "api_quota": "API quota (optionally comma separate multiple values per key)" + "api_quota": "API limit (optionally comma separate multiple values per key)" }, - "description": "Solcast API Account Details" + "description": "Solcast Account Details" }, "dampen": { "data": { @@ -83,10 +83,10 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "Solcast API key", - "configure_dampening": "Configure Dampening", - "configure_customsensor": "Configure Custom Hour Sensor", - "configure_attributes": "Configure Available Attributes" + "configure_api": "Solcast account details", + "configure_dampening": "Configure dampening", + "configure_customsensor": "Configure custom hours sensor", + "configure_attributes": "Configure available attributes" } } }, diff --git a/custom_components/solcast_solar/translations/fr.json b/custom_components/solcast_solar/translations/fr.json index c2a37ccc..4589c30c 100644 --- a/custom_components/solcast_solar/translations/fr.json +++ b/custom_components/solcast_solar/translations/fr.json @@ -9,7 +9,7 @@ "api_key": "Clé API (plusieurs valeurs séparées par des virgules)", "api_quota": "Quota d'API (plusieurs valeurs séparées par des virgules)" }, - "description": "Détails de votre compte API Solcast" + "description": "Détails du compte Solcast" } } }, @@ -26,7 +26,7 @@ "api_key": "Clé API (plusieurs valeurs séparées par des virgules)", "api_quota": "Quota d'API (plusieurs valeurs séparées par des virgules)" }, - "description": "Détails de votre compte API Solcast" + "description": "Détails du compte Solcast" }, "dampen": { "data": { @@ -83,8 +83,10 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "Clé API Solcast", - "configure_dampening": "Configurer le coefficient" + "configure_api": "Détails du compte Solcast", + "configure_dampening": "Configurer le coefficient", + "configure_customsensor": "Configure custom hours sensor", + "configure_attributes": "Configure available attributes" } } }, diff --git a/custom_components/solcast_solar/translations/pl.json b/custom_components/solcast_solar/translations/pl.json index 70df5acb..4312f367 100644 --- a/custom_components/solcast_solar/translations/pl.json +++ 
b/custom_components/solcast_solar/translations/pl.json @@ -9,7 +9,7 @@ "api_key": "Klucz API (wielokrotne wartości oddzielane przecinkiem)", "api_quota": "Limit API (opcjonalnie wielokrotne wartości oddzielane przecinkiem)" }, - "description": "Dane konta Solcast API" + "description": "Dane konta Solcast" } } }, @@ -26,7 +26,7 @@ "api_key": "Klucz API (wielokrotne wartości oddzielane przecinkiem)", "api_quota": "Limit API (opcjonalnie wielokrotne wartości oddzielane przecinkiem)" }, - "description": "Dane konta Solcast API" + "description": "Dane konta Solcast" }, "dampen": { "data": { @@ -83,7 +83,7 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "Konfiguruj klucz Solcast API", + "configure_api": "Dane konta Solcast", "configure_dampening": "Konfiguruj tłumienie", "configure_customsensor": "Konfiguruj niestandardowy czujnik godzin", "configure_attributes": "Konfiguruj dostępne atrybuty" diff --git a/custom_components/solcast_solar/translations/sk.json b/custom_components/solcast_solar/translations/sk.json index 88adf67b..3852f080 100644 --- a/custom_components/solcast_solar/translations/sk.json +++ b/custom_components/solcast_solar/translations/sk.json @@ -9,7 +9,7 @@ "api_key": "Kľúč API (viac hodnôt oddelených čiarkou)", "api_quota": "Kvóta rozhrania API (viac hodnôt oddelených čiarkou)" }, - "description": "Podrobnosti účtu Solcast API" + "description": "Podrobnosti o účte Solcast" } } }, @@ -26,7 +26,7 @@ "api_key": "Kľúč API (viac hodnôt oddelených čiarkou)", "api_quota": "Kvóta rozhrania API (viac hodnôt oddelených čiarkou)" }, - "description": "Podrobnosti účtu Solcast API" + "description": "Podrobnosti o účte Solcast" }, "dampen": { "data": { @@ -83,7 +83,7 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "Solcast API kľúč", + "configure_api": "Podrobnosti o účte Solcast", "configure_dampening": "Konfigurácia tlmenia", "configure_customsensor": "Konfigurácia vlastného snímača hodín", "configure_attributes": "Konfigurácia dostupných atribútov" diff --git a/custom_components/solcast_solar/translations/ur.json b/custom_components/solcast_solar/translations/ur.json index 101a40a5..653801fc 100644 --- a/custom_components/solcast_solar/translations/ur.json +++ b/custom_components/solcast_solar/translations/ur.json @@ -9,7 +9,7 @@ "api_key": "کلید (کوما سے الگ متعدد اقدار) API", "api_quota": "کوٹہ (کوما سے الگ متعدد اقدار) API" }, - "description": "سولکاسٹ API اکاؤنٹ کی تفصیلات" + "description": "سولکاسٹ اکاؤنٹ کی تفصیلات" } } }, @@ -26,7 +26,7 @@ "api_key": "کلید (کوما سے الگ متعدد اقدار) API", "api_quota": "کوٹہ (کوما سے الگ متعدد اقدار) API" }, - "description": "سولکاسٹ API اکاؤنٹ کی تفصیلات" + "description": "سولکاسٹ اکاؤنٹ کی تفصیلات" }, "dampen": { "data": { @@ -83,8 +83,10 @@ "selector": { "solcast_config_action": { "options": { - "configure_api": "سولکاسٹ API کلید", - "configure_dampening": "ڈیمپننگ کو ترتیب دیں۔" + "configure_api": "سولکاسٹ اکاؤنٹ کی تفصیلات", + "configure_dampening": "ڈیمپننگ کو ترتیب دیں۔", + "configure_customsensor": "حسب ضرورت اوقات کے سینسر کو ترتیب دیں۔", + "configure_attributes": "دستیاب صفات کو ترتیب دیں۔" } } }, From 30b430fcb14e98a8c66393acb05529b94fa05e83 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 21 Aug 2024 17:39:55 +1000 Subject: [PATCH 07/38] Refactor moment and remaining spline build --- custom_components/solcast_solar/solcastapi.py | 106 +++++------------- 1 file changed, 28 insertions(+), 78 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py 
b/custom_components/solcast_solar/solcastapi.py index 9378c6cd..e633eae9 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -800,93 +800,43 @@ def get_forecast_list_slice(self, _data, start_utc, end_utc=None, search_past=Fa end_i = 0 return st_i, end_i - async def spline_moments(self): - """A cubic spline to retrieve interpolated inter-interval momentary estimates for five minute periods""" - df = ['pv_estimate'] - if self.options.attr_brk_estimate10: df.append('pv_estimate10') - if self.options.attr_brk_estimate90: df.append('pv_estimate90') - xx = [ i for i in range(0, 1800*len(self._spline_period), 300) ] - _data = self._data_forecasts - st, _ = self.get_forecast_list_slice(_data, self.get_day_start_utc()) # Get start of day index - self.fc_moment['all'] = {} + def get_spline(self, spline, st, xx, _data, df, reducing=False): for _data_field in df: if st > 0: y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] - self.fc_moment['all'][_data_field] = cubic_interp(xx, self._spline_period, y) - for j in xx: - i = int(j/300) - if math.copysign(1.0, self.fc_moment['all'][_data_field][i]) < 0: self.fc_moment['all'][_data_field][i] = 0.0 # Suppress negative values - k = int(math.floor(j/1800)) - if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: self.fc_moment['all'][_data_field][i] = 0.0 # Suppress spline bounce - self.fc_moment['all'][_data_field] = ([0]*3) + self.fc_moment['all'][_data_field] # Shift right by fifteen minutes because 30-minute averages, padding - else: # The list slice was not found, so zero the moments - self.fc_moment['all'][_data_field] = [0] * (len(self._spline_period) * 6) + if reducing: y = [0.5 * sum(y[i:]) for i in range(0, len(self._spline_period))] # If called for, build a decreasing set of forecasted values instead + spline[_data_field] = cubic_interp(xx, self._spline_period, y) + self.sanitise_spline(spline, _data_field, xx, y) + else: # The list slice was not found, so zero all values in the spline + spline[_data_field] = [0] * (len(self._spline_period) * 6) + + def sanitise_spline(self, spline, _data_field, xx, y): + for j in xx: + i = int(j/300) + if math.copysign(1.0, spline[_data_field][i]) < 0: spline[_data_field][i] = 0.0 # Suppress negative values + k = int(math.floor(j/1800)) + if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 # Suppress spline bounce + spline[_data_field] = ([0]*3) + spline[_data_field] # Shift right by fifteen minutes because 30-minute averages, padding + + def splines_build(self, variant, reducing=False): + """A cubic spline to retrieve interpolated inter-interval momentary or reducing estimates for five minute periods""" + df = ['pv_estimate'] + (['pv_estimate10'] if self.options.attr_brk_estimate10 else []) + (['pv_estimate90'] if self.options.attr_brk_estimate90 else []) + xx = [ i for i in range(0, 1800*len(self._spline_period), 300) ] + st, _ = self.get_forecast_list_slice(self._data_forecasts, self.get_day_start_utc()) # Get start of day index + + variant['all'] = {} + self.get_spline(variant['all'], st, xx, self._data_forecasts, df, reducing=reducing) if self.options.attr_brk_site: for site in self._sites: - self.fc_moment[site['resource_id']] = {} - _data = self._site_data_forecasts[site['resource_id']] - st, _ = self.get_forecast_list_slice(_data, self.get_day_start_utc()) # Get start of day index - for _data_field in df: - if st > 0: - y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] - 
self.fc_moment[site['resource_id']][_data_field] = cubic_interp(xx, self._spline_period, y) - for j in xx: - i = int(j/300) - if math.copysign(1.0, self.fc_moment[site['resource_id']][_data_field][i]) < 0: self.fc_moment[site['resource_id']][_data_field][i] = 0.0 # Suppress negative values - k = int(math.floor(j/1800)) - if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: self.fc_moment[site['resource_id']][_data_field][i] = 0.0 # Suppress spline bounce - self.fc_moment[site['resource_id']][_data_field] = ([0]*3) + self.fc_moment[site['resource_id']][_data_field] # Shift right by fifteen minutes because 30-minute averages, padding - else: # The list slice was not found, so zero the moments - self.fc_moment[site['resource_id']][_data_field] = [0] * (len(self._spline_period) * 6) + variant[site['resource_id']] = {} + self.get_spline(variant[site['resource_id']], st, xx, self._data_forecasts, df, reducing=reducing) + + async def spline_moments(self): self.splines_build(self.fc_moment) def get_moment(self, site, _data_field, t): return self.fc_moment['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] - async def spline_remaining(self): - """A cubic spline to retrieve interpolated inter-interval reducing estimates for five minute periods""" - def buildY(_data, _data_field, st): - y = [] - for i in range(0, len(self._spline_period)): - rem = 0 - for j in range(i, len(self._spline_period)): rem += _data[st+j][_data_field] - y.append(0.5 * rem) - return y - df = ['pv_estimate'] - if self.options.attr_brk_estimate10: df.append('pv_estimate10') - if self.options.attr_brk_estimate90: df.append('pv_estimate90') - xx = [ i for i in range(0, 1800*len(self._spline_period), 300) ] - _data = self._data_forecasts - st, _ = self.get_forecast_list_slice(_data, self.get_day_start_utc()) # Get start of day index - self.fc_remaining['all'] = {} - for _data_field in df: - if st > 0: - y = buildY(_data, _data_field, st) - self.fc_remaining['all'][_data_field] = cubic_interp(xx, self._spline_period, y) - for j in xx: - i = int(j/300) - k = int(math.floor(j/1800)) - if math.copysign(1.0, self.fc_remaining['all'][_data_field][i]) < 0: self.fc_remaining['all'][_data_field][i] = 0.0 # Suppress negative values - if k+1 <= len(y)-1 and y[k] == y[k+1] and self.fc_remaining['all'][_data_field][i] > round(y[k],4): self.fc_remaining['all'][_data_field][i] = y[k] # Suppress spline bounce - self.fc_remaining['all'][_data_field] = ([self.fc_remaining['all'][_data_field][0]]*3) + self.fc_remaining['all'][_data_field] # Shift right by fifteen minutes because 30-minute averages, padding - else: # The list slice was not found, so zero the remainings - self.fc_remaining['all'][_data_field] = [0] * (len(self._spline_period) * 6) - if self.options.attr_brk_site: - for site in self._sites: - self.fc_remaining[site['resource_id']] = {} - _data = self._site_data_forecasts[site['resource_id']] - st, _ = self.get_forecast_list_slice(_data, self.get_day_start_utc()) # Get start of day index - for _data_field in df: - if st > 0: - y = buildY(_data, _data_field, st) - self.fc_remaining[site['resource_id']][_data_field] = cubic_interp(xx, self._spline_period, y) - for j in xx: - i = int(j/300) - k = int(math.floor(j/1800)) - if math.copysign(1.0, self.fc_remaining[site['resource_id']][_data_field][i]) < 0: self.fc_remaining[site['resource_id']][_data_field][i] = 0.0 # Suppress negative values - if k+1 <= len(y)-1 and y[k] == y[k+1] and self.fc_remaining[site['resource_id']][_data_field][i] > 
round(y[k],4): self.fc_remaining[site['resource_id']][_data_field][i] = y[k] # Suppress spline bounce - self.fc_remaining[site['resource_id']][_data_field] = ([self.fc_remaining[site['resource_id']][_data_field][0]]*3) + self.fc_remaining[site['resource_id']][_data_field] # Shift right by fifteen minutes because 30-minute averages, padding - else: # The list slice was not found, so zero the remainings - self.fc_remaining[site['resource_id']][_data_field] = [0] * (len(self._spline_period) * 6) + async def spline_remaining(self): self.splines_build(self.fc_remaining, reducing=True) def get_remaining(self, site, _data_field, t): return self.fc_remaining['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] From 71abb280c58fbc28307ca4c26ebea7db687c1e44 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 21 Aug 2024 19:59:33 +1000 Subject: [PATCH 08/38] Update test.py --- custom_components/solcast_solar/test.py | 29 ++++++++++++++++++++----- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/custom_components/solcast_solar/test.py b/custom_components/solcast_solar/test.py index 2738498c..e376c78f 100755 --- a/custom_components/solcast_solar/test.py +++ b/custom_components/solcast_solar/test.py @@ -3,22 +3,39 @@ import asyncio import logging import traceback +from .const import SOLCAST_URL +from homeassistant.util import dt as dt_util -from aiohttp import ClientConnectionError, ClientSession +from aiohttp import ClientSession from .solcastapi import ConnectionOptions, SolcastApi -#logging.basicConfig(level=logging.DEBUG) +logging.basicConfig(level=logging.DEBUG) _LOGGER = logging.getLogger(__name__) async def test(): + print('This script is for development purposes only') try: - + optdamp = {} + for a in range(0,24): optdamp[str(a)] = 1.0 + options = ConnectionOptions( - "changetoyourapikey", - "https://api.solcast.com.au", - 'solcast.json' + "apikeygoeshere", + SOLCAST_URL, + 'solcast.json', + "/config", + await dt_util.async_get_time_zone(hass.config.time_zone), + optdamp, + 1, + "estimate", + 100, + True, + True, + True, + True, + True, + True ) async with ClientSession() as session: From aa46068d4176771ffe2d45768f940136d4c78102 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 22 Aug 2024 16:30:36 +1000 Subject: [PATCH 09/38] Prevent negative forecast for X hour sensor --- custom_components/solcast_solar/solcastapi.py | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index e633eae9..ee811bdd 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -831,15 +831,31 @@ def splines_build(self, variant, reducing=False): variant[site['resource_id']] = {} self.get_spline(variant[site['resource_id']], st, xx, self._data_forecasts, df, reducing=reducing) - async def spline_moments(self): self.splines_build(self.fc_moment) + async def spline_moments(self): + try: + self.splines_build(self.fc_moment) + except Exception as e: + _LOGGER.debug('Exception in spline_moments(): %s', e) def get_moment(self, site, _data_field, t): - return self.fc_moment['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + try: + return self.fc_moment['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + except Exception as e: + _LOGGER.debug('Exception in 
get_moment(): %s', e) + return 0 - async def spline_remaining(self): self.splines_build(self.fc_remaining, reducing=True) + async def spline_remaining(self): + try: + self.splines_build(self.fc_remaining, reducing=True) + except Exception as e: + _LOGGER.debug('Exception in spline_remaining(): %s', e) def get_remaining(self, site, _data_field, t): - return self.fc_remaining['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + try: + return self.fc_remaining['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + except Exception as e: + _LOGGER.debug('Exception in get_remaining(): %s', e) + return 0 def get_forecast_pv_remaining(self, start_utc, end_utc=None, site=None, _use_data_field=None) -> float: """Return pv_estimates remaining for period""" @@ -872,7 +888,7 @@ def get_forecast_pv_remaining(self, start_utc, end_utc=None, site=None, _use_dat end_utc.strftime('%Y-%m-%d %H:%M:%S') if end_utc is not None else None, st_i, end_i, round(res,4) ) - return res + return res if res > 0 else 0 except Exception as ex: _LOGGER.error(f"Exception in get_forecast_pv_remaining(): {ex}") _LOGGER.error(traceback.format_exc()) From 250532360c2b90a7de48b38c55d626e936cf0623 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 22 Aug 2024 17:37:53 +1000 Subject: [PATCH 10/38] Suppress spline bounce for reducing spline --- custom_components/solcast_solar/solcastapi.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index ee811bdd..4ff3c2d6 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -806,17 +806,24 @@ def get_spline(self, spline, st, xx, _data, df, reducing=False): y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] if reducing: y = [0.5 * sum(y[i:]) for i in range(0, len(self._spline_period))] # If called for, build a decreasing set of forecasted values instead spline[_data_field] = cubic_interp(xx, self._spline_period, y) - self.sanitise_spline(spline, _data_field, xx, y) + self.sanitise_spline(spline, _data_field, xx, y, reducing=reducing) else: # The list slice was not found, so zero all values in the spline spline[_data_field] = [0] * (len(self._spline_period) * 6) - def sanitise_spline(self, spline, _data_field, xx, y): + def sanitise_spline(self, spline, _data_field, xx, y, reducing=False): for j in xx: i = int(j/300) if math.copysign(1.0, spline[_data_field][i]) < 0: spline[_data_field][i] = 0.0 # Suppress negative values - k = int(math.floor(j/1800)) - if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 # Suppress spline bounce - spline[_data_field] = ([0]*3) + spline[_data_field] # Shift right by fifteen minutes because 30-minute averages, padding + if reducing: + if i+1 <= len(xx)-1 and spline[_data_field][i+1] > spline[_data_field][i]: spline[_data_field][i+1] = spline[_data_field][i] + else: + k = int(math.floor(j/1800)) + if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 # Suppress spline bounce + # Shift right by fifteen minutes because 30-minute averages, padding + if reducing: + spline[_data_field] = ([spline[_data_field][0]]*3) + spline[_data_field] + else: + spline[_data_field] = ([0]*3) + spline[_data_field] def splines_build(self, variant, reducing=False): """A cubic spline to retrieve interpolated 
inter-interval momentary or reducing estimates for five minute periods""" From c2030340350d30640c34b79da819d427f6912724 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 22 Aug 2024 18:16:17 +1000 Subject: [PATCH 11/38] Move some comments --- custom_components/solcast_solar/solcastapi.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 4ff3c2d6..7dd3aff0 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -813,12 +813,15 @@ def get_spline(self, spline, st, xx, _data, df, reducing=False): def sanitise_spline(self, spline, _data_field, xx, y, reducing=False): for j in xx: i = int(j/300) - if math.copysign(1.0, spline[_data_field][i]) < 0: spline[_data_field][i] = 0.0 # Suppress negative values + # Suppress negative values + if math.copysign(1.0, spline[_data_field][i]) < 0: + spline[_data_field][i] = 0.0 + # Suppress spline bounce if reducing: if i+1 <= len(xx)-1 and spline[_data_field][i+1] > spline[_data_field][i]: spline[_data_field][i+1] = spline[_data_field][i] else: k = int(math.floor(j/1800)) - if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 # Suppress spline bounce + if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 # Shift right by fifteen minutes because 30-minute averages, padding if reducing: spline[_data_field] = ([spline[_data_field][0]]*3) + spline[_data_field] From 65c7405d0b1bc04267cd57a2caa8c2bd349f5500 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 22 Aug 2024 20:19:40 +1000 Subject: [PATCH 12/38] Comment updates --- custom_components/solcast_solar/solcastapi.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 7dd3aff0..2288f6a0 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -804,7 +804,9 @@ def get_spline(self, spline, st, xx, _data, df, reducing=False): for _data_field in df: if st > 0: y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] - if reducing: y = [0.5 * sum(y[i:]) for i in range(0, len(self._spline_period))] # If called for, build a decreasing set of forecasted values instead + if reducing: + # Build a decreasing set of forecasted values instead + y = [0.5 * sum(y[i:]) for i in range(0, len(self._spline_period))] spline[_data_field] = cubic_interp(xx, self._spline_period, y) self.sanitise_spline(spline, _data_field, xx, y, reducing=reducing) else: # The list slice was not found, so zero all values in the spline @@ -822,14 +824,14 @@ def sanitise_spline(self, spline, _data_field, xx, y, reducing=False): else: k = int(math.floor(j/1800)) if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 - # Shift right by fifteen minutes because 30-minute averages, padding + # Shift right by fifteen minutes because 30-minute averages, padding as appropriate if reducing: spline[_data_field] = ([spline[_data_field][0]]*3) + spline[_data_field] else: spline[_data_field] = ([0]*3) + spline[_data_field] - def splines_build(self, variant, reducing=False): - """A cubic spline to retrieve interpolated inter-interval momentary or reducing estimates for five minute periods""" + def build_splines(self, variant, reducing=False): + """Cubic splines for interpolated inter-interval 
momentary or reducing estimates""" df = ['pv_estimate'] + (['pv_estimate10'] if self.options.attr_brk_estimate10 else []) + (['pv_estimate90'] if self.options.attr_brk_estimate90 else []) xx = [ i for i in range(0, 1800*len(self._spline_period), 300) ] st, _ = self.get_forecast_list_slice(self._data_forecasts, self.get_day_start_utc()) # Get start of day index @@ -843,7 +845,7 @@ def splines_build(self, variant, reducing=False): async def spline_moments(self): try: - self.splines_build(self.fc_moment) + self.build_splines(self.fc_moment) except Exception as e: _LOGGER.debug('Exception in spline_moments(): %s', e) @@ -856,7 +858,7 @@ def get_moment(self, site, _data_field, t): async def spline_remaining(self): try: - self.splines_build(self.fc_remaining, reducing=True) + self.build_splines(self.fc_remaining, reducing=True) except Exception as e: _LOGGER.debug('Exception in spline_remaining(): %s', e) @@ -878,9 +880,11 @@ def get_forecast_pv_remaining(self, start_utc, end_utc=None, site=None, _use_dat res = self.get_remaining(site, _data_field, (start_utc - day_start).total_seconds()) if end_utc is not None: end_utc = end_utc.replace(minute = math.floor(end_utc.minute / 5) * 5) - if end_utc < day_start + timedelta(seconds=1800*len(self._spline_period)): # Spline data points are limited + if end_utc < day_start + timedelta(seconds=1800*len(self._spline_period)): + # End is within today so use spline data res -= self.get_remaining(site, _data_field, (end_utc - day_start).total_seconds()) else: + # End is beyond today, so revert to simple linear interpolation st_i2, _ = self.get_forecast_list_slice(_data, day_start + timedelta(seconds=1800*len(self._spline_period))) # Get post-spline day onwards start index for d in _data[st_i2:end_i]: d2 = d['period_start'] + timedelta(seconds=1800) @@ -888,7 +892,7 @@ def get_forecast_pv_remaining(self, start_utc, end_utc=None, site=None, _use_dat f = 0.5 * d[_data_field] if end_utc < d2: s -= (d2 - end_utc).total_seconds() - res += f * s / 1800 # Simple linear interpolation + res += f * s / 1800 else: res += f if _SENSOR_DEBUG_LOGGING: _LOGGER.debug( From aa27ab51e8f7ba7dac53ed6c8cd540d3fa744df3 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 23 Aug 2024 19:12:45 +1000 Subject: [PATCH 13/38] Ensure sites are configured #145 --- README.md | 2 ++ custom_components/solcast_solar/solcastapi.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 996cfdec..e536804f 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,8 @@ Sign up for an API key (https://solcast.com/). > Solcast may take up to 24hrs to create the account. +Configure your rooftop sites correctly at `solcast.com`. + Copy the API Key for use with this integration (See [Configuration](#Configuration) below). 
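The new README line above ("Configure your rooftop sites correctly") is backed by a startup check in the solcastapi.py diff below, keyed off the `total_records` field of the sites response. A hedged sketch of the payload shape it inspects — only `total_records` and `resource_id` appear in these patches, so the remaining fields are illustrative assumptions rather than an official schema:

```python
# Minimal sketch of a rooftop-sites response with one configured site.
example_sites_response = {
    "total_records": 1,  # 0 now aborts startup ("No sites ... are configured")
    "sites": [
        {"resource_id": "xxxx-xxxx-xxxx-xxxx", "name": "Example rooftop"},  # 'name' is assumed
    ],
}

assert example_sites_response["total_records"] > 0  # mirrors the new guard
```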
## Installation

diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index 2288f6a0..7dfbaa0a 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -225,11 +225,15 @@ def redact(s):
                         except: raise
                     if status == 200:
-                        _LOGGER.debug(f"Writing sites cache")
-                        async with aiofiles.open(apiCacheFileName, 'w') as f:
-                            await f.write(json.dumps(resp_json, ensure_ascii=False))
-                        success = True
-                        break
+                        if resp_json['total_records'] > 0:
+                            _LOGGER.debug(f"Writing sites cache")
+                            async with aiofiles.open(apiCacheFileName, 'w') as f:
+                                await f.write(json.dumps(resp_json, ensure_ascii=False))
+                            success = True
+                            break
+                        else:
+                            _LOGGER.error('No sites for the API key %s are configured at solcast.com', self.redact_api_key(spl))
+                            return
                     else:
                         if cacheExists:
                             useCacheImmediate = True

From b50622bcddcbf0c9f2d16f6afaca4f332270f741 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Fri, 23 Aug 2024 20:40:55 +1000
Subject: [PATCH 14/38] Improve UI visibility of startup issues

---
 custom_components/solcast_solar/__init__.py   |  7 ++++---
 custom_components/solcast_solar/solcastapi.py | 19 +++++++++++++------
 2 files changed, 17 insertions(+), 9 deletions(-)

diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py
index 0042241d..833325e7 100644
--- a/custom_components/solcast_solar/__init__.py
+++ b/custom_components/solcast_solar/__init__.py
@@ -133,10 +133,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         raise ConfigEntryNotReady(f"Getting sites data failed: {ex}") from ex

     if not solcast._sites_loaded:
-        raise ConfigEntryNotReady(f"Sites data could not be retrieved")
+        raise ConfigEntryNotReady('Sites data could not be retrieved')

-    if not await solcast.load_saved_data():
-        raise ConfigEntryNotReady(f"Failed to load initial data from cache or the Solcast API")
+    status = await solcast.load_saved_data()
+    if status != '':
+        raise ConfigEntryNotReady(status)

     _VERSION = ""
     try:
diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index 7dfbaa0a..0a8dbd97 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -462,6 +462,7 @@ async def sites_weather(self):

     async def load_saved_data(self):
         try:
+            status = ''
             if len(self._sites) > 0:
                 if file_exists(self._filename):
                     async with aiofiles.open(self._filename) as data_file:
@@ -504,16 +505,17 @@ async def load_saved_data(self):
                 # No file to load
                 _LOGGER.warning(f"There is no solcast.json to load, so fetching solar forecast, including past forecasts")
                 # Could be a brand new install of the integration, or the file has been removed. Poll once now...
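                # Status contract (sketch, condensed from the __init__.py hunk in this
                # patch): an empty string means success; any other value is a
                # human-readable reason that startup surfaces to the UI, for example:
                #
                #     status = await solcast.load_saved_data()
                #     if status != '':
                #         raise ConfigEntryNotReady(status)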
-            await self.http_data(dopast=True)
-
-            if self._loaded_data: return True
+                status = await self.http_data(dopast=True)
             else:
                 _LOGGER.error(f"Solcast site count is zero in load_saved_data(); retrieving sites must have failed, and there is no sites cache")
+                status = 'Solcast site count is zero, add sites'
         except json.decoder.JSONDecodeError:
             _LOGGER.error("The cached data in solcast.json is corrupt in load_saved_data()")
+            status = 'The cached data in solcast.json is corrupt'
         except Exception as e:
             _LOGGER.error("Exception in load_saved_data(): %s", traceback.format_exc())
-        return False
+            status = 'Exception in load_saved_data(): %s' % (e,)
+        return status

     async def delete_solcast_file(self, *args):
         _LOGGER.debug(f"Service event to delete old solcast.json file")
@@ -988,9 +990,10 @@ def get_energy_data(self) -> dict[str, Any]:

     async def http_data(self, dopast = False):
         """Request forecast data for all sites"""
         try:
+            status = ''
             if self.get_last_updated_datetime() + timedelta(minutes=15) > dt.now(timezone.utc):
                 _LOGGER.warning(f"Not requesting a forecast from Solcast because time is within fifteen minutes of last update ({self.get_last_updated_datetime().astimezone(self._tz)})")
-                return
+                return 'Not requesting a forecast from Solcast because time is within fifteen minutes of last update'

             failure = False
             sitesAttempted = 0
@@ -1004,6 +1007,7 @@ async def http_data(self, dopast = False):
                     _LOGGER.warning('Forecast update for site %s failed, so not getting remaining sites', site['resource_id'])
                 else:
                     _LOGGER.warning('Forecast update for the last site queued failed (%s), so not getting remaining sites - API use count will look odd', site['resource_id'])
+                    status = 'At least one site forecast get failed'
                 break

         if sitesAttempted > 0 and not failure:
@@ -1020,9 +1024,12 @@ async def http_data(self, dopast = False):
                 _LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast data has not been built")
             else:
                 _LOGGER.error("No Solcast sites were attempted, so forecast data has not been built - check for earlier failure to retrieve sites")
+            status = 'At least one site forecast get failed'
         except Exception as ex:
-            _LOGGER.error("Exception in http_data(): %s - Forecast data has not been built", ex)
+            status = 'Exception in http_data(): %s - Forecast data has not been built' % (ex,)
+            _LOGGER.error(status)
             _LOGGER.error(traceback.format_exc())
+        return status

     async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopast = False):
         """Request forecast data via the Solcast API"""

From a811ffbf6c2c0b5dafb8347a5ce84e794c64cdcb Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Fri, 23 Aug 2024 22:19:42 +1000
Subject: [PATCH 15/38] Status message change

---
 custom_components/solcast_solar/solcastapi.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index 0a8dbd97..b39daa6d 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -511,7 +511,7 @@ async def load_saved_data(self):
                 status = 'Solcast site count is zero, add sites'
         except json.decoder.JSONDecodeError:
             _LOGGER.error("The cached data in solcast.json is corrupt in load_saved_data()")
-            status = 'The cached data in solcast.json is corrupt'
+            status = 'The cached data in /config/solcast.json is corrupted, suggest removing or repairing it'
         except Exception as e:
             _LOGGER.error("Exception in load_saved_data(): %s", traceback.format_exc())
             status = 'Exception in load_saved_data(): %s' % (e,)

From 94874f13f22f0d8218263714699bbafc4addf56d Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Sat, 24 Aug 2024 10:59:38 +1000
Subject: [PATCH 16/38] Drop forecast re-fetch suppress to one minute

---
 custom_components/solcast_solar/solcastapi.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index b39daa6d..87c3b753 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -991,9 +991,10 @@ async def http_data(self, dopast = False):
         """Request forecast data for all sites"""
         try:
             status = ''
-            if self.get_last_updated_datetime() + timedelta(minutes=15) > dt.now(timezone.utc):
-                _LOGGER.warning(f"Not requesting a forecast from Solcast because time is within fifteen minutes of last update ({self.get_last_updated_datetime().astimezone(self._tz)})")
-                return 'Not requesting a forecast from Solcast because time is within fifteen minutes of last update'
+            if self.get_last_updated_datetime() + timedelta(minutes=1) > dt.now(timezone.utc):
+                status = f"Not requesting a forecast from Solcast because time is within one minute of last update ({self.get_last_updated_datetime().astimezone(self._tz)})"
+                _LOGGER.warning(status)
+                return status

             failure = False
             sitesAttempted = 0

From b0eac8114c6df7697ac85196160a496812e09ca5 Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Sat, 24 Aug 2024 16:37:58 +1000
Subject: [PATCH 17/38] Debug forecast data off by default

Reduces log bloat while still providing great detail.

A variable at the top of the script disables/enables the data dump to log.
When troubleshooting an issue, the solcast.json file can be requested instead.
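
A minimal standalone sketch of the gating pattern (the helper name
`dump_response` is illustrative only; in the integration the check lives
inline in fetch_data()):

    import logging

    _LOGGER = logging.getLogger(__name__)

    # Module-level switch: flip to True only when troubleshooting.
    _FORECAST_DEBUG_LOGGING = False

    def dump_response(status: int, body: dict) -> None:
        # Dump the full API payload only when explicitly enabled,
        # otherwise keep the debug log terse.
        if _FORECAST_DEBUG_LOGGING:
            _LOGGER.debug("Returned status %s, data: %s", status, body)
        else:
            _LOGGER.debug("Returned status %s", status)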
--- custom_components/solcast_solar/solcastapi.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 87c3b753..f4a3ef55 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -33,6 +33,7 @@ currentFuncName = lambda n=0: sys._getframe(n + 1).f_code.co_name _SENSOR_DEBUG_LOGGING = False +_FORECAST_DEBUG_LOGGING = False _JSON_VERSION = 4 _LOGGER = logging.getLogger(__name__) @@ -1250,7 +1251,8 @@ async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="", _LOGGER.error(f"The Solcast site cannot be found, status {translate(status)} returned") elif status == 200: d = cast(dict, resp_json) - _LOGGER.debug(f"Status {translate(status)} in fetch_data(), returned: {d}") + if _FORECAST_DEBUG_LOGGING: + _LOGGER.debug(f"Status {translate(status)} in fetch_data(), returned: {d}") return d #await self.format_json_data(d) except ConnectionRefusedError as err: From aef846239ca9d009fd6352e4e737bac81a1fa6e5 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Sat, 24 Aug 2024 16:44:51 +1000 Subject: [PATCH 18/38] Better suppression of forecast dump Retains status --- custom_components/solcast_solar/solcastapi.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index f4a3ef55..87a029d0 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -1251,8 +1251,7 @@ async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="", _LOGGER.error(f"The Solcast site cannot be found, status {translate(status)} returned") elif status == 200: d = cast(dict, resp_json) - if _FORECAST_DEBUG_LOGGING: - _LOGGER.debug(f"Status {translate(status)} in fetch_data(), returned: {d}") + _LOGGER.debug(f"Status {translate(status)} in fetch_data(){', returned: %s' % (str(d),) if _FORECAST_DEBUG_LOGGING else ''}") return d #await self.format_json_data(d) except ConnectionRefusedError as err: From 81baa0547bbe8e2ecb347fde526a340123a665ac Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Sat, 24 Aug 2024 19:15:28 +1000 Subject: [PATCH 19/38] Fix note in sample Apex chart section of readme --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e536804f..6829793d 100644 --- a/README.md +++ b/README.md @@ -422,7 +422,8 @@ The following YAML produces a graph of today's PV generation, PV forecast and PV Customise with appropriate Home Assistant sensors for today's total solar generation and solar panel PV power output. -> [!NOTE] The chart assumes that your Solar PV sensors are in kW, but if some are in W, add the line `transform: "return x / 1000;"` under the entity id to convert the sensor value to kW. +> [!NOTE] +> The chart assumes that your Solar PV sensors are in kW, but if some are in W, add the line `transform: "return x / 1000;"` under the entity id to convert the sensor value to kW. ### Reveal code

Click here From 7e4374e269bf24bed84562d0156a0ec2cd45b7d1 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Sun, 25 Aug 2024 16:43:41 +1000 Subject: [PATCH 20/38] Code cleanup and improved logging --- .gitignore | 1 + custom_components/solcast_solar/solcastapi.py | 118 +++++++++--------- 2 files changed, 59 insertions(+), 60 deletions(-) diff --git a/.gitignore b/.gitignore index d0d0cd07..17b2e6eb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .DS_Store .vscode/settings.json .vscode +.vs __pycache__ \ No newline at end of file diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 87a029d0..7c8aef4c 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -163,9 +163,11 @@ def redact_api_key(self, api_key): def redact_msg_api_key(self, msg, api_key): return msg.replace(api_key, self.redact_api_key(api_key)) - async def write_api_usage_cache_file(self, json_file, json_content, api_key): + async def write_api_usage_cache_file(self, api_key): try: + json_file = self.get_api_usage_cache_filename(api_key) _LOGGER.debug(f"Writing API usage cache file: {self.redact_msg_api_key(json_file, api_key)}") + json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key]} async with aiofiles.open(json_file, 'w') as f: await f.write(json.dumps(json_content, ensure_ascii=False)) except Exception as ex: @@ -181,11 +183,7 @@ def get_api_sites_cache_filename(self, entry_name): async def reset_api_usage(self): for api_key in self._api_used.keys(): self._api_used[api_key] = 0 - await self.write_api_usage_cache_file( - self.get_api_usage_cache_filename(api_key), - {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key]}, - api_key - ) + await self.write_api_usage_cache_file(api_key) async def sites_data(self): """Request sites detail""" @@ -324,27 +322,27 @@ async def sites_usage(self): for i in range(len(sp)): quota[sp[i]] = 10 for spl in sp: - sitekey = spl.strip() - _LOGGER.debug(f"Getting API usage from cache for API key {self.redact_api_key(sitekey)}") - apiCacheFileName = self.get_api_usage_cache_filename(sitekey) + api_key = spl.strip() + _LOGGER.debug(f"Getting API usage from cache for API key {self.redact_api_key(api_key)}") + apiCacheFileName = self.get_api_usage_cache_filename(api_key) _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(apiCacheFileName) else 'does not yet exist')}") if file_exists(apiCacheFileName): async with aiofiles.open(apiCacheFileName) as f: usage = json.loads(await f.read()) - if usage['daily_limit'] != quota[spl]: - usage['daily_limit'] = quota[spl] - await self.write_api_usage_cache_file(apiCacheFileName, usage, sitekey) + self._api_limit[api_key] = usage.get("daily_limit", None) + self._api_used[api_key] = usage.get("daily_limit_consumed", None) + if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache + self._api_limit[api_key] = quota[spl] + await self.write_api_usage_cache_file(api_key) _LOGGER.info(f"API usage cache loaded and updated with new quota") else: _LOGGER.debug(f"API usage cache loaded") else: - _LOGGER.warning(f"No Solcast API usage cache found, creating one assuming zero API used") - usage = {'daily_limit': quota[spl], 'daily_limit_consumed': 0} - await self.write_api_usage_cache_file(apiCacheFileName, usage, sitekey) - - self._api_limit[sitekey] = usage.get("daily_limit", None) - self._api_used[sitekey] = 
usage.get("daily_limit_consumed", None) - _LOGGER.debug(f"API counter for {self.redact_api_key(sitekey)} is {self._api_used[sitekey]}/{self._api_limit[sitekey]}") + _LOGGER.warning(f"No Solcast API usage cache found, creating one and assuming zero API used") + self._api_limit[api_key] = quota[spl] + self._api_used[api_key] = 0 + await self.write_api_usage_cache_file(api_key) + _LOGGER.debug(f"API counter for {self.redact_api_key(api_key)} is {self._api_used[api_key]}/{self._api_limit[api_key]}") except: _LOGGER.error("Exception in sites_usage(): %s", traceback.format_exc()) @@ -356,11 +354,11 @@ async def sites_usage(self): sp = self.options.api_key.split(",") for spl in sp: - sitekey = spl.strip() - params = {"api_key": sitekey} - _LOGGER.debug(f"Getting API limit and usage from solcast for {self.redact_api_key(sitekey)}") + api_key = spl.strip() + params = {"api_key": api_key} + _LOGGER.debug(f"Getting API limit and usage from solcast for {self.redact_api_key(api_key)}") async with async_timeout.timeout(60): - apiCacheFileName = self.get_api_usage_cache_filename(sitekey) + apiCacheFileName = self.get_api_usage_cache_filename(api_key) _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(apiCacheFileName) else 'does not yet exist')}") retries = 3 retry = retries @@ -379,7 +377,10 @@ async def sites_usage(self): except: raise _LOGGER.debug(f"HTTP session returned status {translate(status)} in sites_usage()") if status == 200: - await self.write_api_usage_cache_file(apiCacheFileName, resp_json, sitekey) + d = cast(dict, resp_json) + self._api_limit[api_key] = d.get("daily_limit", None) + self._api_used[api_key] = d.get("daily_limit_consumed", None) + await self.write_api_usage_cache_file(api_key) retry = 0 success = True else: @@ -397,18 +398,19 @@ async def sites_usage(self): async with aiofiles.open(apiCacheFileName) as f: resp_json = json.loads(await f.read()) status = 200 + d = cast(dict, resp_json) + self._api_limit[api_key] = d.get("daily_limit", None) + self._api_used[api_key] = d.get("daily_limit_consumed", None) _LOGGER.info(f"Loaded API usage cache") else: _LOGGER.warning(f"No Solcast API usage cache found") if status == 200: - d = cast(dict, resp_json) - self._api_limit[sitekey] = d.get("daily_limit", None) - self._api_used[sitekey] = d.get("daily_limit_consumed", None) - _LOGGER.debug(f"API counter for {self.redact_api_key(sitekey)} is {self._api_used[sitekey]}/{self._api_limit[sitekey]}") + _LOGGER.debug(f"API counter for {self.redact_api_key(api_key)} is {self._api_used[api_key]}/{self._api_limit[api_key]}") else: - self._api_limit[sitekey] = 10 - self._api_used[sitekey] = 0 + self._api_limit[api_key] = 10 + self._api_used[api_key] = 0 + await self.write_api_usage_cache_file(api_key) raise Exception(f"Gathering site usage failed in sites_usage(). 
Request returned Status code: {translate(status)} - Response: {resp_json}.")

         except json.decoder.JSONDecodeError:
@@ -485,7 +487,7 @@ async def load_saved_data(self):
                     # Some site data does not exist yet so get it
                     _LOGGER.info("New site(s) have been added, so getting forecast data for just those site(s)")
                     for a in ks:
-                        await self.http_data_call(self.get_api_usage_cache_filename(ks[a]), r_id=a, api=ks[a], dopast=True)
+                        await self.http_data_call(r_id=a, api=ks[a], dopast=True)
                     await self.serialize_data()

                 # Check for sites that need to be removed
@@ -1002,7 +1004,7 @@ async def http_data(self, dopast = False):
             for site in self._sites:
                 sitesAttempted += 1
                 _LOGGER.info(f"Getting forecast update for Solcast site {site['resource_id']}")
-                result = await self.http_data_call(self.get_api_usage_cache_filename(site['apikey']), site['resource_id'], site['apikey'], dopast)
+                result = await self.http_data_call(site['resource_id'], site['apikey'], dopast)
                 if not result:
                     failure = True
                     if len(self._sites) > sitesAttempted:
@@ -1033,7 +1035,7 @@ async def http_data(self, dopast = False):
             _LOGGER.error(traceback.format_exc())
         return status

-    async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopast = False):
+    async def http_data_call(self, r_id = None, api = None, dopast = False):
         """Request forecast data via the Solcast API"""
         try:
             lastday = self.get_day_start_utc() + timedelta(days=8)
@@ -1046,7 +1048,7 @@ async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopa
             if dopast:
                 # Run once, for a new install or if the solcast.json file is deleted. This will use up api call quota.
                 ae = None
-                resp_dict = await self.fetch_data(usageCacheFileName, "estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals")
+                resp_dict = await self.fetch_data("estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals")
                 if not isinstance(resp_dict, dict):
                     _LOGGER.error(f"No data was returned for Solcast estimated_actuals so this WILL cause errors...")
                     _LOGGER.error(f"Either your API limit is exhausted, the Internet is down, or networking is misconfigured...")
@@ -1079,7 +1081,7 @@ async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopa
                 }
             )

-                resp_dict = await self.fetch_data(usageCacheFileName, "forecasts", numhours, site=r_id, apikey=api, cachedname="forecasts")
+                resp_dict = await self.fetch_data("forecasts", numhours, site=r_id, apikey=api, cachedname="forecasts")

                 if resp_dict is None:
                     return False
@@ -1152,7 +1154,7 @@ async def http_data_call(self, usageCacheFileName, r_id = None, api = None, dopa

             return False

-    async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="", apikey="", cachedname="forcasts") -> dict[str, Any]:
+    async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedname="forcasts") -> dict[str, Any]:
         """Fetch forecast data"""
         try:
             params = {"format": "json", "api_key": apikey, "hours": hours}
@@ -1166,7 +1168,7 @@ async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="",
                 async with aiofiles.open(apiCacheFileName) as f:
                     resp_json = json.loads(await f.read())
                     status = 200
-                    _LOGGER.debug(f"Got cached file data for site {site}")
+                    _LOGGER.debug(f"Offline cached mode enabled, loaded data for site {site}")
             else:
                 if self._api_used[apikey] < self._api_limit[apikey]:
                     tries = 10
@@ -1189,15 +1191,12 @@ async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="",
                             if rs is not None:
                                 if rs.get('error_code') == 'TooManyRequests':
                                     status = 998
-
_LOGGER.debug(f"Exceeded daily free limit, setting API Counter to {self._api_limit[apikey]}") self._api_used[apikey] = self._api_limit[apikey] - await self.write_api_usage_cache_file(usageCacheFileName, - {"daily_limit": self._api_limit[apikey], "daily_limit_consumed": self._api_used[apikey]}, - apikey) + await self.write_api_usage_cache_file(apikey) break else: + status = 1000 _LOGGER.warning("An unexpected error occurred: %s", rs.get('message')) - status = 1000 # Intentionally not handled below break except: pass @@ -1214,44 +1213,43 @@ async def fetch_data(self, usageCacheFileName, path="error", hours=168, site="", if status == 200: _LOGGER.debug(f"Fetch successful") - _LOGGER.debug(f"API returned data. API Counter incremented from {self._api_used[apikey]} to {self._api_used[apikey] + 1}") - self._api_used[apikey] = self._api_used[apikey] + 1 - await self.write_api_usage_cache_file(usageCacheFileName, - {"daily_limit": self._api_limit[apikey], "daily_limit_consumed": self._api_used[apikey]}, - apikey) + _LOGGER.debug(f"API returned data, API counter incremented from {self._api_used[apikey]} to {self._api_used[apikey] + 1}") + self._api_used[apikey] += 1 + await self.write_api_usage_cache_file(apikey) resp_json = await resp.json(content_type=None) if self.apiCacheEnabled: async with aiofiles.open(apiCacheFileName, 'w') as f: await f.write(json.dumps(resp_json, ensure_ascii=False)) - elif status == 998: - _LOGGER.error(f"The Solcast API use quota has been exceeded, attempt failed") + elif status == 998: # Exceeded API limit + _LOGGER.error(f"API allowed polling limit has been exceeded, API counter set to {self._api_used[apikey]}/{self._api_limit[apikey]}") + return None + elif status == 999: # Attempts exhausted + _LOGGER.error(f"API was tried {tries} times, but all attempts failed") return None - elif status == 999: - _LOGGER.error(f"The Solcast API was tried {tries} times, but all attempts have failed") + elif status == 1000: # An unexpected response return None else: - _LOGGER.error(f"Solcast API returned status {translate(status)}. API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") + _LOGGER.error(f"API returned status {translate(status)}, API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") return None else: - _LOGGER.warning(f"API limit exceeded, not getting forecast") + _LOGGER.warning(f"API polling limit exhausted, not getting forecast, API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") return None - _LOGGER.debug(f"HTTP session returned data type in fetch_data() is {type(resp_json)}") - _LOGGER.debug(f"HTTP session status in fetch_data() is {translate(status)}") + _LOGGER.debug(f"HTTP session returned data type {type(resp_json)}") + _LOGGER.debug(f"HTTP session status {translate(status)}") if status == 429: - _LOGGER.warning("Solcast is too busy or exceeded API allowed polling limit, API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") + _LOGGER.warning("Solcast is too busy, try again later") elif status == 400: - _LOGGER.warning( - "Status {translate(status)}: The Solcast site is likely missing capacity, please specify capacity or provide historic data for tuning." 
-            )
+            _LOGGER.warning(f"Status {translate(status)}: The Solcast site is likely missing capacity, please specify capacity or provide historic data for tuning")
         elif status == 404:
             _LOGGER.error(f"The Solcast site cannot be found, status {translate(status)} returned")
         elif status == 200:
             d = cast(dict, resp_json)
-            _LOGGER.debug(f"Status {translate(status)} in fetch_data(){', returned: %s' % (str(d),) if _FORECAST_DEBUG_LOGGING else ''}")
+            if _FORECAST_DEBUG_LOGGING:
+                _LOGGER.debug('HTTP session returned: %s' % (str(d),))
             return d
             #await self.format_json_data(d)
     except ConnectionRefusedError as err:

From 727f3dd47146412952a8c131028d039d27bb1d8c Mon Sep 17 00:00:00 2001
From: Steve Saunders
Date: Mon, 26 Aug 2024 23:51:49 +1000
Subject: [PATCH 21/38] Pylint coding style consistency updates

---
 custom_components/solcast_solar/__init__.py   |  90 ++--
 .../solcast_solar/config_flow.py              | 161 +++---
 custom_components/solcast_solar/const.py      |   3 +-
 .../solcast_solar/coordinator.py              |  70 ++-
 .../solcast_solar/diagnostics.py              |   6 +-
 custom_components/solcast_solar/energy.py     |   3 +
 custom_components/solcast_solar/recorder.py   |   3 +
 custom_components/solcast_solar/select.py     |  16 +-
 custom_components/solcast_solar/sensor.py     |  57 +-
 custom_components/solcast_solar/solcastapi.py | 486 ++++++++++--------
 custom_components/solcast_solar/spline.py     |  72 +--
 .../solcast_solar/system_health.py            |   3 +
 12 files changed, 535 insertions(+), 435 deletions(-)

diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py
index 833325e7..8d18bee7 100644
--- a/custom_components/solcast_solar/__init__.py
+++ b/custom_components/solcast_solar/__init__.py
@@ -1,14 +1,17 @@
 """Support for Solcast PV forecast."""

+# pylint: disable=C0304, C0321, E0401, E1135, W0212, W0613, W0702, W0718
+
 import logging
 import traceback
 import random
 import os
 import json
-import aiofiles
-import os.path as path
 from datetime import timedelta
+from typing import Final

+import aiofiles
+import voluptuous as vol
 from homeassistant import loader
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, Platform
@@ -45,10 +48,6 @@ from .coordinator import SolcastUpdateCoordinator
 from .solcastapi import ConnectionOptions, SolcastApi

-from typing import Final
-
-import voluptuous as vol
-
 PLATFORMS = [Platform.SENSOR, Platform.SELECT,]

 _LOGGER = logging.getLogger(__name__)
@@ -97,7 +96,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     # Introduced in core 2024.6.0: async_get_time_zone
     try:
-        dt_util.async_get_time_zone
+        dt_util.async_get_time_zone # pylint: disable=W0104
         asynctz = True
     except:
         asynctz = False
@@ -110,7 +109,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         entry.options[CONF_API_KEY],
         entry.options[API_QUOTA],
         SOLCAST_URL,
-        hass.config.path('%s/solcast.json' % os.path.abspath(os.path.join(os.path.dirname(__file__) ,"../.."))),
+        hass.config.path(f"{os.path.abspath(os.path.join(os.path.dirname(__file__) ,'../..'))}/solcast.json"),
         tz,
         optdamp,
         entry.options.get(CUSTOM_HOUR_SENSOR, 1),
@@ -128,7 +127,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     try:
         await solcast.sites_data()
-        if solcast._sites_loaded: await solcast.sites_usage()
+        if solcast._sites_loaded:
+            await solcast.sites_usage()
     except Exception as ex:
         raise ConfigEntryNotReady(f"Getting sites data failed: {ex}") from ex

@@ -139,22 +139,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if status != '':
         raise ConfigEntryNotReady(status)

-    _VERSION = ""
     try:
+        _VERSION = "" # pylint: disable=C0103
         integration = await loader.async_get_integration(hass, DOMAIN)
-        _VERSION = str(integration.version)
-        _LOGGER.info(
-            f"\n{'-'*67}\n"
-            f"Solcast integration version: {_VERSION}\n\n"
-            f"This is a custom integration. When troubleshooting a problem, after\n"
-            f"reviewing open and closed issues, and the discussions, check the\n"
-            f"required automation is functioning correctly and try enabling debug\n"
-            f"logging to see more. Troubleshooting tips available at:\n"
-            f"https://github.com/BJReplay/ha-solcast-solar/discussions/38\n\n"
-            f"Beta versions may also have addressed some issues so look at those.\n\n"
-            f"If all else fails, then open an issue and our community will try to\n"
-            f"help: https://github.com/BJReplay/ha-solcast-solar/issues\n"
-            f"{'-'*67}")
+        _VERSION = str(integration.version) # pylint: disable=C0103
+        _LOGGER.info('''
+%s
+Solcast integration version: %s
+
+This is a custom integration. When troubleshooting a problem, after
+reviewing open and closed issues, and the discussions, check the
+required automation is functioning correctly and try enabling debug
+logging to see more. Troubleshooting tips available at:
+https://github.com/BJReplay/ha-solcast-solar/discussions/38
+
+Beta versions may also have addressed some issues so look at those.
+
+If all else fails, then open an issue and our community will try to
+help: https://github.com/BJReplay/ha-solcast-solar/issues
+%s''', '-'*67, _VERSION, '-'*67)
     except loader.IntegrationNotFound:
         pass

@@ -170,13 +170,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     entry.async_on_unload(entry.add_update_listener(async_update_options))

-    _LOGGER.debug(f"UTC times are converted to {hass.config.time_zone}")
+    _LOGGER.debug("UTC times are converted to %s", hass.config.time_zone)

     if options.hard_limit < 100:
-        _LOGGER.info(
-            f"Solcast inverter hard limit value has been set. If the forecasts and graphs are not as you expect, try running the service 'solcast_solar.remove_hard_limit' to remove this setting. "
-            f"This setting is really only for advanced quirky solar setups."
-        )
+        _LOGGER.info("Solcast inverter hard limit value has been set.
If the forecasts and graphs are not as you expect, remove this setting") # If the integration has failed for some time and then is restarted retrieve forecasts if solcast.get_api_used_count() == 0 and solcast.get_last_updated_datetime() < solcast.get_day_start_utc() - timedelta(days=1): @@ -187,24 +184,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator._dataUpdated = True await coordinator.update_integration_listeners() coordinator._dataUpdated = False - except Exception as ex: - _LOGGER.error("Exception force fetching data on stale start: %s", ex) + except Exception as e: + _LOGGER.error("Exception force fetching data on stale start: %s", e) _LOGGER.error(traceback.format_exc()) async def handle_service_update_forecast(call: ServiceCall): """Handle service call""" - _LOGGER.info(f"Service call: {SERVICE_UPDATE}") + _LOGGER.info("Service call: %s", SERVICE_UPDATE) await coordinator.service_event_update() async def handle_service_clear_solcast_data(call: ServiceCall): """Handle service call""" - _LOGGER.info(f"Service call: {SERVICE_CLEAR_DATA}") + _LOGGER.info("Service call: %s", SERVICE_CLEAR_DATA) await coordinator.service_event_delete_old_solcast_json_file() async def handle_service_get_solcast_data(call: ServiceCall) -> ServiceResponse: """Handle service call""" try: - _LOGGER.info(f"Service call: {SERVICE_QUERY_FORECAST_DATA}") + _LOGGER.info("Service call: %s", SERVICE_QUERY_FORECAST_DATA) start = call.data.get(EVENT_START_DATETIME, dt_util.now()) end = call.data.get(EVENT_END_DATETIME, dt_util.now()) @@ -221,12 +218,12 @@ async def handle_service_get_solcast_data(call: ServiceCall) -> ServiceResponse: async def handle_service_set_dampening(call: ServiceCall): """Handle service call""" try: - _LOGGER.info(f"Service call: {SERVICE_SET_DAMPENING}") + _LOGGER.info("Service call: %s", SERVICE_SET_DAMPENING) factors = call.data.get(DAMP_FACTOR, None) - if factors == None: - raise HomeAssistantError(f"Error processing {SERVICE_SET_DAMPENING}: Empty factor string") + if factors is None: + raise HomeAssistantError("Error processing {SERVICE_SET_DAMPENING}: Empty factor string") else: factors = factors.strip().replace(" ","") if len(factors) == 0: @@ -254,12 +251,12 @@ async def handle_service_set_dampening(call: ServiceCall): async def handle_service_set_hard_limit(call: ServiceCall): """Handle service call""" try: - _LOGGER.info(f"Service call: {SERVICE_SET_HARD_LIMIT}") + _LOGGER.info("Service call: %s", SERVICE_SET_HARD_LIMIT) hl = call.data.get(HARD_LIMIT, 100000) - if hl == None: + if hl is None: raise HomeAssistantError(f"Error processing {SERVICE_SET_HARD_LIMIT}: Empty hard limit value") else: val = int(hl) @@ -271,15 +268,15 @@ async def handle_service_set_hard_limit(call: ServiceCall): # solcast._hardlimit = val hass.config_entries.async_update_entry(entry, options=opt) - except ValueError: - raise HomeAssistantError(f"Error processing {SERVICE_SET_HARD_LIMIT}: Hard limit value not a positive number") + except ValueError as err: + raise HomeAssistantError(f"Error processing {SERVICE_SET_HARD_LIMIT}: Hard limit value not a positive number") from err except intent.IntentHandleError as err: raise HomeAssistantError(f"Error processing {SERVICE_SET_DAMPENING}: {err}") from err async def handle_service_remove_hard_limit(call: ServiceCall): """Handle service call""" try: - _LOGGER.info(f"Service call: {SERVICE_REMOVE_HARD_LIMIT}") + _LOGGER.info("Service call: %s", SERVICE_REMOVE_HARD_LIMIT) opt = {**entry.options} opt[HARD_LIMIT] = 100000 @@ 
-315,7 +312,7 @@ async def handle_service_remove_hard_limit(call: ServiceCall): return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" + """Unload a config entry""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) @@ -330,6 +327,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok async def async_remove_config_entry_device(hass: HomeAssistant, entry: ConfigEntry, device) -> bool: + """Remove ConfigEntry device""" device_registry(hass).async_remove_device(device.id) return True @@ -338,7 +336,7 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry): await hass.config_entries.async_reload(entry.entry_id) async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: - """Migrate old entry.""" + """Migrate old entry""" def upgraded(): _LOGGER.debug("Upgraded to options version %s", config_entry.version) @@ -437,10 +435,10 @@ def upgraded(): new = {**config_entry.options} try: default = [] - configDir = path.abspath(path.join(path.dirname(__file__) ,"../..")) + _config_dir = os.path.abspath(os.path.join(os.path.dirname(__file__) ,"../..")) for spl in new[CONF_API_KEY].split(','): - apiCacheFileName = "%s/solcast-usage%s.json" % (configDir, "" if len(new[CONF_API_KEY].split(',')) < 2 else "-" + spl.strip()) - async with aiofiles.open(apiCacheFileName) as f: + api_cache_filename = f"{_config_dir}/solcast-usage{'' if len(new[CONF_API_KEY].split(',')) < 2 else '-' + spl.strip()}.json" + async with aiofiles.open(api_cache_filename) as f: usage = json.loads(await f.read()) default.append(str(usage['daily_limit'])) default = ','.join(default) diff --git a/custom_components/solcast_solar/config_flow.py b/custom_components/solcast_solar/config_flow.py index a06c87f9..34bf5b03 100755 --- a/custom_components/solcast_solar/config_flow.py +++ b/custom_components/solcast_solar/config_flow.py @@ -1,4 +1,7 @@ """Config flow for Solcast Solar integration""" + +# pylint: disable=C0304, E0401, W0702 + from __future__ import annotations from typing import Any @@ -35,10 +38,10 @@ async def async_step_user( """Handle a flow initiated by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - + if user_input is not None: return self.async_create_entry( - title= TITLE, + title= TITLE, data = {}, options={ CONF_API_KEY: user_input[CONF_API_KEY], @@ -96,18 +99,20 @@ def __init__(self, config_entry: ConfigEntry) -> None: self.config_entry = config_entry self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input=None) -> Any: + """Initialise steps""" + errors = {} if user_input is not None: if "solcast_config_action" in user_input: - nextAction = user_input["solcast_config_action"] - if nextAction == "configure_dampening": + next_action = user_input["solcast_config_action"] + if next_action == "configure_dampening": return await self.async_step_dampen() - elif nextAction == "configure_api": + elif next_action == "configure_api": return await self.async_step_api() - elif nextAction == "configure_customsensor": + elif next_action == "configure_customsensor": return await self.async_step_customsensor() - elif nextAction == "configure_attributes": + elif next_action == "configure_attributes": return await self.async_step_attributes() else: errors["base"] = 
"incorrect_options_action" @@ -132,25 +137,25 @@ async def async_step_api(self, user_input: dict[str, Any] | None = None) -> Flow """Manage the API key/quota""" errors = {} - apiQuota = self.config_entry.options[API_QUOTA] - + api_quota = self.config_entry.options[API_QUOTA] + if user_input is not None: try: - apiQuota = user_input[API_QUOTA] + api_quota = user_input[API_QUOTA] - allConfigData = {**self.config_entry.options} + all_config_data = {**self.config_entry.options} k = user_input["api_key"].replace(" ","").strip() k = ','.join([s for s in k.split(',') if s]) - allConfigData["api_key"] = k - allConfigData[API_QUOTA] = apiQuota + all_config_data["api_key"] = k + all_config_data[API_QUOTA] = api_quota self.hass.config_entries.async_update_entry( self.config_entry, title=TITLE, - options=allConfigData, + options=all_config_data, ) return self.async_create_entry(title=TITLE, data=None) - except Exception as e: + except: errors["base"] = "unknown" return self.async_show_form( @@ -193,7 +198,7 @@ async def async_step_dampen(self, user_input: dict[str, Any] | None = None) -> F damp21 = self.config_entry.options["damp21"] damp22 = self.config_entry.options["damp22"] damp23 = self.config_entry.options["damp23"] - + if user_input is not None: try: damp00 = user_input["damp00"] @@ -221,40 +226,40 @@ async def async_step_dampen(self, user_input: dict[str, Any] | None = None) -> F damp22 = user_input["damp22"] damp23 = user_input["damp23"] - allConfigData = {**self.config_entry.options} - allConfigData["damp00"] = damp00 - allConfigData["damp01"] = damp01 - allConfigData["damp02"] = damp02 - allConfigData["damp03"] = damp03 - allConfigData["damp04"] = damp04 - allConfigData["damp05"] = damp05 - allConfigData["damp06"] = damp06 - allConfigData["damp07"] = damp07 - allConfigData["damp08"] = damp08 - allConfigData["damp09"] = damp09 - allConfigData["damp10"] = damp10 - allConfigData["damp11"] = damp11 - allConfigData["damp12"] = damp12 - allConfigData["damp13"] = damp13 - allConfigData["damp14"] = damp14 - allConfigData["damp15"] = damp15 - allConfigData["damp16"] = damp16 - allConfigData["damp17"] = damp17 - allConfigData["damp18"] = damp18 - allConfigData["damp19"] = damp19 - allConfigData["damp20"] = damp20 - allConfigData["damp21"] = damp21 - allConfigData["damp22"] = damp22 - allConfigData["damp23"] = damp23 + all_config_data = {**self.config_entry.options} + all_config_data["damp00"] = damp00 + all_config_data["damp01"] = damp01 + all_config_data["damp02"] = damp02 + all_config_data["damp03"] = damp03 + all_config_data["damp04"] = damp04 + all_config_data["damp05"] = damp05 + all_config_data["damp06"] = damp06 + all_config_data["damp07"] = damp07 + all_config_data["damp08"] = damp08 + all_config_data["damp09"] = damp09 + all_config_data["damp10"] = damp10 + all_config_data["damp11"] = damp11 + all_config_data["damp12"] = damp12 + all_config_data["damp13"] = damp13 + all_config_data["damp14"] = damp14 + all_config_data["damp15"] = damp15 + all_config_data["damp16"] = damp16 + all_config_data["damp17"] = damp17 + all_config_data["damp18"] = damp18 + all_config_data["damp19"] = damp19 + all_config_data["damp20"] = damp20 + all_config_data["damp21"] = damp21 + all_config_data["damp22"] = damp22 + all_config_data["damp23"] = damp23 self.hass.config_entries.async_update_entry( self.config_entry, title=TITLE, - options=allConfigData, + options=all_config_data, ) - + return self.async_create_entry(title=TITLE, data=None) - except Exception as e: + except: errors["base"] = "unknown" return 
self.async_show_form( @@ -320,22 +325,22 @@ async def async_step_customsensor(self, user_input: dict[str, Any] | None = None errors = {} customhoursensor = self.config_entry.options[CUSTOM_HOUR_SENSOR] - + if user_input is not None: try: customhoursensor = user_input[CUSTOM_HOUR_SENSOR] - allConfigData = {**self.config_entry.options} - allConfigData[CUSTOM_HOUR_SENSOR] = customhoursensor + all_config_data = {**self.config_entry.options} + all_config_data[CUSTOM_HOUR_SENSOR] = customhoursensor self.hass.config_entries.async_update_entry( self.config_entry, title=TITLE, - options=allConfigData, + options=all_config_data, ) - + return self.async_create_entry(title=TITLE, data=None) - except Exception as e: + except: errors["base"] = "unknown" return self.async_show_form( @@ -348,55 +353,55 @@ async def async_step_customsensor(self, user_input: dict[str, Any] | None = None ), errors=errors, ) - + async def async_step_attributes(self, user_input: dict[str, Any] | None = None) -> FlowResult: """Manage the attributes present""" errors = {} - estimateBreakdown = self.config_entry.options[BRK_ESTIMATE] - estimateBreakdown10 = self.config_entry.options[BRK_ESTIMATE10] - estimateBreakdown90 = self.config_entry.options[BRK_ESTIMATE90] - siteBreakdown = self.config_entry.options[BRK_SITE] - halfHourly = self.config_entry.options[BRK_HALFHOURLY] + estimate_breakdown = self.config_entry.options[BRK_ESTIMATE] + estimate_breakdown10 = self.config_entry.options[BRK_ESTIMATE10] + estimate_breakdown90 = self.config_entry.options[BRK_ESTIMATE90] + site_breakdown = self.config_entry.options[BRK_SITE] + half_hourly = self.config_entry.options[BRK_HALFHOURLY] hourly = self.config_entry.options[BRK_HOURLY] - + if user_input is not None: try: - estimateBreakdown = user_input[BRK_ESTIMATE] - estimateBreakdown10 = user_input[BRK_ESTIMATE10] - estimateBreakdown90 = user_input[BRK_ESTIMATE90] - siteBreakdown = user_input[BRK_SITE] - halfHourly = user_input[BRK_HALFHOURLY] + estimate_breakdown = user_input[BRK_ESTIMATE] + estimate_breakdown10 = user_input[BRK_ESTIMATE10] + estimate_breakdown90 = user_input[BRK_ESTIMATE90] + site_breakdown = user_input[BRK_SITE] + half_hourly = user_input[BRK_HALFHOURLY] hourly = user_input[BRK_HOURLY] - allConfigData = {**self.config_entry.options} - allConfigData[BRK_ESTIMATE] = estimateBreakdown - allConfigData[BRK_ESTIMATE10] = estimateBreakdown10 - allConfigData[BRK_ESTIMATE90] = estimateBreakdown90 - allConfigData[BRK_SITE] = siteBreakdown - allConfigData[BRK_HALFHOURLY] = halfHourly - allConfigData[BRK_HOURLY] = hourly + all_config_data = {**self.config_entry.options} + all_config_data[BRK_ESTIMATE] = estimate_breakdown + all_config_data[BRK_ESTIMATE10] = estimate_breakdown10 + all_config_data[BRK_ESTIMATE90] = estimate_breakdown90 + all_config_data[BRK_SITE] = site_breakdown + all_config_data[BRK_HALFHOURLY] = half_hourly + all_config_data[BRK_HOURLY] = hourly self.hass.config_entries.async_update_entry( self.config_entry, title=TITLE, - options=allConfigData, + options=all_config_data, ) - + return self.async_create_entry(title=TITLE, data=None) - except Exception as e: + except: errors["base"] = "unknown" return self.async_show_form( step_id="attributes", data_schema=vol.Schema( { - vol.Required(BRK_ESTIMATE10, description={"suggested_value": estimateBreakdown10}): bool, - vol.Required(BRK_ESTIMATE, description={"suggested_value": estimateBreakdown}): bool, - vol.Required(BRK_ESTIMATE90, description={"suggested_value": estimateBreakdown90}): bool, - vol.Required(BRK_SITE, 
description={"suggested_value": siteBreakdown}): bool, - vol.Required(BRK_HALFHOURLY, description={"suggested_value": halfHourly}): bool, + vol.Required(BRK_ESTIMATE10, description={"suggested_value": estimate_breakdown10}): bool, + vol.Required(BRK_ESTIMATE, description={"suggested_value": estimate_breakdown}): bool, + vol.Required(BRK_ESTIMATE90, description={"suggested_value": estimate_breakdown90}): bool, + vol.Required(BRK_SITE, description={"suggested_value": site_breakdown}): bool, + vol.Required(BRK_HALFHOURLY, description={"suggested_value": half_hourly}): bool, vol.Required(BRK_HOURLY, description={"suggested_value": hourly}): bool, } ), diff --git a/custom_components/solcast_solar/const.py b/custom_components/solcast_solar/const.py index 296ff12c..8233199b 100755 --- a/custom_components/solcast_solar/const.py +++ b/custom_components/solcast_solar/const.py @@ -1,5 +1,7 @@ """Constants for the Solcast Solar integration.""" +# pylint: disable=C0304, E0401 + from __future__ import annotations from typing import Final @@ -10,7 +12,6 @@ TITLE = "Solcast Solar" SOLCAST_URL = "https://api.solcast.com.au" - ATTR_ENTRY_TYPE: Final = "entry_type" ENTRY_TYPE_SERVICE: Final = "service" diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index 372af597..1c713397 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -1,7 +1,12 @@ """The Solcast Solar coordinator""" + +# pylint: disable=C0302, C0304, C0321, E0401, R0902, R0914, W0212, W0105, W0613, W0702, W0706, W0719 + from __future__ import annotations from datetime import datetime as dt +from typing import Any, Dict + import logging import traceback @@ -25,9 +30,9 @@ def __init__(self, hass: HomeAssistant, solcast: SolcastApi, version: str) -> No self._hass = hass self._previousenergy = None self._version = version - self._lastDay = None - self._dayChanged = False - self._dataUpdated = False + self._last_day = None + self._date_changed = False + self._data_updated = False super().__init__( hass, @@ -40,68 +45,76 @@ async def _async_update_data(self): """Update data via library""" return self.solcast._data - async def setup(self): + async def setup(self) -> None: + """Set up time change tracking""" d={} self._previousenergy = d - self._lastDay = dt.now(self.solcast._tz).day + self._last_day = dt.now(self.solcast._tz).day try: #4.0.18 - added reset usage call to reset usage sensors at UTC midnight async_track_utc_time_change(self._hass, self.update_utcmidnight_usage_sensor_data, hour=0,minute=0,second=0) async_track_utc_time_change(self._hass, self.update_integration_listeners, minute=range(0, 60, 5), second=0) - except Exception as error: + except: _LOGGER.error("Exception in Solcast coordinator setup: %s", traceback.format_exc()) - async def update_integration_listeners(self, *args): + async def update_integration_listeners(self, *args) -> None: + """Get updated sensor values""" try: - crtDay = dt.now(self.solcast._tz).day - self._dateChanged = (crtDay != self._lastDay) - if self._dateChanged: - self._lastDay = crtDay + current_day = dt.now(self.solcast._tz).day + self._date_changed = current_day != self._last_day + if self._date_changed: + self._last_day = current_day #4.0.41 - recalculate splines at midnight local await self.update_midnight_spline_recalc() self.async_update_listeners() - except Exception: + except: #_LOGGER.error("update_integration_listeners: %s", traceback.format_exc()) pass - async def 
update_utcmidnight_usage_sensor_data(self, *args): + async def update_utcmidnight_usage_sensor_data(self, *args) -> None: + """Resets tracked API usage at midnight UTC""" try: await self.solcast.reset_api_usage() - except Exception: + except: #_LOGGER.error("Exception in update_utcmidnight_usage_sensor_data(): %s", traceback.format_exc()) pass - async def update_midnight_spline_recalc(self, *args): + async def update_midnight_spline_recalc(self, *args) -> None: + """Re-calculates splines at midnight local time""" try: _LOGGER.debug('Recalculating splines') await self.solcast.spline_moments() await self.solcast.spline_remaining() - except Exception: + except: _LOGGER.error("Exception in update_midnight_spline_recalc(): %s", traceback.format_exc()) - pass - async def service_event_update(self, *args): + async def service_event_update(self, *args) -> None: + """Get updated forecast data when requested by a service call""" try: #await self.solcast.sites_weather() await self.solcast.http_data(dopast=False) - self._dataUpdated = True + self._data_updated = True await self.update_integration_listeners() - self._dataUpdated = False - except Exception as ex: + self._data_updated = False + except: _LOGGER.error("Exception in service_event_update(): %s", traceback.format_exc()) - async def service_event_delete_old_solcast_json_file(self, *args): + async def service_event_delete_old_solcast_json_file(self, *args) -> None: + """Delete the solcast.json file when requested by a service call""" await self.solcast.delete_solcast_file() async def service_query_forecast_data(self, *args) -> tuple: + """Return forecast data requested by a service call""" return await self.solcast.get_forecast_list(*args) - def get_energy_tab_data(self): + def get_energy_tab_data(self) -> dict[str, Any]: + """Return an energy page compatible dictionary""" return self.solcast.get_energy_data() - def get_sensor_value(self, key=""): + def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | None): + """Return the value of a sensor""" match key: case "peak_w_today": return self.solcast.get_peak_w_day(0) @@ -152,7 +165,8 @@ def get_sensor_value(self, key=""): case _: return None - def get_sensor_extra_attributes(self, key=""): + def get_sensor_extra_attributes(self, key="") -> (Dict[str, Any] | None): + """Return the attributes for a sensor""" match key: case "forecast_this_hour": return self.solcast.get_forecasts_n_hour(0) @@ -207,14 +221,16 @@ def get_sensor_extra_attributes(self, key=""): case _: return None - def get_site_sensor_value(self, roof_id, key): + def get_site_sensor_value(self, roof_id, key) -> (float | None): + """Get the site total for today""" match key: case "site_data": return self.solcast.get_rooftop_site_total_today(roof_id) case _: return None - def get_site_sensor_extra_attributes(self, roof_id, key): + def get_site_sensor_extra_attributes(self, roof_id, key) -> (dict[str, Any] | None): + """Get the attributes for a sensor""" match key: case "site_data": return self.solcast.get_rooftop_site_extra_data(roof_id) diff --git a/custom_components/solcast_solar/diagnostics.py b/custom_components/solcast_solar/diagnostics.py index 057a0be4..73c89a80 100644 --- a/custom_components/solcast_solar/diagnostics.py +++ b/custom_components/solcast_solar/diagnostics.py @@ -1,4 +1,7 @@ """Support for the Solcast diagnostics.""" + +# pylint: disable=C0304, E0401, W0212 + from __future__ import annotations from typing import Any @@ -30,5 +33,4 @@ async def async_get_config_entry_diagnostics( "data": 
(coordinator.data, TO_REDACT), "energy_history_graph": coordinator._previousenergy, "energy_forecasts_graph": coordinator.solcast._dataenergy["wh_hours"], - } - + } \ No newline at end of file diff --git a/custom_components/solcast_solar/energy.py b/custom_components/solcast_solar/energy.py index 23715826..2771cc05 100755 --- a/custom_components/solcast_solar/energy.py +++ b/custom_components/solcast_solar/energy.py @@ -1,4 +1,7 @@ """Energy platform""" + +# pylint: disable=C0304, E0401 + from __future__ import annotations import logging diff --git a/custom_components/solcast_solar/recorder.py b/custom_components/solcast_solar/recorder.py index 8a8d2a3d..9f82f5a0 100644 --- a/custom_components/solcast_solar/recorder.py +++ b/custom_components/solcast_solar/recorder.py @@ -1,4 +1,7 @@ """Integration platform for recorder.""" + +# pylint: disable=C0304, E0401, W0613 + from __future__ import annotations from homeassistant.core import HomeAssistant, callback diff --git a/custom_components/solcast_solar/select.py b/custom_components/solcast_solar/select.py index 89feaa0f..60dbda73 100644 --- a/custom_components/solcast_solar/select.py +++ b/custom_components/solcast_solar/select.py @@ -1,4 +1,7 @@ """Selector to allow users to select the pv_ data field to use for calcualtions.""" + +# pylint: disable=C0304, E0401, W0212 + import logging from enum import IntEnum @@ -32,7 +35,6 @@ class PVEstimateMode(IntEnum): ESTIMATE - Default forecasts ESTIMATE10 = Forecasts 10 - cloudier than expected scenario ESTIMATE90 = Forecasts 90 - less cloudy than expected scenario - """ ESTIMATE = 0 @@ -63,12 +65,12 @@ async def async_setup_entry( entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - + """Setup entry""" coordinator: SolcastUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] try: est_mode = coordinator.solcast.options.key_estimate - except (ValueError): + except ValueError: _LOGGER.debug("Could not read estimate mode", exc_info=True) else: entity = EstimateModeEntity( @@ -84,7 +86,7 @@ async def async_setup_entry( class EstimateModeEntity(SelectEntity): """Entity representing the solcast estimate field to use for calculations.""" - _attr_attribution = ATTRIBUTION + _attr_attribution = ATTRIBUTION _attr_should_poll = False _attr_has_entity_name = True @@ -103,7 +105,7 @@ def __init__( self.entity_description = entity_description self._attr_unique_id = f"{entity_description.key}" - + self._attr_options = supported_options self._attr_current_option = current_option @@ -114,7 +116,7 @@ def __init__( self._attr_device_info = { ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)}, - ATTR_NAME: "Solcast PV Forecast", + ATTR_NAME: "Solcast PV Forecast", ATTR_MANUFACTURER: "BJReplay", ATTR_MODEL: "Solcast PV Forecast", ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE, @@ -130,4 +132,4 @@ async def async_select_option(self, option: str) -> None: new = {**self._entry.options} new[KEY_ESTIMATE] = option - self.coordinator._hass.config_entries.async_update_entry(self._entry, options=new) + self.coordinator._hass.config_entries.async_update_entry(self._entry, options=new) \ No newline at end of file diff --git a/custom_components/solcast_solar/sensor.py b/custom_components/solcast_solar/sensor.py index 4bbdd531..364f671b 100755 --- a/custom_components/solcast_solar/sensor.py +++ b/custom_components/solcast_solar/sensor.py @@ -1,5 +1,7 @@ """Support for Solcast PV forecast sensors.""" +# pylint: disable=C0304, E0401, W0212, W0718 + from __future__ import annotations import logging @@ -235,10 +237,12 @@ } class 
SensorUpdatePolicy(Enum): + """Sensor update policy""" DEFAULT = 0 EVERY_TIME_INTERVAL = 1 -def getSensorUpdatePolicy(key) -> SensorUpdatePolicy: +def get_sensor_update_policy(key) -> SensorUpdatePolicy: + """Get the sensor update policy""" match key: case ( "forecast_this_hour" | @@ -264,7 +268,7 @@ async def async_setup_entry( coordinator: SolcastUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] entities = [] - for sensor_types in SENSORS: + for sensor_types, _ in SENSORS.items(): sen = SolcastSensor(coordinator, SENSORS[sensor_types], entry) entities.append(sen) @@ -310,7 +314,7 @@ def __init__( self.entity_description = entity_description self.coordinator = coordinator - self.update_policy = getSensorUpdatePolicy(entity_description.key) + self.update_policy = get_sensor_update_policy(entity_description.key) self._attr_unique_id = f"{entity_description.key}" self._attributes = {} @@ -318,10 +322,8 @@ def __init__( try: self._sensor_data = coordinator.get_sensor_value(entity_description.key) - except Exception as ex: - _LOGGER.error( - f"Unable to get sensor value {ex} %s", traceback.format_exc() - ) + except Exception as e: + _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) self._sensor_data = None if self._sensor_data is None: @@ -347,10 +349,8 @@ def extra_state_attributes(self): return self.coordinator.get_sensor_extra_attributes( self.entity_description.key ) - except Exception as ex: - _LOGGER.error( - f"Unable to get sensor value {ex} %s", traceback.format_exc() - ) + except Exception as e: + _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) return None @property @@ -373,16 +373,14 @@ def _handle_coordinator_update(self) -> None: # these sensors update when the date changed or when there is new data if self.update_policy == SensorUpdatePolicy.DEFAULT and not (self.coordinator._dateChanged or self.coordinator._dataUpdated) : - return + return try: self._sensor_data = self.coordinator.get_sensor_value( self.entity_description.key ) - except Exception as ex: - _LOGGER.error( - f"Unable to get sensor value {ex} %s", traceback.format_exc() - ) + except Exception as e: + _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) self._sensor_data = None if self._sensor_data is None: @@ -394,10 +392,17 @@ def _handle_coordinator_update(self) -> None: @dataclass class RooftopSensorEntityDescription(SensorEntityDescription): + """Representation of a rooftop entity description""" + key: str | None = None + name: str | None = None + icon: str | None = None + device_class: SensorDeviceClass = SensorDeviceClass.ENERGY + native_unit_of_measurement: UnitOfEnergy = UnitOfEnergy.KILO_WATT_HOUR + suggested_display_precision: int = 2 rooftop_id: str | None = None class RooftopSensor(CoordinatorEntity, SensorEntity): - """Representation of a Solcast Sensor device.""" + """Representation of a rooftop sensor device""" _attr_attribution = ATTRIBUTION @@ -423,10 +428,8 @@ def __init__( try: self._sensor_data = coordinator.get_site_sensor_value(self.rooftop_id, key) - except Exception as ex: - _LOGGER.error( - f"Unable to get sensor value {ex} %s", traceback.format_exc() - ) + except Exception as e: + _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) self._sensor_data = None self._attr_device_info = { @@ -464,10 +467,8 @@ def extra_state_attributes(self): self.rooftop_id, self.key, ) - except Exception as ex: - _LOGGER.error( - f"Unable to get sensor value {ex} %s", traceback.format_exc() - ) + 
except Exception as e:
+            _LOGGER.error("Unable to get sensor attributes: %s: %s", e, traceback.format_exc())
             return None
 
     @property
@@ -495,9 +496,7 @@ def _handle_coordinator_update(self) -> None:
                 self.rooftop_id,
                 self.key,
             )
-        except Exception as ex:
-            _LOGGER.error(
-                f"Unable to get sensor value {ex} %s", traceback.format_exc()
-            )
+        except Exception as e:
+            _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc())
             self._sensor_data = None
         self.async_write_ha_state()
\ No newline at end of file
diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py
index 7c8aef4c..3a6f13ea 100644
--- a/custom_components/solcast_solar/solcastapi.py
+++ b/custom_components/solcast_solar/solcastapi.py
@@ -1,8 +1,10 @@
 """Solcast API"""
+
+# pylint: disable=C0302, C0304, C0321, E0401, R0902, R0914, W0105, W0702, W0706, W0718, W0719
+
 from __future__ import annotations
 
 import asyncio
-import aiofiles
 import copy
 import json
 import logging
@@ -13,7 +15,6 @@
 import traceback
 import random
 import re
-from .spline import cubic_interp
 from dataclasses import dataclass
 from datetime import datetime as dt
 from datetime import timedelta, timezone
@@ -23,10 +24,13 @@
 from typing import Any, Dict, cast
 
 import async_timeout
+import aiofiles
 from aiohttp import ClientConnectionError, ClientSession
 from aiohttp.client_reqrep import ClientResponse
 from isodate import parse_datetime
 
+from .spline import cubic_interp
+
 # For current func name, specify 0 or no argument
 # For name of caller of current func, specify 1
 # For name of caller of caller of current func, specify 2, etc.
@@ -34,21 +38,27 @@
 
 _SENSOR_DEBUG_LOGGING = False
 _FORECAST_DEBUG_LOGGING = False
+_SPLINE_DEBUG_LOGGING = False
 
 _JSON_VERSION = 4
 _LOGGER = logging.getLogger(__name__)
 
 class DateTimeEncoder(json.JSONEncoder):
+    """JSON encoder that serialises datetime values as ISO format strings"""
     def default(self, o):
         if isinstance(o, dt):
            return o.isoformat()
+        else:
+            return None
 
 class JSONDecoder(json.JSONDecoder):
-    def __init__(self, *args, **kwargs):
+    """JSON decoder helper"""
+    def __init__(self, *args, **kwargs) -> None:
         json.JSONDecoder.__init__(
             self, object_hook=self.object_hook, *args, **kwargs)
 
-    def object_hook(self, obj):
+    def object_hook(self, obj) -> dict:
+        """Convert period_start strings back into datetime objects"""
         ret = {}
         for key, value in obj.items():
             if key in {'period_start'}:
@@ -57,12 +67,14 @@ def object_hook(self, obj):
                 ret[key] = value
         return ret
 
+# HTTP status code translation.
+# A 418 error is included here for fun. This was included in RFC2324#section-2.3.2 as an April Fools joke in 1998.
 statusTranslate = {
     200: 'Success',
     401: 'Unauthorized',
     403: 'Forbidden',
     404: 'Not found',
-    418: 'I\'m a teapot', # Included here for fun. An April Fools joke in 1998. 
Included in RFC2324#section-2.3.2 + 418: 'I\'m a teapot', 429: 'Try again later', 500: 'Internal web server error', 501: 'Not implemented', @@ -71,8 +83,9 @@ def object_hook(self, obj): 504: 'Gateway timeout', } -def translate(status): - return ('%s/%s' % (str(status), statusTranslate[status], )) if statusTranslate.get(status) else status +def translate(status) -> str | Any: + """Translate HTTP status code to a human-readable translation""" + return (f"{str(status)}/{statusTranslate[status]}") if statusTranslate.get(status) else status @dataclass @@ -103,12 +116,12 @@ def __init__( self, aiohttp_session: ClientSession, options: ConnectionOptions, - apiCacheEnabled: bool = False + api_cache_enabled: bool = False ): """Device init""" self.aiohttp_session = aiohttp_session self.options = options - self.apiCacheEnabled = apiCacheEnabled + self.api_cache_enabled = api_cache_enabled self._sites_loaded = False self._sites = [] self._data = {'siteinfo': {}, 'last_updated': dt.fromtimestamp(0, timezone.utc).isoformat()} @@ -116,14 +129,12 @@ def __init__( self._api_used = {} self._api_limit = {} self._filename = options.file_path - self.configDir = dirname(self._filename) - _LOGGER.debug("Configuration directory is %s", self.configDir) + self._config_dir = dirname(self._filename) self._tz = options.tz self._dataenergy = {} self._data_forecasts = [] self._site_data_forecasts = {} self._forecasts_start_idx = 0 - self._detailedForecasts = [] self._loaded_data = False self._serialize_lock = asyncio.Lock() self._damp = options.dampening @@ -135,6 +146,7 @@ def __init__( self.fc_moment = {} self.fc_remaining = {} #self._weather = "" + _LOGGER.debug("Configuration directory is %s", self._config_dir) async def serialize_data(self): """Serialize data to file""" @@ -153,41 +165,46 @@ async def serialize_data(self): async with aiofiles.open(self._filename, "w") as f: await f.write(json.dumps(self._data, ensure_ascii=False, cls=DateTimeEncoder)) _LOGGER.debug("Saved forecast cache") - except Exception as ex: - _LOGGER.error("Exception in serialize_data(): %s", ex) + except Exception as e: + _LOGGER.error("Exception in serialize_data(): %s", e) _LOGGER.error(traceback.format_exc()) - def redact_api_key(self, api_key): + def redact_api_key(self, api_key) -> str: + """Obfuscate API key""" return '*'*6 + api_key[-6:] - def redact_msg_api_key(self, msg, api_key): + def redact_msg_api_key(self, msg, api_key) -> str: + """Obfuscate API key in messages""" return msg.replace(api_key, self.redact_api_key(api_key)) async def write_api_usage_cache_file(self, api_key): + """Serialise the usage cache file""" try: json_file = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug(f"Writing API usage cache file: {self.redact_msg_api_key(json_file, api_key)}") + _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key]} async with aiofiles.open(json_file, 'w') as f: await f.write(json.dumps(json_content, ensure_ascii=False)) - except Exception as ex: - _LOGGER.error("Exception in write_api_usage_cache_file(): %s", ex) + except Exception as e: + _LOGGER.error("Exception in write_api_usage_cache_file(): %s", e) _LOGGER.error(traceback.format_exc()) def get_api_usage_cache_filename(self, entry_name): - return "%s/solcast-usage%s.json" % (self.configDir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # For more than one API key use separate files + """Build a fully 
qualified API usage cache filename using a simple name or separate files for more than one API key""" + return '%s/solcast-usage%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 def get_api_sites_cache_filename(self, entry_name): - return "%s/solcast-sites%s.json" % (self.configDir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # Ditto + """Build a fully qualified site details cache filename using a simple name or separate files for more than one API key""" + return '%s/solcast-sites%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 async def reset_api_usage(self): - for api_key in self._api_used.keys(): + """Reset the daily API usage counter""" + for api_key, _ in self._api_used.items(): self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key) async def sites_data(self): - """Request sites detail""" - + """Request site details""" try: def redact(s): return re.sub(r'itude\': [0-9\-\.]+', 'itude\': **.******', s) @@ -195,38 +212,39 @@ def redact(s): for spl in sp: params = {"format": "json", "api_key": spl.strip()} async with async_timeout.timeout(60): - apiCacheFileName = self.get_api_sites_cache_filename(spl) - _LOGGER.debug(f"{'Sites cache ' + ('exists' if file_exists(apiCacheFileName) else 'does not yet exist')}") - if self.apiCacheEnabled and file_exists(apiCacheFileName): - _LOGGER.debug(f"Loading cached sites data") + api_cache_filename = self.get_api_sites_cache_filename(spl) + _LOGGER.debug("%s", 'Sites cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) + if self.api_cache_enabled and file_exists(api_cache_filename): + _LOGGER.debug("Loading cached sites data") status = 404 - async with aiofiles.open(apiCacheFileName) as f: + async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 else: - _LOGGER.debug(f"Connecting to {self.options.host}/rooftop_sites?format=json&api_key={self.redact_api_key(spl)}") + _LOGGER.debug("Connecting to %s/rooftop_sites?format=json&api_key=%s", self.options.host, self.redact_api_key(spl)) retries = 3 retry = retries success = False - useCacheImmediate = False - cacheExists = file_exists(apiCacheFileName) + use_cache_immediate = False + cache_exists = file_exists(api_cache_filename) while retry >= 0: resp: ClientResponse = await self.aiohttp_session.get( url=f"{self.options.host}/rooftop_sites", params=params, ssl=False ) status = resp.status - _LOGGER.debug(f"HTTP session returned status {translate(status)} in sites_data()") + _LOGGER.debug("HTTP session returned status %s in sites_data(), trying cache", translate(status)) try: resp_json = await resp.json(content_type=None) except json.decoder.JSONDecodeError: _LOGGER.error("JSONDecodeError in sites_data(): Solcast site could be having problems") - except: raise + except: + raise if status == 200: if resp_json['total_records'] > 0: - _LOGGER.debug(f"Writing sites cache") - async with aiofiles.open(apiCacheFileName, 'w') as f: + _LOGGER.debug("Writing sites cache") + async with aiofiles.open(api_cache_filename, 'w') as f: await f.write(json.dumps(resp_json, ensure_ascii=False)) success = True break @@ -234,29 +252,29 @@ def redact(s): _LOGGER.error('No sites for the API key %s are configured at solcast.com', self.redact_api_key(spl)) return else: - if cacheExists: - useCacheImmediate = True + if cache_exists: + use_cache_immediate = True break 
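The flow here — poll the live endpoint a few times, refresh the cache on success, and fall back to the cached copy once retries are exhausted — recurs for both sites and usage data. A minimal standalone sketch of that pattern, assuming a hypothetical `get_with_cache_fallback()` helper and a fixed five-second pause (neither is part of the integration):

```python
import asyncio
import json

import aiofiles
from aiohttp import ClientSession


async def get_with_cache_fallback(session: ClientSession, url: str, cache_file: str, retries: int = 3):
    """Try the live API a few times; on failure fall back to the cached copy, if any."""
    for _attempt in range(retries):
        try:
            async with session.get(url, ssl=False) as resp:
                if resp.status == 200:
                    data = await resp.json(content_type=None)
                    # Refresh the cache on success so a later outage can be survived
                    async with aiofiles.open(cache_file, 'w') as f:
                        await f.write(json.dumps(data, ensure_ascii=False))
                    return data
        except Exception:  # deliberately broad, mirroring the integration's handlers
            pass
        await asyncio.sleep(5)  # fixed pause between attempts
    # Retries exhausted: serve the stale cache rather than nothing
    try:
        async with aiofiles.open(cache_file) as f:
            return json.loads(await f.read())
    except FileNotFoundError:
        return None
```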
if retry > 0: - _LOGGER.debug(f"Will retry get sites, retry {(retries - retry) + 1}") + _LOGGER.debug("Will retry get sites, retry %d", (retries - retry) + 1) await asyncio.sleep(5) retry -= 1 if not success: - if not useCacheImmediate: - _LOGGER.warning(f"Retries exhausted gathering Solcast sites, last call result: {translate(status)}, using cached data if it exists") + if not use_cache_immediate: + _LOGGER.warning("Retries exhausted gathering Solcast sites, last call result: %s, using cached data if it exists", translate(status)) status = 404 - if cacheExists: - async with aiofiles.open(apiCacheFileName) as f: + if cache_exists: + async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 - _LOGGER.info(f"Loaded sites cache for {self.redact_api_key(spl)}") + _LOGGER.info("Loaded sites cache for %s", self.redact_api_key(spl)) else: - _LOGGER.error(f"Cached Solcast sites are not yet available for {self.redact_api_key(spl)} to cope with API call failure") - _LOGGER.error(f"At least one successful API 'get sites' call is needed, so the integration will not function correctly") + _LOGGER.error("Cached Solcast sites are not yet available for %s to cope with API call failure", self.redact_api_key(spl)) + _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly") if status == 200: d = cast(dict, resp_json) - _LOGGER.debug(f"Sites data: {redact(str(d))}") + _LOGGER.debug("Sites data: %s", redact(str(d))) for i in d['sites']: i['apikey'] = spl.strip() #v4.0.14 to stop HA adding a pin to the map @@ -265,8 +283,8 @@ def redact(s): self._sites = self._sites + d['sites'] self._sites_loaded = True else: - _LOGGER.error(f"{self.options.host} HTTP status error {translate(status)} in sites_data() while gathering sites") - raise Exception(f"HTTP sites_data error: Solcast Error gathering sites") + _LOGGER.error("%s HTTP status error %s in sites_data() while gathering sites", self.options.host, translate(status)) + raise Exception("HTTP sites_data error: Solcast Error gathering sites") except ConnectionRefusedError as err: _LOGGER.error("Connection refused in sites_data(): %s", err) except ClientConnectionError as e: @@ -276,14 +294,14 @@ def redact(s): _LOGGER.warning("Retrieving Solcast sites timed out, attempting to continue") error = False for spl in sp: - apiCacheFileName = self.get_api_sites_cache_filename(spl) - cacheExists = file_exists(apiCacheFileName) - if cacheExists: + api_cache_filename = self.get_api_sites_cache_filename(spl) + cache_exists = file_exists(api_cache_filename) + if cache_exists: _LOGGER.info("Loading cached Solcast sites for {self.redact_api_key(spl)}") - async with aiofiles.open(apiCacheFileName) as f: + async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) d = cast(dict, resp_json) - _LOGGER.debug(f"Sites data: {redact(str(d))}") + _LOGGER.debug("Sites data: %s", redact(str(d))) for i in d['sites']: i['apikey'] = spl.strip() #v4.0.14 to stop HA adding a pin to the map @@ -291,19 +309,19 @@ def redact(s): i.pop('latitude', None) self._sites = self._sites + d['sites'] self._sites_loaded = True - _LOGGER.info(f"Loaded sites cache for {self.redact_api_key(spl)}") + _LOGGER.info("Loaded sites cache for %s", self.redact_api_key(spl)) else: error = True - _LOGGER.error(f"Cached sites are not yet available for {self.redact_api_key(spl)} to cope with Solcast API call failure") - _LOGGER.error(f"At least one successful API 'get sites' call is 
needed, so the integration cannot function yet") + _LOGGER.error("Cached sites are not yet available for %s to cope with Solcast API call failure", self.redact_api_key(spl)) + _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration cannot function yet") if error: _LOGGER.error("Timed out getting Solcast sites, and one or more site caches failed to load") _LOGGER.error("This is critical, and the integration cannot function reliably yet") _LOGGER.error("Suggestion: Double check your overall HA configuration, specifically networking related") - except Exception as e: + except: pass except Exception as e: - _LOGGER.error("Exception in sites_data(): %s", traceback.format_exc()) + _LOGGER.error(f"Exception in sites_data(): {e}: %s", traceback.format_exc()) async def sites_usage(self): """Load api usage cache""" @@ -313,38 +331,38 @@ async def sites_usage(self): qt = self.options.api_quota.split(",") try: for i in range(len(sp)): # If only one quota value is present, yet there are multiple sites then use the same quota - if len(qt) < i+1: qt.append(qt[i-1]) + if len(qt) < i+1: + qt.append(qt[i-1]) quota = { sp[i].strip(): int(qt[i].strip()) for i in range(len(qt)) } except Exception as e: - _LOGGER.error('Exception: %s', e) - _LOGGER.warning('Could not interpret API quota configuration string, using default of 10') - quota = {} - for i in range(len(sp)): quota[sp[i]] = 10 + _LOGGER.error("Exception: %s", e) + _LOGGER.warning("Could not interpret API quota configuration string, using default of 10") + quota = {s: 10 for s in sp} for spl in sp: api_key = spl.strip() - _LOGGER.debug(f"Getting API usage from cache for API key {self.redact_api_key(api_key)}") - apiCacheFileName = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(apiCacheFileName) else 'does not yet exist')}") - if file_exists(apiCacheFileName): - async with aiofiles.open(apiCacheFileName) as f: + _LOGGER.debug("Getting API usage from cache for API key %s", self.redact_api_key(api_key)) + api_cache_filename = self.get_api_usage_cache_filename(api_key) + _LOGGER.debug("%s", 'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) + if file_exists(api_cache_filename): + async with aiofiles.open(api_cache_filename) as f: usage = json.loads(await f.read()) self._api_limit[api_key] = usage.get("daily_limit", None) self._api_used[api_key] = usage.get("daily_limit_consumed", None) if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache self._api_limit[api_key] = quota[spl] await self.write_api_usage_cache_file(api_key) - _LOGGER.info(f"API usage cache loaded and updated with new quota") + _LOGGER.info("API usage cache loaded and updated with new quota") else: - _LOGGER.debug(f"API usage cache loaded") + _LOGGER.debug("API usage cache loaded") else: - _LOGGER.warning(f"No Solcast API usage cache found, creating one and assuming zero API used") + _LOGGER.warning("No Solcast API usage cache found, creating one and assuming zero API used") self._api_limit[api_key] = quota[spl] self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key) - _LOGGER.debug(f"API counter for {self.redact_api_key(api_key)} is {self._api_used[api_key]}/{self._api_limit[api_key]}") - except: - _LOGGER.error("Exception in sites_usage(): %s", traceback.format_exc()) + _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) + except 
Exception as e: + _LOGGER.error(f"Exception in sites_usage(): {e}: %s", traceback.format_exc()) ''' async def sites_usage(self): @@ -358,13 +376,13 @@ async def sites_usage(self): params = {"api_key": api_key} _LOGGER.debug(f"Getting API limit and usage from solcast for {self.redact_api_key(api_key)}") async with async_timeout.timeout(60): - apiCacheFileName = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(apiCacheFileName) else 'does not yet exist')}") + api_cache_filename = self.get_api_usage_cache_filename(api_key) + _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')}") retries = 3 retry = retries success = False - useCacheImmediate = False - cacheExists = file_exists(apiCacheFileName) + use_cache_immediate = False + cache_exists = file_exists(api_cache_filename) while retry > 0: resp: ClientResponse = await self.aiohttp_session.get( url=f"{self.options.host}/json/reply/GetUserUsageAllowance", params=params, ssl=False @@ -384,18 +402,18 @@ async def sites_usage(self): retry = 0 success = True else: - if cacheExists: - useCacheImmediate = True + if cache_exists: + use_cache_immediate = True break _LOGGER.debug(f"Will retry GetUserUsageAllowance, retry {(retries - retry) + 1}") await asyncio.sleep(5) retry -= 1 if not success: - if not useCacheImmediate: + if not use_cache_immediate: _LOGGER.warning(f"Timeout getting Solcast API usage allowance, last call result: {translate(status)}, using cached data if it exists") status = 404 - if cacheExists: - async with aiofiles.open(apiCacheFileName) as f: + if cache_exists: + async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 d = cast(dict, resp_json) @@ -464,64 +482,66 @@ async def sites_weather(self): ''' async def load_saved_data(self): + """Load the saved solcast.json data, also checking for new API keys and site removal""" try: status = '' if len(self._sites) > 0: if file_exists(self._filename): async with aiofiles.open(self._filename) as data_file: - jsonData = json.loads(await data_file.read(), cls=JSONDecoder) - json_version = jsonData.get("version", 1) - #self._weather = jsonData.get("weather", "unknown") - _LOGGER.debug(f"The saved data file exists, file type is {type(jsonData)}") + json_data = json.loads(await data_file.read(), cls=JSONDecoder) + json_version = json_data.get("version", 1) + #self._weather = json_data.get("weather", "unknown") + _LOGGER.debug("The saved data file exists, file type is %s", type(json_data)) if json_version == _JSON_VERSION: - self._data = jsonData + self._data = json_data self._loaded_data = True # Check for any new API keys so no sites data yet for those ks = {} for d in self._sites: - if not any(s == d.get('resource_id', '') for s in jsonData['siteinfo']): + if not any(s == d.get('resource_id', '') for s in json_data['siteinfo']): ks[d.get('resource_id')] = d.get('apikey') if len(ks.keys()) > 0: # Some site data does not exist yet so get it _LOGGER.info("New site(s) have been added, so getting forecast data for just those site(s)") - for a in ks: - await self.http_data_call(r_id=a, api=ks[a], dopast=True) + for a, _api_key in ks: + await self.http_data_call(r_id=a, api=_api_key, dopast=True) await self.serialize_data() # Check for sites that need to be removed l = [] - for s in jsonData['siteinfo']: + for s in json_data['siteinfo']: if not any(d.get('resource_id', '') == s for d in self._sites): - _LOGGER.info(f"Solcast site resource 
id {s} is no longer configured, removing saved data from cached file") + _LOGGER.info("Solcast site resource id %s is no longer configured, removing saved data from cached file", s) l.append(s) for ll in l: - del jsonData['siteinfo'][ll] + del json_data['siteinfo'][ll] # Create an up to date forecast await self.buildforecastdata() - _LOGGER.info(f"Loaded solcast.json forecast cache") + _LOGGER.info("Loaded solcast.json forecast cache") if not self._loaded_data: # No file to load - _LOGGER.warning(f"There is no solcast.json to load, so fetching solar forecast, including past forecasts") + _LOGGER.warning("There is no solcast.json to load, so fetching solar forecast, including past forecasts") # Could be a brand new install of the integation, or the file has been removed. Poll once now... status = await self.http_data(dopast=True) else: - _LOGGER.error(f"Solcast site count is zero in load_saved_data(); the get sites must have failed, and there is no sites cache") + _LOGGER.error("Solcast site count is zero in load_saved_data(); the get sites must have failed, and there is no sites cache") status = 'Solcast sites count is zero, add sites' except json.decoder.JSONDecodeError: _LOGGER.error("The cached data in solcast.json is corrupt in load_saved_data()") status = 'The cached data in /config/solcast.json is corrupted, suggest removing or repairing it' except Exception as e: _LOGGER.error("Exception in load_saved_data(): %s", traceback.format_exc()) - status = 'Exception in load_saved_data(): %s' % (e,) + status = f"Exception in load_saved_data(): {e}" return status - async def delete_solcast_file(self, *args): - _LOGGER.debug(f"Service event to delete old solcast.json file") + async def delete_solcast_file(self, *args): # pylint: disable=W0613 + """Service event to delete old solcast.json file""" + _LOGGER.debug("Service event to delete old solcast.json file") try: if file_exists(self._filename): os.remove(self._filename) @@ -531,9 +551,10 @@ async def delete_solcast_file(self, *args): else: _LOGGER.warning("There is no solcast.json to delete") except Exception: - _LOGGER.error(f"Service event to delete old solcast.json file failed") + _LOGGER.error("Service event to delete old solcast.json file failed") async def get_forecast_list(self, *args): + """Service event to get list of forecasts""" try: st_time = time.time() @@ -548,23 +569,22 @@ async def get_forecast_list(self, *args): return tuple( {**d, "period_start": d["period_start"].astimezone(self._tz)} for d in h ) except Exception: - _LOGGER.error(f"Service event to get list of forecasts failed") + _LOGGER.error("Service event to get list of forecasts failed") return None def get_api_used_count(self): - """Return API polling count for this UTC 24hr period""" + """Return total API polling count for this UTC 24hr period (all accounts combined)""" used = 0 - for _, v in self._api_used.items(): used += v + for _, v in self._api_used.items(): + used += v return used def get_api_limit(self): - """Return API polling limit for this account""" - try: - limit = 0 - for _, v in self._api_limit.items(): limit += v - return limit - except Exception: - return None + """Return API polling limit (all accounts combined)""" + limit = 0 + for _, v in self._api_limit.items(): + limit += v + return limit # def get_weather(self): # """Return weather description""" @@ -576,7 +596,8 @@ def get_last_updated_datetime(self) -> dt: def get_rooftop_site_total_today(self, site) -> float: """Return total kW for today for a site""" - if self._tally.get(site) == None: 
_LOGGER.warning(f"Site total kW forecast today is currently unavailable for {site}") + if self._tally.get(site) is None: + _LOGGER.warning("Site total kW forecast today is currently unavailable for %s", site) return self._tally.get(site) def get_rooftop_site_extra_data(self, site = ""): @@ -602,21 +623,25 @@ def get_rooftop_site_extra_data(self, site = ""): return ret def get_now_utc(self): + """Datetime helper""" return dt.now(self._tz).replace(second=0, microsecond=0).astimezone(timezone.utc) def get_interval_start_utc(self, moment): + """Datetime helper""" n = moment.replace(second=0, microsecond=0) return n.replace(minute=0 if n.minute < 30 else 30).astimezone(timezone.utc) def get_hour_start_utc(self): + """Datetime helper""" return dt.now(self._tz).replace(minute=0, second=0, microsecond=0).astimezone(timezone.utc) def get_day_start_utc(self): + """Datetime helper""" return dt.now(self._tz).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(timezone.utc) def get_forecast_day(self, futureday) -> Dict[str, Any]: """Return forecast data for the Nth day ahead""" - noDataError = True + no_data_error = True start_utc = self.get_day_start_utc() + timedelta(days=futureday) end_utc = start_utc + timedelta(days=1) @@ -633,7 +658,7 @@ def get_forecast_day(self, futureday) -> Dict[str, Any]: tup = tuple( {**d, "period_start": d["period_start"].astimezone(self._tz)} for d in h ) if len(tup) < 48: - noDataError = False + no_data_error = False hourlytup = [] for index in range(0,len(tup),2): @@ -648,16 +673,18 @@ def get_forecast_day(self, futureday) -> Dict[str, Any]: x2 = round((tup[index]["pv_estimate10"]), 4) x3 = round((tup[index]["pv_estimate90"]), 4) hourlytup.append({"period_start":tup[index]["period_start"], "pv_estimate":x1, "pv_estimate10":x2, "pv_estimate90":x3}) - except Exception as ex: - _LOGGER.error("Exception in get_forecast_day(): %s", ex) + except Exception as e: + _LOGGER.error("Exception in get_forecast_day(): %s", e) _LOGGER.error(traceback.format_exc()) res = { "dayname": start_utc.astimezone(self._tz).strftime("%A"), - "dataCorrect": noDataError, + "dataCorrect": no_data_error, } - if self.options.attr_brk_halfhourly: res["detailedForecast"] = tup - if self.options.attr_brk_hourly: res["detailedHourly"] = hourlytup + if self.options.attr_brk_halfhourly: + res["detailedForecast"] = tup + if self.options.attr_brk_hourly: + res["detailedHourly"] = hourlytup return res def get_forecast_n_hour(self, n_hour, site=None, _use_data_field=None) -> int: @@ -668,14 +695,17 @@ def get_forecast_n_hour(self, n_hour, site=None, _use_data_field=None) -> int: return res def get_forecasts_n_hour(self, n_hour) -> Dict[str, Any]: + """Return forecast for the Nth hour for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_forecast_n_hour(n_hour, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_n_hour(n_hour, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_n_hour(n_hour, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_n_hour(n_hour, 
_use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_forecast_n_hour(n_hour, _use_data_field=_data_field) return res def get_forecast_custom_hours(self, n_hours, site=None, _use_data_field=None) -> int: @@ -686,14 +716,17 @@ def get_forecast_custom_hours(self, n_hours, site=None, _use_data_field=None) -> return res def get_forecasts_custom_hours(self, n_hour) -> Dict[str, Any]: + """Return forecast for the next N hours for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_forecast_custom_hours(n_hour, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_custom_hours(n_hour, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_custom_hours(n_hour, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_custom_hours(n_hour, _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_forecast_custom_hours(n_hour, _use_data_field=_data_field) return res def get_power_n_mins(self, n_mins, site=None, _use_data_field=None) -> int: @@ -702,14 +735,17 @@ def get_power_n_mins(self, n_mins, site=None, _use_data_field=None) -> int: return round(1000 * self.get_forecast_pv_moment(time_utc, site=site, _use_data_field=_use_data_field)) def get_sites_power_n_mins(self, n_mins) -> Dict[str, Any]: + """Return expected power generation in the next N minutes for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_power_n_mins(n_mins, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_power_n_mins(n_mins, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_power_n_mins(n_mins, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_power_n_mins(n_mins, site=None, _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_power_n_mins(n_mins, site=None, _use_data_field=_data_field) return res def get_peak_w_day(self, n_day, site=None, _use_data_field=None) -> int: @@ -721,14 +757,17 @@ def get_peak_w_day(self, n_day, site=None, _use_data_field=None) -> int: return 0 if res is None else round(1000 * res[_data_field]) def get_sites_peak_w_day(self, n_day) -> Dict[str, Any]: + """Return max kW for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_peak_w_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): 
res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_day(n_day, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_peak_w_day(n_day, site=None, _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_peak_w_day(n_day, site=None, _use_data_field=_data_field) return res def get_peak_w_time_day(self, n_day, site=None, _use_data_field=None) -> dt: @@ -739,14 +778,17 @@ def get_peak_w_time_day(self, n_day, site=None, _use_data_field=None) -> dt: return res if res is None else res["period_start"] def get_sites_peak_w_time_day(self, n_day) -> Dict[str, Any]: + """Return hour of max kW for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_peak_w_time_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_time_day(n_day, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_time_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_peak_w_time_day(n_day, site=None, _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_peak_w_time_day(n_day, site=None, _use_data_field=_data_field) return res def get_forecast_remaining_today(self, site=None, _use_data_field=None) -> float: @@ -758,14 +800,17 @@ def get_forecast_remaining_today(self, site=None, _use_data_field=None) -> float return res def get_forecasts_remaining_today(self) -> Dict[str, Any]: + """Return remaining forecasted production for today for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_forecast_remaining_today(site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_remaining_today(site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_remaining_today(site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_remaining_today(_use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_forecast_remaining_today(_use_data_field=_data_field) return res def get_total_kwh_forecast_day(self, n_day, site=None, _use_data_field=None) -> float: @@ -776,19 +821,23 @@ def get_total_kwh_forecast_day(self, n_day, site=None, _use_data_field=None) -> return res def 
get_sites_total_kwh_forecast_day(self, n_day) -> Dict[str, Any]: + """Return forecast kWh total for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: for site in self._sites: res[site['resource_id']] = self.get_total_kwh_forecast_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_total_kwh_forecast_day(n_day, site=site['resource_id'], _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_total_kwh_forecast_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): res[_data_field.replace('pv_','')] = self.get_total_kwh_forecast_day(n_day, site=None, _use_data_field=_data_field) + if self._estimen.get(_data_field): + res[_data_field.replace('pv_','')] = self.get_total_kwh_forecast_day(n_day, site=None, _use_data_field=_data_field) return res - def get_forecast_list_slice(self, _data, start_utc, end_utc=None, search_past=False): + def get_forecast_list_slice(self, _data, start_utc, end_utc=None, search_past=False) -> tuple[int, int]: """Return pv_estimates list slice (st_i, end_i) for interval""" - if end_utc is None: end_utc = start_utc + timedelta(seconds=1800) + if end_utc is None: + end_utc = start_utc + timedelta(seconds=1800) crt_i = -1 st_i = -1 end_i = len(_data) @@ -809,7 +858,8 @@ def get_forecast_list_slice(self, _data, start_utc, end_utc=None, search_past=Fa end_i = 0 return st_i, end_i - def get_spline(self, spline, st, xx, _data, df, reducing=False): + def get_spline(self, spline, st, xx, _data, df, reducing=False) -> None: + """Build an individual site/forecast confidence spline""" for _data_field in df: if st > 0: y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] @@ -820,8 +870,11 @@ def get_spline(self, spline, st, xx, _data, df, reducing=False): self.sanitise_spline(spline, _data_field, xx, y, reducing=reducing) else: # The list slice was not found, so zero all values in the spline spline[_data_field] = [0] * (len(self._spline_period) * 6) + if _SPLINE_DEBUG_LOGGING: + _LOGGER.debug(str(spline)) - def sanitise_spline(self, spline, _data_field, xx, y, reducing=False): + def sanitise_spline(self, spline, _data_field, xx, y, reducing=False) -> None: + """Ensures that no negative values are returned, and also shifts the spline to account for half-hour average input values""" for j in xx: i = int(j/300) # Suppress negative values @@ -829,18 +882,20 @@ def sanitise_spline(self, spline, _data_field, xx, y, reducing=False): spline[_data_field][i] = 0.0 # Suppress spline bounce if reducing: - if i+1 <= len(xx)-1 and spline[_data_field][i+1] > spline[_data_field][i]: spline[_data_field][i+1] = spline[_data_field][i] + if i+1 <= len(xx)-1 and spline[_data_field][i+1] > spline[_data_field][i]: + spline[_data_field][i+1] = spline[_data_field][i] else: k = int(math.floor(j/1800)) - if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: spline[_data_field][i] = 0.0 + if k+1 <= len(y)-1 and y[k] == 0 and y[k+1] == 0: + spline[_data_field][i] = 0.0 # Shift right by fifteen minutes because 30-minute averages, padding as appropriate if reducing: spline[_data_field] = ([spline[_data_field][0]]*3) + spline[_data_field] else: spline[_data_field] = ([0]*3) + spline[_data_field] 
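The clean-up rules in `sanitise_spline()` are easier to see on a toy series than in place. A simplified rendering under stated assumptions — the `sanitise()` helper and its five-point input are made up for illustration, while the real function indexes 300-second steps across `self._spline_period`:

```python
def sanitise(points: list[float], reducing: bool = False) -> list[float]:
    """Apply the spline clean-up rules to a toy series of 5-minute values."""
    out = points[:]
    for i in range(len(out)):
        if out[i] < 0:
            out[i] = 0.0         # suppress negative interpolation artefacts
        if reducing and i + 1 < len(out) and out[i + 1] > out[i]:
            out[i + 1] = out[i]  # a reducing spline must never bounce upwards
    # Shift right by three 5-minute steps (15 minutes) to centre the
    # 30-minute averages, padding with the first value (reducing) or zero
    pad = out[0] if reducing else 0.0
    return [pad] * 3 + out

print(sanitise([5.0, 4.0, -0.2, 4.5, 3.0], reducing=True))
# -> [5.0, 5.0, 5.0, 5.0, 4.0, 0.0, 0.0, 0.0]
```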
- def build_splines(self, variant, reducing=False): - """Cubic splines for interpolated inter-interval momentary or reducing estimates""" + def build_splines(self, variant, reducing=False) -> None: + """Build cubic splines for interpolated inter-interval momentary or reducing estimates""" df = ['pv_estimate'] + (['pv_estimate10'] if self.options.attr_brk_estimate10 else []) + (['pv_estimate90'] if self.options.attr_brk_estimate90 else []) xx = [ i for i in range(0, 1800*len(self._spline_period), 300) ] st, _ = self.get_forecast_list_slice(self._data_forecasts, self.get_day_start_utc()) # Get start of day index @@ -852,28 +907,32 @@ def build_splines(self, variant, reducing=False): variant[site['resource_id']] = {} self.get_spline(variant[site['resource_id']], st, xx, self._data_forecasts, df, reducing=reducing) - async def spline_moments(self): + async def spline_moments(self) -> None: + """Build the moments splines""" try: self.build_splines(self.fc_moment) except Exception as e: _LOGGER.debug('Exception in spline_moments(): %s', e) - def get_moment(self, site, _data_field, t): + def get_moment(self, site, _data_field, t) -> float: + """Get a time value from a moment spline, with times needing to be for today, and also on five-minute boundaries""" try: - return self.fc_moment['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + return self.fc_moment['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] except Exception as e: _LOGGER.debug('Exception in get_moment(): %s', e) return 0 - async def spline_remaining(self): + async def spline_remaining(self) -> None: + """Build the descending splines""" try: self.build_splines(self.fc_remaining, reducing=True) except Exception as e: _LOGGER.debug('Exception in spline_remaining(): %s', e) - def get_remaining(self, site, _data_field, t): + def get_remaining(self, site, _data_field, t) -> float: + """Get a time value from a reducing spline, with times needing to be for today, and also on five-minute boundaries""" try: - return self.fc_remaining['all' if site is None else site][self._data_field if _data_field is None else _data_field][int(t / 300)] + return self.fc_remaining['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] except Exception as e: _LOGGER.debug('Exception in get_remaining(): %s', e) return 0 @@ -912,8 +971,8 @@ def get_forecast_pv_remaining(self, start_utc, end_utc=None, site=None, _use_dat st_i, end_i, round(res,4) ) return res if res > 0 else 0 - except Exception as ex: - _LOGGER.error(f"Exception in get_forecast_pv_remaining(): {ex}") + except Exception as e: + _LOGGER.error("Exception in get_forecast_pv_remaining(): %s", e) _LOGGER.error(traceback.format_exc()) return 0 @@ -936,8 +995,8 @@ def get_forecast_pv_estimates(self, start_utc, end_utc, site=None, _use_data_fie st_i, end_i, round(res,4) ) return res - except Exception as ex: - _LOGGER.error(f"Exception in get_forecast_pv_estimates(): {ex}") + except Exception as e: + _LOGGER.error("Exception in get_forecast_pv_estimates(): %s", e) _LOGGER.error(traceback.format_exc()) return 0 @@ -954,8 +1013,8 @@ def get_forecast_pv_moment(self, time_utc, site=None, _use_data_field=None) -> f time_utc.strftime('%Y-%m-%d %H:%M:%S'), (time_utc - day_start).total_seconds(), round(res, 4) ) return res - except Exception as ex: - _LOGGER.error(f"Exception in get_forecast_pv_moment(): {ex}") + except Exception as e: + 
_LOGGER.error("Exception in get_forecast_pv_moment(): %s", e) _LOGGER.error(traceback.format_exc()) return 0 @@ -964,10 +1023,10 @@ def get_max_forecast_pv_estimate(self, start_utc, end_utc, site=None, _use_data_ try: _data = self._data_forecasts if site is None else self._site_data_forecasts[site] _data_field = self._use_data_field if _use_data_field is None else _use_data_field - res = None st_i, end_i = self.get_forecast_list_slice(_data, start_utc, end_utc) + res = _data[st_i] for d in _data[st_i:end_i]: - if res is None or res[_data_field] < d[_data_field]: + if res[_data_field] < d[_data_field]: res = d if _SENSOR_DEBUG_LOGGING: _LOGGER.debug( "Get max estimate: %s()%s %s st %s end %s st_i %d end_i %d res %s", @@ -977,16 +1036,17 @@ def get_max_forecast_pv_estimate(self, start_utc, end_utc, site=None, _use_data_ st_i, end_i, res ) return res - except Exception as ex: - _LOGGER.error(f"Exception in get_max_forecast_pv_estimate(): {ex}") + except Exception as e: + _LOGGER.error("Exception in get_max_forecast_pv_estimate(): %s", e) _LOGGER.error(traceback.format_exc()) return None def get_energy_data(self) -> dict[str, Any]: + """Get energy data""" try: return self._dataenergy - except Exception as ex: - _LOGGER.error(f"Exception in get_energy_data(): {ex}") + except Exception as e: + _LOGGER.error("Exception in get_energy_data(): %s", e) _LOGGER.error(traceback.format_exc()) return None @@ -1000,21 +1060,21 @@ async def http_data(self, dopast = False): return status failure = False - sitesAttempted = 0 + sites_attempted = 0 for site in self._sites: - sitesAttempted += 1 - _LOGGER.info(f"Getting forecast update for Solcast site {site['resource_id']}") + sites_attempted += 1 + _LOGGER.info("Getting forecast update for Solcast site %s", site['resource_id']) result = await self.http_data_call(site['resource_id'], site['apikey'], dopast) if not result: failure = True - if len(self._sites) > sitesAttempted: + if len(self._sites) > sites_attempted: _LOGGER.warning('Forecast update for site %s failed, so not getting remaining sites', site['resource_id']) else: _LOGGER.warning('Forecast update for the last site queued failed (%s), so not getting remaining sites - API use count will look odd', site['resource_id']) status = 'At least one site forecast get failed' break - if sitesAttempted > 0 and not failure: + if sites_attempted > 0 and not failure: self._data["last_updated"] = dt.now(timezone.utc).isoformat() #self._data["weather"] = self._weather @@ -1024,13 +1084,13 @@ async def http_data(self, dopast = False): await self.serialize_data() else: - if sitesAttempted > 0: + if sites_attempted > 0: _LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast data has not been built") else: _LOGGER.error("No Solcast sites were attempted, so forecast data has not been built - check for earlier failure to retrieve sites") status = 'At least one site forecast get failed' - except Exception as ex: - status = 'Exception in http_data(): %s - Forecast data has not been built' % (ex,) + except Exception as e: + status = f"Exception in http_data(): {e} - Forecast data has not been built" _LOGGER.error(status) _LOGGER.error(traceback.format_exc()) return status @@ -1040,7 +1100,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): try: lastday = self.get_day_start_utc() + timedelta(days=8) numhours = math.ceil((lastday - self.get_now_utc()).total_seconds() / 3600) - _LOGGER.debug(f"Polling API for site {r_id} lastday {lastday} numhours {numhours}") + 
_LOGGER.debug('Polling API for site %s lastday %d numhours %d', r_id, lastday, numhours) _data = [] _data2 = [] @@ -1050,16 +1110,15 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): ae = None resp_dict = await self.fetch_data("estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals") if not isinstance(resp_dict, dict): - _LOGGER.error(f"No data was returned for Solcast estimated_actuals so this WILL cause errors...") - _LOGGER.error(f"Either your API limit is exhaused, Internet down, or networking is misconfigured...") - _LOGGER.error(f"This almost certainly not a problem with the integration, and sensor values will be wrong" - ) + _LOGGER.error('No data was returned for Solcast estimated_actuals so this WILL cause errors...') + _LOGGER.error('Either your API limit is exhaused, Internet down, or networking is misconfigured...') + _LOGGER.error('This almost certainly not a problem with the integration, and sensor values will be wrong') raise TypeError(f"Solcast API did not return a json object. Returned {resp_dict}") ae = resp_dict.get("estimated_actuals", None) if not isinstance(ae, list): - raise TypeError(f"estimated actuals must be a list, not {type(ae)}") + raise TypeError(f"Estimated actuals must be a list, not {type(ae)}") oldest = dt.now(self._tz).replace(hour=0,minute=0,second=0,microsecond=0) - timedelta(days=6) oldest = oldest.astimezone(timezone.utc) @@ -1092,7 +1151,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): if not isinstance(af, list): raise TypeError(f"forecasts must be a list, not {type(af)}") - _LOGGER.debug(f"Solcast returned {len(af)} records") + _LOGGER.debug("Solcast returned %d records", len(af)) st_time = time.time() for x in af: @@ -1146,7 +1205,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): self._data['siteinfo'].update({r_id:{'forecasts': copy.deepcopy(_forecasts)}}) - _LOGGER.debug(f"HTTP data call processing took {round(time.time() - st_time, 4)}s") + _LOGGER.debug("HTTP data call processing took %.4fs", round(time.time() - st_time, 4)) return True except Exception as ex: _LOGGER.error("Exception in http_data_call(): %s", ex) @@ -1159,23 +1218,23 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna try: params = {"format": "json", "api_key": apikey, "hours": hours} url=f"{self.options.host}/rooftop_sites/{site}/{path}" - _LOGGER.debug(f"Fetch data url: {url}") + _LOGGER.debug("Fetch data url: %s", url) async with async_timeout.timeout(900): - apiCacheFileName = self.configDir + '/' + cachedname + "_" + site + ".json" - if self.apiCacheEnabled and file_exists(apiCacheFileName): + api_cache_filename = self._config_dir + '/' + cachedname + "_" + site + ".json" + if self.api_cache_enabled and file_exists(api_cache_filename): status = 404 - async with aiofiles.open(apiCacheFileName) as f: + async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 - _LOGGER.debug(f"Offline cached mode enabled, loaded data for site {site}") + _LOGGER.debug("Offline cached mode enabled, loaded data for site %s", site) else: if self._api_used[apikey] < self._api_limit[apikey]: tries = 10 counter = 0 backoff = 15 # On every retry the back-off increases by (at least) fifteen seconds more than the previous back-off while True: - _LOGGER.debug(f"Fetching forecast") + _LOGGER.debug("Fetching forecast") counter += 1 resp: ClientResponse = await self.aiohttp_session.get( url=url, params=params, ssl=False @@ -1205,51 
+1264,51 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna break # Solcast is busy, so delay (15 seconds * counter), plus a random number of seconds between zero and 15 delay = (counter * backoff) + random.randrange(0,15) - _LOGGER.warning(f"The Solcast API is busy, pausing {delay} seconds before retry") + _LOGGER.warning("The Solcast API is busy, pausing %d seconds before retry", delay) await asyncio.sleep(delay) else: break if status == 200: - _LOGGER.debug(f"Fetch successful") + _LOGGER.debug("Fetch successful") - _LOGGER.debug(f"API returned data, API counter incremented from {self._api_used[apikey]} to {self._api_used[apikey] + 1}") + _LOGGER.debug("API returned data, API counter incremented from %d to %d", self._api_used[apikey], self._api_used[apikey] + 1) self._api_used[apikey] += 1 await self.write_api_usage_cache_file(apikey) resp_json = await resp.json(content_type=None) - if self.apiCacheEnabled: - async with aiofiles.open(apiCacheFileName, 'w') as f: + if self.api_cache_enabled: + async with aiofiles.open(api_cache_filename, 'w') as f: await f.write(json.dumps(resp_json, ensure_ascii=False)) elif status == 998: # Exceeded API limit - _LOGGER.error(f"API allowed polling limit has been exceeded, API counter set to {self._api_used[apikey]}/{self._api_limit[apikey]}") + _LOGGER.error("API allowed polling limit has been exceeded, API counter set to %d/%d", self._api_used[apikey], self._api_limit[apikey]) return None elif status == 999: # Attempts exhausted - _LOGGER.error(f"API was tried {tries} times, but all attempts failed") + _LOGGER.error("API was tried %d times, but all attempts failed", tries) return None elif status == 1000: # An unexpected response return None else: - _LOGGER.error(f"API returned status {translate(status)}, API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") + _LOGGER.error("API returned status %s, API used is %d/%d", translate(status), self._api_used[apikey], self._api_limit[apikey]) return None else: - _LOGGER.warning(f"API polling limit exhausted, not getting forecast, API used is {self._api_used[apikey]}/{self._api_limit[apikey]}") + _LOGGER.warning("API polling limit exhausted, not getting forecast, API used is %d/%d", self._api_used[apikey], self._api_limit[apikey]) return None - _LOGGER.debug(f"HTTP session returned data type {type(resp_json)}") - _LOGGER.debug(f"HTTP session status {translate(status)}") + _LOGGER.debug("HTTP session returned data type %s", type(resp_json)) + _LOGGER.debug("HTTP session status %s", translate(status)) if status == 429: _LOGGER.warning("Solcast is too busy, try again later") elif status == 400: _LOGGER.warning("Status {translate(status)}: The Solcast site is likely missing capacity, please specify capacity or provide historic data for tuning") elif status == 404: - _LOGGER.error(f"The Solcast site cannot be found, status {translate(status)} returned") + _LOGGER.error("The Solcast site cannot be found, status %s returned", translate(status)) elif status == 200: d = cast(dict, resp_json) if _FORECAST_DEBUG_LOGGING: - _LOGGER.debug('HTTP session returned: %s' % (str(d),)) + _LOGGER.debug("HTTP session returned: %s", str(d)) return d #await self.format_json_data(d) except ConnectionRefusedError as err: @@ -1258,12 +1317,13 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna _LOGGER.error("Connection error in fetch_data(): %s", str(e)) except asyncio.TimeoutError: _LOGGER.error("Connection error in fetch_data(): Timed out connecting to Solcast 
API server") - except Exception as e: + except: _LOGGER.error("Exception in fetch_data(): %s", traceback.format_exc()) return None def makeenergydict(self) -> dict: + """Make an energy-compatible dictionary""" wh_hours = {} try: lastv = -1 @@ -1285,7 +1345,7 @@ def makeenergydict(self) -> dict: lastk = d lastv = v[self._use_data_field] - except Exception as e: + except: _LOGGER.error("Exception in makeenergydict(): %s", traceback.format_exc()) return wh_hours @@ -1328,7 +1388,12 @@ async def buildforecastdata(self): "pv_estimate90": min(round((x["pv_estimate90"] * self._damp[h]),4), self._hardlimit)} # Record the individual site forecast - _site_fcasts_dict[z] = {"period_start": z, "pv_estimate": round((x["pv_estimate"]),4), "pv_estimate10": round((x["pv_estimate10"]),4), "pv_estimate90": round((x["pv_estimate90"]),4)} + _site_fcasts_dict[z] = { + "period_start": z, + "pv_estimate": round((x["pv_estimate"]),4), + "pv_estimate10": round((x["pv_estimate10"]),4), + "pv_estimate90": round((x["pv_estimate90"]),4), + } self._site_data_forecasts[site] = sorted(_site_fcasts_dict.values(), key=itemgetter("period_start")) @@ -1337,33 +1402,34 @@ async def buildforecastdata(self): self._data_forecasts = sorted(_fcasts_dict.values(), key=itemgetter("period_start")) - self._forecasts_start_idx = self.calcForecastStartIndex() + self._forecasts_start_idx = self.calc_forecast_start_index() self._dataenergy = {"wh_hours": self.makeenergydict()} - await self.checkDataRecords() + await self.check_data_records() _LOGGER.debug('Calculating splines') await self.spline_moments() await self.spline_remaining() - _LOGGER.debug(f"Build forecast processing took {round(time.time()-st_time,4)}s") + _LOGGER.debug("Build forecast processing took %.4s", round(time.time()-st_time,4)) - except Exception as e: + except: _LOGGER.error("Exception in http_data(): %s", traceback.format_exc()) - def calcForecastStartIndex(self): + def calc_forecast_start_index(self): + """Get the start of forecasts as-at just before midnight (Doesn't stop at midnight because some sensors may need the previous interval)""" midnight_utc = self.get_day_start_utc() - # Search in reverse (less to iterate) and find the interval just before midnight - # (Doesn't stop at midnight because some sensors may need the previous interval) - for idx in range(len(self._data_forecasts)-1, -1, -1): - if self._data_forecasts[idx]["period_start"] < midnight_utc: break + for idx in range(len(self._data_forecasts)-1, -1, -1): # Search in reverse (less to iterate) + if self._data_forecasts[idx]["period_start"] < midnight_utc: + break _LOGGER.debug("Calc forecast start index midnight: %s UTC, idx %s, len %s", midnight_utc.strftime('%Y-%m-%d %H:%M:%S'), idx, len(self._data_forecasts)) return idx - async def checkDataRecords(self): + async def check_data_records(self): + """Verify that all records are present for each day""" for i in range(0, 8): start_utc = self.get_day_start_utc() + timedelta(days=i) end_utc = start_utc + timedelta(days=1) @@ -1372,6 +1438,6 @@ async def checkDataRecords(self): da = dt.now(self._tz).date() + timedelta(days=i) if num_rec == 48: - _LOGGER.debug(f"Data for {da} contains all 48 records") + _LOGGER.debug("Data for %s contains all 48 records", da.strftime('%Y-%m-%d')) else: - _LOGGER.debug(f"Data for {da} contains only {num_rec} of 48 records and may produce inaccurate forecast data") \ No newline at end of file + _LOGGER.debug("Data for %s contains only %d of 48 records and may produce inaccurate forecast data", da.strftime('%Y-%m-%d'), 
num_rec) \ No newline at end of file diff --git a/custom_components/solcast_solar/spline.py b/custom_components/solcast_solar/spline.py index 86cca871..4d61f5b8 100644 --- a/custom_components/solcast_solar/spline.py +++ b/custom_components/solcast_solar/spline.py @@ -1,69 +1,71 @@ +"""Cubic spline from one-dimensional arrays""" + +# pylint: disable=C0200, C0304, C0321, R0914 + import math def cubic_interp(x0, x, y): """ - Cubic spline from one-dimensional arrays - x0: Array of floats to interpolate at x : Array of floats in increasing order y : Array of floats to interpolate - Returns array of interpolaated values + Returns array of interpolated values """ def diff(lst): # numpy-like diff size = len(lst) - 1 r = [0] * size - for i in range(size): r[i] = lst[i+1] - lst[i] + for i in range(size): r[i] = lst[i+1] - lst[i] return r - - def clip(lst, min_val, max_val, inPlace = False): # numpy-like clip - if not inPlace: lst = lst[:] + + def clip(lst, min_val, max_val, in_place = False): # numpy-like clip + if not in_place: lst = lst[:] for i in range(len(lst)): if lst[i] < min_val: lst[i] = min_val elif lst[i] > max_val: - lst[i] = max_val + lst[i] = max_val return lst - - def searchsorted(listToInsert, insertInto): # numpy-like searchsorted - def float_searchsorted(floatToInsert, insertInto): - for i in range(len(insertInto)): - if floatToInsert <= insertInto[i]: return i - return len(insertInto) - return [float_searchsorted(i, insertInto) for i in listToInsert] - + + def searchsorted(list_to_insert, insert_into): # numpy-like searchsorted + def float_searchsorted(float_to_insert, insert_into): + for i in range(len(insert_into)): + if float_to_insert <= insert_into[i]: return i + return len(insert_into) + return [float_searchsorted(i, insert_into) for i in list_to_insert] + def subtract(a, b): return a - b - + size = len(x) xdiff = diff(x) ydiff = diff(y) - Li = [0] * size - Li_1 = [0] * (size - 1) + li = [0] * size + li_1 = [0] * (size - 1) z = [0] * (size) - Li[0] = math.sqrt(2 * xdiff[0]) - Li_1[0] = 0.0 - B0 = 0.0 - z[0] = B0 / Li[0] + li[0] = math.sqrt(2 * xdiff[0]) + li_1[0] = 0.0 + b0 = 0.0 + z[0] = b0 / li[0] for i in range(1, size - 1, 1): - Li_1[i] = xdiff[i-1] / Li[i-1] - Li[i] = math.sqrt(2 * (xdiff[i-1] + xdiff[i]) - Li_1[i-1] * Li_1[i-1]) - Bi = 6 * (ydiff[i] / xdiff[i] - ydiff[i-1] / xdiff[i-1]) - z[i] = (Bi - Li_1[i-1] * z[i-1]) / Li[i] + li_1[i] = xdiff[i-1] / li[i-1] + li[i] = math.sqrt(2 * (xdiff[i-1] + xdiff[i]) - li_1[i-1] * li_1[i-1]) + bi = 6 * (ydiff[i] / xdiff[i] - ydiff[i-1] / xdiff[i-1]) + z[i] = (bi - li_1[i-1] * z[i-1]) / li[i] i = size - 1 - Li_1[i-1] = xdiff[-1] / Li[i-1] - Li[i] = math.sqrt(2 * xdiff[-1] - Li_1[i-1] * Li_1[i-1]) - Bi = 0.0 - z[i] = (Bi - Li_1[i-1] * z[i-1]) / Li[i] + li_1[i-1] = xdiff[-1] / li[i-1] + li[i] = math.sqrt(2 * xdiff[-1] - li_1[i-1] * li_1[i-1]) + bi = 0.0 + z[i] = (bi - li_1[i-1] * z[i-1]) / li[i] i = size - 1 - z[i] = z[i] / Li[i] + z[i] = z[i] / li[i] for i in range(size - 2, -1, -1): - z[i] = (z[i] - Li_1[i-1] * z[i+1]) / Li[i] + z[i] = (z[i] - li_1[i-1] * z[i+1]) / li[i] index = searchsorted(x0, x) index = clip(index, 1, size - 1) @@ -82,8 +84,8 @@ def subtract(a, b): zi0[j] / (6 * hi1[j]) * (xi1[j] - x0[j]) ** 3 + \ zi1[j] / (6 * hi1[j]) * (x0[j] - xi0[j]) ** 3 + \ (yi1[j] / hi1[j] - zi1[j] * hi1[j] / 6) * (x0[j] - xi0[j]) + \ - (yi0[j] / hi1[j] - zi0[j] * hi1[j] / 6) * (xi1[j] - x0[j]) + (yi0[j] / hi1[j] - zi0[j] * hi1[j] / 6) * (xi1[j] - x0[j]) ,4 ) - + return f0 \ No newline at end of file diff --git 
a/custom_components/solcast_solar/system_health.py b/custom_components/solcast_solar/system_health.py index b84fce1f..0f7c816e 100644 --- a/custom_components/solcast_solar/system_health.py +++ b/custom_components/solcast_solar/system_health.py @@ -1,4 +1,7 @@ """Provide info to system health.""" + +# pylint: disable=C0304, E0401, W0212, W0613 + from __future__ import annotations from typing import Any From acd41b8b7152220efe9fd99d47d343d20efbf2fa Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 00:34:19 +1000 Subject: [PATCH 22/38] Fix __init__.py banner oops --- custom_components/solcast_solar/__init__.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index 8d18bee7..a330ce30 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -143,17 +143,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _VERSION = "" # pylint: disable=C0103 integration = await loader.async_get_integration(hass, DOMAIN) _VERSION = str(integration.version) # pylint: disable=C0103 - _LOGGER.info(''' -"\n%s\n -Solcast integration version: %s\n\n" -This is a custom integration. When troubleshooting a problem, after\n" -reviewing open and closed issues, and the discussions, check the\n" -required automation is functioning correctly and try enabling debug\n" -logging to see more. Troubleshooting tips available at:\n" -https://github.com/BJReplay/ha-solcast-solar/discussions/38\n\n" -Beta versions may also have addressed some issues so look at those.\n\n" -If all else fails, then open an issue and our community will try to\n" -help: https://github.com/BJReplay/ha-solcast-solar/issues\n" + _LOGGER.info('''\n%s +Solcast integration version: %s\n +This is a custom integration. When troubleshooting a problem, after +reviewing open and closed issues, and the discussions, check the +required automation is functioning correctly and try enabling debug +logging to see more. Troubleshooting tips available at: +https://github.com/BJReplay/ha-solcast-solar/discussions/38\n +Beta versions may also have addressed some issues so look at those.\n +If all else fails, then open an issue and our community will try to +help: https://github.com/BJReplay/ha-solcast-solar/issues %s''', '-'*67, _VERSION, '-'*67) except loader.IntegrationNotFound: pass From b077a8d3c8afcb3ac2d4d5c2047fb21bfaa314ef Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 09:09:15 +1000 Subject: [PATCH 23/38] Fix bug with data updates on fetch --- custom_components/solcast_solar/__init__.py | 4 ++-- custom_components/solcast_solar/coordinator.py | 1 - custom_components/solcast_solar/sensor.py | 4 ++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index a330ce30..7aa09cec 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -180,9 +180,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.info('Integration has been failed for some time, or your update automation has not been running (see readme). 
Retrieving forecasts.') #await solcast.sites_weather() await solcast.http_data(dopast=False) - coordinator._dataUpdated = True + coordinator._data_updated = True await coordinator.update_integration_listeners() - coordinator._dataUpdated = False + coordinator._data_updated = False except Exception as e: _LOGGER.error("Exception force fetching data on stale start: %s", e) _LOGGER.error(traceback.format_exc()) diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index 1c713397..7da04116 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -11,7 +11,6 @@ import traceback from homeassistant.core import HomeAssistant -from homeassistant.helpers.event import async_track_time_change from homeassistant.helpers.event import async_track_utc_time_change from homeassistant.helpers.update_coordinator import DataUpdateCoordinator diff --git a/custom_components/solcast_solar/sensor.py b/custom_components/solcast_solar/sensor.py index 364f671b..3e2b344f 100755 --- a/custom_components/solcast_solar/sensor.py +++ b/custom_components/solcast_solar/sensor.py @@ -368,11 +368,11 @@ def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" # these sensors will pick-up the change on the next interval update (5mins) - if self.update_policy == SensorUpdatePolicy.EVERY_TIME_INTERVAL and self.coordinator._dataUpdated: + if self.update_policy == SensorUpdatePolicy.EVERY_TIME_INTERVAL and self.coordinator._data_updated: return # these sensors update when the date changed or when there is new data - if self.update_policy == SensorUpdatePolicy.DEFAULT and not (self.coordinator._dateChanged or self.coordinator._dataUpdated) : + if self.update_policy == SensorUpdatePolicy.DEFAULT and not (self.coordinator._date_changed or self.coordinator._data_updated) : return try: From a4c97e8e8d7ae158e6099cf495a39b08bc5a1a1e Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 14:30:13 +1000 Subject: [PATCH 24/38] Fix lastday logging --- custom_components/solcast_solar/solcastapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 3a6f13ea..20ee5215 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -1100,7 +1100,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): try: lastday = self.get_day_start_utc() + timedelta(days=8) numhours = math.ceil((lastday - self.get_now_utc()).total_seconds() / 3600) - _LOGGER.debug('Polling API for site %s lastday %d numhours %d', r_id, lastday, numhours) + _LOGGER.debug('Polling API for site %s lastday %s numhours %d', r_id, lastday.strftime('%Y-%m-%d'), numhours) _data = [] _data2 = [] From 5a22948cdf4b868d8ea7455b268838ba7f2a8f74 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 15:53:31 +1000 Subject: [PATCH 25/38] Update README.md --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 6829793d..e328fde9 100644 --- a/README.md +++ b/README.md @@ -550,7 +550,8 @@ series: ## Known issues -If a hard limit or dampening factors are set then the individual sites breakdown attributes will not be limited by these factors. The only way to implement this would be to have separate hard limits and dampening factors for each site, and this would become overly complex. 
+* Code was added that checks whether the integration has been down for a long time and, if so, automatically polls Solcast on the next start. This feature is currently broken because Solcast removed the API call used to get current usage. +* If a hard limit or dampening factors are set then the individual site breakdown attributes will not be limited by these factors. The only way to implement this would be to have separate hard limits and dampening factors for each site, and this would become overly complex. ## Changes From 709d1b3f6651c9a36da2f98c40fb5913325ae826 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 18:16:45 +1000 Subject: [PATCH 26/38] Ensure usage is reset on restart missing midnight --- custom_components/solcast_solar/solcastapi.py | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py index 20ee5215..b0b820c3 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -128,6 +128,7 @@ def __init__( self._tally = {} self._api_used = {} self._api_limit = {} + self._api_used_reset = {} self._filename = options.file_path self._config_dir = dirname(self._filename) self._tz = options.tz @@ -177,12 +178,14 @@ def redact_msg_api_key(self, msg, api_key) -> str: """Obfuscate API key in messages""" return msg.replace(api_key, self.redact_api_key(api_key)) - async def write_api_usage_cache_file(self, api_key): + async def write_api_usage_cache_file(self, api_key, reset=False): """Serialise the usage cache file""" try: json_file = self.get_api_usage_cache_filename(api_key) + if reset: + self._api_used_reset[api_key] = self.get_day_start_utc() _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) - json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key]} + json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key]} async with aiofiles.open(json_file, 'w') as f: await f.write(json.dumps(json_content, ensure_ascii=False)) except Exception as e: @@ -201,7 +204,7 @@ async def reset_api_usage(self): """Reset the daily API usage counter""" for api_key, _ in self._api_used.items(): self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key) + await self.write_api_usage_cache_file(api_key, reset=True) async def sites_data(self): """Request site details""" @@ -282,6 +285,7 @@ def redact(s): i.pop('latitude', None) self._sites = self._sites + d['sites'] self._sites_loaded = True + self._api_used_reset[spl] = None else: _LOGGER.error("%s HTTP status error %s in sites_data() while gathering sites", self.options.host, translate(status)) raise Exception("HTTP sites_data error: Solcast Error gathering sites") @@ -309,6 +313,7 @@ def redact(s): i.pop('latitude', None) self._sites = self._sites + d['sites'] self._sites_loaded = True + self._api_used_reset[spl] = None _LOGGER.info("Loaded sites cache for %s", self.redact_api_key(spl)) else: error = True @@ -327,6 +332,9 @@ async def sites_usage(self): """Load api usage cache""" try: + if not self._sites_loaded: + _LOGGER.error("Internal error. 
Sites must be loaded before sites_usage() is called") + sp = self.options.api_key.split(",") qt = self.options.api_quota.split(",") try: @@ -349,17 +357,22 @@ async def sites_usage(self): usage = json.loads(await f.read()) self._api_limit[api_key] = usage.get("daily_limit", None) self._api_used[api_key] = usage.get("daily_limit_consumed", None) + self._api_used_reset[api_key] = usage.get("reset", None) if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache self._api_limit[api_key] = quota[spl] await self.write_api_usage_cache_file(api_key) _LOGGER.info("API usage cache loaded and updated with new quota") else: _LOGGER.debug("API usage cache loaded") + if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): + _LOGGER.warning("Resetting API usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) + self._api_used[api_key] = 0 + await self.write_api_usage_cache_file(api_key, reset=True) else: - _LOGGER.warning("No Solcast API usage cache found, creating one and assuming zero API used") + _LOGGER.warning("No Solcast API usage cache found for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) self._api_limit[api_key] = quota[spl] self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key) + await self.write_api_usage_cache_file(api_key, reset=True) _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) except Exception as e: _LOGGER.error(f"Exception in sites_usage(): {e}: %s", traceback.format_exc()) @@ -626,6 +639,10 @@ def get_now_utc(self): """Datetime helper""" return dt.now(self._tz).replace(second=0, microsecond=0).astimezone(timezone.utc) + def get_real_now_utc(self): + """Datetime helper""" + return dt.now(self._tz).astimezone(timezone.utc) + def get_interval_start_utc(self, moment): """Datetime helper""" n = moment.replace(second=0, microsecond=0) From 759502279e96592e265544ae2af9d3f64da3ea1c Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 20:04:49 +1000 Subject: [PATCH 27/38] Remove direct accesses of protected class members Unlike C++, Python does not prevent access to protected class members (those prefixed with an underscore), so nothing was breaking. Convention is now adhered to. 
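For illustration, a minimal sketch of the convention this change adopts; the names are condensed from the shape of the diff below, not quoted verbatim:

    class SolcastApi:
        def __init__(self):
            # The leading underscore marks this member as protected by
            # convention only; Python itself will not block outside access.
            self._sites_loaded = False

        def sites_loaded(self) -> bool:
            """Public accessor for the protected member."""
            return self._sites_loaded

    solcast = SolcastApi()
    # Before: worked, but reached into an implementation detail.
    # ready = solcast._sites_loaded
    # After: callers go through the public accessor instead.
    ready = solcast.sites_loaded()

The diff applies this same pattern to the coordinator and the Solcast API wrapper.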
--- custom_components/solcast_solar/__init__.py | 13 ++++--- .../solcast_solar/coordinator.py | 34 +++++++++++++++---- .../solcast_solar/diagnostics.py | 12 +++---- custom_components/solcast_solar/sensor.py | 30 +++++----------- custom_components/solcast_solar/solcastapi.py | 28 +++++++++++++++ 5 files changed, 76 insertions(+), 41 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index 7aa09cec..cc75ea4b 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -1,6 +1,6 @@ """Support for Solcast PV forecast.""" -# pylint: disable=C0304, C0321, E0401, E1135, W0212, W0613, W0702, W0718 +# pylint: disable=C0304, C0321, E0401, E1135, W0613, W0702, W0718 import logging import traceback @@ -127,12 +127,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: await solcast.sites_data() - if solcast._sites_loaded: + if solcast.sites_loaded(): await solcast.sites_usage() except Exception as ex: raise ConfigEntryNotReady(f"Getting sites data failed: {ex}") from ex - if not solcast._sites_loaded: + if not solcast.sites_loaded(): raise ConfigEntryNotReady('Sites data could not be retrieved') status = await solcast.load_saved_data() @@ -180,9 +180,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.info('Integration has been failed for some time, or your update automation has not been running (see readme). Retrieving forecasts.') #await solcast.sites_weather() await solcast.http_data(dopast=False) - coordinator._data_updated = True + coordinator.set_data_updated(True) await coordinator.update_integration_listeners() - coordinator._data_updated = False + coordinator.set_data_updated(False) except Exception as e: _LOGGER.error("Exception force fetching data on stale start: %s", e) _LOGGER.error(traceback.format_exc()) @@ -242,7 +242,7 @@ async def handle_service_set_dampening(call: ServiceCall): d.update({f"{i}": float(sp[i])}) opt[f"damp{i:02}"] = float(sp[i]) - solcast._damp = d + solcast.set_damp(d) hass.config_entries.async_update_entry(entry, options=opt) except intent.IntentHandleError as err: raise HomeAssistantError(f"Error processing {SERVICE_SET_DAMPENING}: {err}") from err @@ -264,7 +264,6 @@ async def handle_service_set_hard_limit(call: ServiceCall): opt = {**entry.options} opt[HARD_LIMIT] = val - # solcast._hardlimit = val hass.config_entries.async_update_entry(entry, options=opt) except ValueError as err: diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index 7da04116..9456f8f6 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -1,6 +1,6 @@ """The Solcast Solar coordinator""" -# pylint: disable=C0302, C0304, C0321, E0401, R0902, R0914, W0212, W0105, W0613, W0702, W0706, W0719 +# pylint: disable=C0302, C0304, C0321, E0401, R0902, R0914, W0105, W0613, W0702, W0706, W0719 from __future__ import annotations from datetime import datetime as dt @@ -42,13 +42,13 @@ def __init__(self, hass: HomeAssistant, solcast: SolcastApi, version: str) -> No async def _async_update_data(self): """Update data via library""" - return self.solcast._data + return self.solcast.get_data() async def setup(self) -> None: """Set up time change tracking""" d={} self._previousenergy = d - self._last_day = dt.now(self.solcast._tz).day + self._last_day = dt.now(self.solcast.get_tz()).day try: #4.0.18 - added reset 
usage call to reset usage sensors at UTC midnight async_track_utc_time_change(self._hass, self.update_utcmidnight_usage_sensor_data, hour=0,minute=0,second=0) @@ -60,7 +60,7 @@ async def setup(self) -> None: async def update_integration_listeners(self, *args) -> None: """Get updated sensor values""" try: - current_day = dt.now(self.solcast._tz).day + current_day = dt.now(self.solcast.get_tz()).day self._date_changed = current_day != self._last_day if self._date_changed: self._last_day = current_day @@ -108,10 +108,30 @@ async def service_query_forecast_data(self, *args) -> tuple: """Return forecast data requested by a service call""" return await self.solcast.get_forecast_list(*args) + def get_solcast_sites(self) -> dict[str, Any]: + """Return the active solcast sites""" + return self.solcast.get_sites() + + def get_previousenergy(self) -> dict[str, Any]: + """Return the prior energy dictionary""" + return self._previousenergy + def get_energy_tab_data(self) -> dict[str, Any]: """Return an energy page compatible dictionary""" return self.solcast.get_energy_data() + def get_data_updated(self) -> bool: + """Return whether all data has updated, which will trigger all sensor values to update""" + return self._data_updated + + def set_data_updated(self, updated) -> bool: + """Set whether all data has updated""" + self._data_updated = updated + + def get_date_changed(self) -> bool: + """Return whether rolled over to tomorrow, which will trigger all sensor values to update""" + return self._date_changed + def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | None): """Return the value of a sensor""" match key: @@ -124,7 +144,7 @@ def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | Non case "forecast_next_hour": return self.solcast.get_forecast_n_hour(1) case "forecast_custom_hours": - return self.solcast.get_forecast_custom_hours(self.solcast._customhoursensor) + return self.solcast.get_forecast_custom_hours(self.solcast.get_customhoursensor()) case "total_kwh_forecast_today": return self.solcast.get_total_kwh_forecast_day(0) case "total_kwh_forecast_tomorrow": @@ -158,7 +178,7 @@ def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | Non case "lastupdated": return self.solcast.get_last_updated_datetime() case "hard_limit": - return False if self.solcast._hardlimit == 100 else f"{round(self.solcast._hardlimit * 1000)}w" + return False if self.solcast.get_hardlimit() == 100 else f"{round(self.solcast.get_hardlimit() * 1000)}w" # case "weather_description": # return self.solcast.get_weather() case _: @@ -172,7 +192,7 @@ def get_sensor_extra_attributes(self, key="") -> (Dict[str, Any] | None): case "forecast_next_hour": return self.solcast.get_forecasts_n_hour(1) case "forecast_custom_hours": - return self.solcast.get_forecasts_custom_hours(self.solcast._customhoursensor) + return self.solcast.get_forecasts_custom_hours(self.solcast.get_customhoursensor()) case "total_kwh_forecast_today": ret = self.solcast.get_forecast_day(0) ret = {**ret, **self.solcast.get_sites_total_kwh_forecast_day(0)} diff --git a/custom_components/solcast_solar/diagnostics.py b/custom_components/solcast_solar/diagnostics.py index 73c89a80..1c129c7b 100644 --- a/custom_components/solcast_solar/diagnostics.py +++ b/custom_components/solcast_solar/diagnostics.py @@ -1,6 +1,6 @@ """Support for the Solcast diagnostics.""" -# pylint: disable=C0304, E0401, W0212 +# pylint: disable=C0304, E0401 from __future__ import annotations @@ -25,12 +25,12 @@ async def 
async_get_config_entry_diagnostics( coordinator: SolcastUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] return { - "tz_conversion": coordinator.solcast._tz, + "tz_conversion": coordinator.solcast.get_tz(), "used_api_requests": coordinator.solcast.get_api_used_count(), "api_request_limit": coordinator.solcast.get_api_limit(), - "rooftop_site_count": len(coordinator.solcast._sites), - "forecast_hard_limit_set": coordinator.solcast._hardlimit < 100, + "rooftop_site_count": len(coordinator.solcast.get_sites()), + "forecast_hard_limit_set": coordinator.solcast.get_hardlimit() < 100, "data": (coordinator.data, TO_REDACT), - "energy_history_graph": coordinator._previousenergy, - "energy_forecasts_graph": coordinator.solcast._dataenergy["wh_hours"], + "energy_history_graph": coordinator.get_previousenergy(), + "energy_forecasts_graph": coordinator.solcast.get_energy_data()["wh_hours"], } \ No newline at end of file diff --git a/custom_components/solcast_solar/sensor.py b/custom_components/solcast_solar/sensor.py index 3e2b344f..9437173b 100755 --- a/custom_components/solcast_solar/sensor.py +++ b/custom_components/solcast_solar/sensor.py @@ -1,6 +1,6 @@ """Support for Solcast PV forecast sensors.""" -# pylint: disable=C0304, E0401, W0212, W0718 +# pylint: disable=C0304, E0401, W0718 from __future__ import annotations @@ -272,7 +272,7 @@ async def async_setup_entry( sen = SolcastSensor(coordinator, SENSORS[sensor_types], entry) entities.append(sen) - for site in coordinator.solcast._sites: + for site in coordinator.get_solcast_sites(): k = RooftopSensorEntityDescription( key=site["resource_id"], name=site["name"], @@ -346,9 +346,7 @@ def __init__( def extra_state_attributes(self): """Return the state extra attributes of the sensor.""" try: - return self.coordinator.get_sensor_extra_attributes( - self.entity_description.key - ) + return self.coordinator.get_sensor_extra_attributes(self.entity_description.key) except Exception as e: _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) return None @@ -368,17 +366,15 @@ def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" # these sensors will pick-up the change on the next interval update (5mins) - if self.update_policy == SensorUpdatePolicy.EVERY_TIME_INTERVAL and self.coordinator._data_updated: + if self.update_policy == SensorUpdatePolicy.EVERY_TIME_INTERVAL and self.coordinator.get_data_updated(): return # these sensors update when the date changed or when there is new data - if self.update_policy == SensorUpdatePolicy.DEFAULT and not (self.coordinator._date_changed or self.coordinator._data_updated) : + if self.update_policy == SensorUpdatePolicy.DEFAULT and not (self.coordinator.get_date_changed() or self.coordinator.get_data_updated()) : return try: - self._sensor_data = self.coordinator.get_sensor_value( - self.entity_description.key - ) + self._sensor_data = self.coordinator.get_sensor_value(self.entity_description.key) except Exception as e: _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) self._sensor_data = None @@ -463,10 +459,7 @@ def unique_id(self): def extra_state_attributes(self): """Return the state extra attributes of the sensor.""" try: - return self.coordinator.get_site_sensor_extra_attributes( - self.rooftop_id, - self.key, - ) + return self.coordinator.get_site_sensor_extra_attributes(self.rooftop_id, self.key ) except Exception as e: _LOGGER.error("Unable to get sensor attributes: %s: %s", e, traceback.format_exc()) 
return None @@ -484,18 +477,13 @@ def should_poll(self) -> bool: async def async_added_to_hass(self) -> None: """When entity is added to hass.""" await super().async_added_to_hass() - self.async_on_remove( - self.coordinator.async_add_listener(self._handle_coordinator_update) - ) + self.async_on_remove(self.coordinator.async_add_listener(self._handle_coordinator_update)) @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" try: - self._sensor_data = self.coordinator.get_site_sensor_value( - self.rooftop_id, - self.key, - ) + self._sensor_data = self.coordinator.get_site_sensor_value(self.rooftop_id, self.key) except Exception as e: _LOGGER.error("Unable to get sensor value: %s: %s", e, traceback.format_exc()) self._sensor_data = None diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index b0b820c3..a0a2686e 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -149,6 +149,34 @@ def __init__( #self._weather = "" _LOGGER.debug("Configuration directory is %s", self._config_dir) + def get_tz(self) -> str: + """Return the time zone""" + return self._tz + + def sites_loaded(self) -> bool: + """Return the active sites""" + return self._sites_loaded + + def get_sites(self) -> dict[str, Any]: + """Return the active sites""" + return self._sites + + def get_data(self) -> dict[str, Any]: + """Return the data dictionary""" + return self._data + + def get_customhoursensor(self) -> int: + """Return the custom hour sensor""" + return self._customhoursensor + + def get_hardlimit(self) -> int: + """Return the hard limit""" + return self._hardlimit + + def set_damp(self, d) -> None: + """Set the dampening dictionary""" + self._damp = d + async def serialize_data(self): """Serialize data to file""" try: From 3706f76d9ddbe58c890a8b8b22acdb4466c58941 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 21:25:20 +1000 Subject: [PATCH 28/38] Improve site failure logging --- custom_components/solcast_solar/solcastapi.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index a0a2686e..512db4e9 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -1112,10 +1112,13 @@ async def http_data(self, dopast = False): result = await self.http_data_call(site['resource_id'], site['apikey'], dopast) if not result: failure = True - if len(self._sites) > sites_attempted: - _LOGGER.warning('Forecast update for site %s failed, so not getting remaining sites', site['resource_id']) + if len(self._sites) > 1: + if sites_attempted < len(self._sites): + _LOGGER.warning('Forecast update for site %s failed so not getting remaining sites%s', site['resource_id'], ' - API use count may look odd' if len(self._sites > 2) else '') + else: + _LOGGER.warning('Forecast update for the last site queued failed (%s) so not getting remaining sites - API use count may look odd', site['resource_id']) else: - _LOGGER.warning('Forecast update for the last site queued failed (%s), so not getting remaining sites - API use count will look odd', site['resource_id']) + _LOGGER.warning('Forecast update for site %s failed', site['resource_id']) status = 'At least one site forecast get failed' break From ead0ad94a754d939eff5fece740d948aa3a71151 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 
Aug 2024 21:39:03 +1000 Subject: [PATCH 29/38] Only 'trying cache' notification for status != 200 --- custom_components/solcast_solar/solcastapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 512db4e9..b88b6d28 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -264,7 +264,7 @@ def redact(s): ) status = resp.status - _LOGGER.debug("HTTP session returned status %s in sites_data(), trying cache", translate(status)) + _LOGGER.debug("HTTP session returned status %s in sites_data()%s", translate(status), ', trying cache' if status != 200 else '') try: resp_json = await resp.json(content_type=None) except json.decoder.JSONDecodeError: From 3515476e2af82af6a14a1cb35644d1d42277929c Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Tue, 27 Aug 2024 22:43:40 +1000 Subject: [PATCH 30/38] Logging changes for issues and consistency --- custom_components/solcast_solar/__init__.py | 4 +- .../solcast_solar/coordinator.py | 2 +- custom_components/solcast_solar/solcastapi.py | 86 +++++++++---------- 3 files changed, 46 insertions(+), 46 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index cc75ea4b..0f9f11b1 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -177,7 +177,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # If the integration has failed for some time and then is restarted retrieve forecasts if solcast.get_api_used_count() == 0 and solcast.get_last_updated_datetime() < solcast.get_day_start_utc() - timedelta(days=1): try: - _LOGGER.info('Integration has been failed for some time, or your update automation has not been running (see readme). 
Retrieving forecasts.') + _LOGGER.info('Integration has been failed for some time, or your update automation has not been running (see readme), so retrieving forecasts') #await solcast.sites_weather() await solcast.http_data(dopast=False) coordinator.set_data_updated(True) @@ -441,7 +441,7 @@ def upgraded(): default.append(str(usage['daily_limit'])) default = ','.join(default) except Exception as e: - _LOGGER.warning('Could not load API usage cache quota while upgrading config, using default: %s', e) + _LOGGER.warning('Could not load API usage cached limit while upgrading config, using default: %s', e) default = '10' if new.get(API_QUOTA) is None: new[API_QUOTA] = default try: diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index 9456f8f6..e285de1f 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -83,7 +83,7 @@ async def update_utcmidnight_usage_sensor_data(self, *args) -> None: async def update_midnight_spline_recalc(self, *args) -> None: """Re-calculates splines at midnight local time""" try: - _LOGGER.debug('Recalculating splines') + _LOGGER.debug("Recalculating splines") await self.solcast.spline_moments() await self.solcast.spline_remaining() except: diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index b88b6d28..040bef78 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -298,7 +298,7 @@ def redact(s): async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 - _LOGGER.info("Loaded sites cache for %s", self.redact_api_key(spl)) + _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) else: _LOGGER.error("Cached Solcast sites are not yet available for %s to cope with API call failure", self.redact_api_key(spl)) _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly") @@ -314,13 +314,14 @@ def redact(s): self._sites = self._sites + d['sites'] self._sites_loaded = True self._api_used_reset[spl] = None + _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) else: _LOGGER.error("%s HTTP status error %s in sites_data() while gathering sites", self.options.host, translate(status)) raise Exception("HTTP sites_data error: Solcast Error gathering sites") - except ConnectionRefusedError as err: - _LOGGER.error("Connection refused in sites_data(): %s", err) + except ConnectionRefusedError as e: + _LOGGER.error("Connection refused in sites_data(): %s", e) except ClientConnectionError as e: - _LOGGER.error('Connection error in sites_data(): %s', str(e)) + _LOGGER.error('Connection error in sites_data(): %s', e) except asyncio.TimeoutError: try: _LOGGER.warning("Retrieving Solcast sites timed out, attempting to continue") @@ -329,7 +330,7 @@ def redact(s): api_cache_filename = self.get_api_sites_cache_filename(spl) cache_exists = file_exists(api_cache_filename) if cache_exists: - _LOGGER.info("Loading cached Solcast sites for {self.redact_api_key(spl)}") + _LOGGER.info("Loading cached Solcast sites for %s", self.redact_api_key(spl)) async with aiofiles.open(api_cache_filename) as f: resp_json = json.loads(await f.read()) d = cast(dict, resp_json) @@ -342,7 +343,7 @@ def redact(s): self._sites = self._sites + d['sites'] self._sites_loaded = True self._api_used_reset[spl] = None - _LOGGER.info("Loaded sites cache for %s", 
self.redact_api_key(spl)) + _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) else: error = True _LOGGER.error("Cached sites are not yet available for %s to cope with Solcast API call failure", self.redact_api_key(spl)) @@ -354,7 +355,7 @@ def redact(s): except: pass except Exception as e: - _LOGGER.error(f"Exception in sites_data(): {e}: %s", traceback.format_exc()) + _LOGGER.error("Exception in sites_data(): %s: %s", e, traceback.format_exc()) async def sites_usage(self): """Load api usage cache""" @@ -377,9 +378,8 @@ async def sites_usage(self): for spl in sp: api_key = spl.strip() - _LOGGER.debug("Getting API usage from cache for API key %s", self.redact_api_key(api_key)) api_cache_filename = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug("%s", 'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) + _LOGGER.debug("%s for %s", 'Usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist'), self.redact_api_key(api_key)) if file_exists(api_cache_filename): async with aiofiles.open(api_cache_filename) as f: usage = json.loads(await f.read()) @@ -389,21 +389,21 @@ async def sites_usage(self): if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache self._api_limit[api_key] = quota[spl] await self.write_api_usage_cache_file(api_key) - _LOGGER.info("API usage cache loaded and updated with new quota") + _LOGGER.info("Usage loaded and cache updated with new quota") else: - _LOGGER.debug("API usage cache loaded") + _LOGGER.info("Usage loaded for %s", self.redact_api_key(api_key)) if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): - _LOGGER.warning("Resetting API usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) + _LOGGER.warning("Resetting usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key, reset=True) else: - _LOGGER.warning("No Solcast API usage cache found for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) + _LOGGER.warning("No usage cache found for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) self._api_limit[api_key] = quota[spl] self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key, reset=True) _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) except Exception as e: - _LOGGER.error(f"Exception in sites_usage(): {e}: %s", traceback.format_exc()) + _LOGGER.error("Exception in sites_usage(): %s: %s", e, traceback.format_exc()) ''' async def sites_usage(self): @@ -415,10 +415,10 @@ async def sites_usage(self): for spl in sp: api_key = spl.strip() params = {"api_key": api_key} - _LOGGER.debug(f"Getting API limit and usage from solcast for {self.redact_api_key(api_key)}") + _LOGGER.debug("Getting API limit and usage from solcast for %s", self.redact_api_key(api_key)) async with async_timeout.timeout(60): api_cache_filename = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug(f"{'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')}") + _LOGGER.debug("%s", 'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) retries = 3 retry = retries success = False @@ -434,7 +434,7 @@ async def sites_usage(self): except 
json.decoder.JSONDecodeError: _LOGGER.error("JSONDecodeError in sites_usage() - Solcast site could be having problems") except: raise - _LOGGER.debug(f"HTTP session returned status {translate(status)} in sites_usage()") + _LOGGER.debug("HTTP session returned status %s in sites_usage()", translate(status)) if status == 200: d = cast(dict, resp_json) self._api_limit[api_key] = d.get("daily_limit", None) @@ -446,12 +446,12 @@ async def sites_usage(self): if cache_exists: use_cache_immediate = True break - _LOGGER.debug(f"Will retry GetUserUsageAllowance, retry {(retries - retry) + 1}") + _LOGGER.debug("Will retry GetUserUsageAllowance, retry %d", (retries - retry) + 1) await asyncio.sleep(5) retry -= 1 if not success: if not use_cache_immediate: - _LOGGER.warning(f"Timeout getting Solcast API usage allowance, last call result: {translate(status)}, using cached data if it exists") + _LOGGER.warning("Timeout getting Solcast API usage allowance, last call result: %s, using cached data if it exists", translate(status)) status = 404 if cache_exists: async with aiofiles.open(api_cache_filename) as f: @@ -460,12 +460,12 @@ async def sites_usage(self): d = cast(dict, resp_json) self._api_limit[api_key] = d.get("daily_limit", None) self._api_used[api_key] = d.get("daily_limit_consumed", None) - _LOGGER.info(f"Loaded API usage cache") + _LOGGER.info("Loaded API usage cache") else: - _LOGGER.warning(f"No Solcast API usage cache found") + _LOGGER.warning("No Solcast API usage cache found") if status == 200: - _LOGGER.debug(f"API counter for {self.redact_api_key(api_key)} is {self._api_used[api_key]}/{self._api_limit[api_key]}") + _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) else: self._api_limit[api_key] = 10 self._api_used[api_key] = 0 @@ -474,10 +474,10 @@ async def sites_usage(self): except json.decoder.JSONDecodeError: _LOGGER.error("JSONDecodeError in sites_usage(): Solcast site could be having problems") - except ConnectionRefusedError as err: - _LOGGER.error("Error in sites_usage(): %s", err) + except ConnectionRefusedError as e: + _LOGGER.error("Error in sites_usage(): %s", e) except ClientConnectionError as e: - _LOGGER.error('Connection error in sites_usage(): %s', str(e)) + _LOGGER.error('Connection error in sites_usage(): %s', e) except asyncio.TimeoutError: _LOGGER.error("Connection error in sites_usage(): Timed out connecting to solcast server") except Exception as e: @@ -494,7 +494,7 @@ async def sites_weather(self): rid = self._sites[0].get("resource_id", None) params = {"resourceId": rid, "api_key": sp[0]} - _LOGGER.debug(f"Get weather byline") + _LOGGER.debug("Get weather byline") async with async_timeout.timeout(60): resp: ClientResponse = await self.aiohttp_session.get( url=f"https://api.solcast.com.au/json/reply/GetRooftopSiteSparklines", params=params, ssl=False @@ -504,18 +504,18 @@ async def sites_weather(self): if status == 200: d = cast(dict, resp_json) - _LOGGER.debug(f"Returned data in sites_weather(): {d}") + _LOGGER.debug("Returned data in sites_weather(): %s", str(d)) self._weather = d.get("forecast_descriptor", None).get("description", None) - _LOGGER.debug(f"Weather description: {self._weather}") + _LOGGER.debug("Weather description: %s", self._weather) else: raise Exception(f"Gathering weather description failed. 
request returned Status code: {translate(status)} - Response: {resp_json}.") except json.decoder.JSONDecodeError: _LOGGER.error("JSONDecodeError in sites_weather(): Solcast site could be having problems") - except ConnectionRefusedError as err: - _LOGGER.error("Error in sites_weather(): %s", err) + except ConnectionRefusedError as e: + _LOGGER.error("Error in sites_weather(): %s", e) except ClientConnectionError as e: - _LOGGER.error("Connection error in sites_weather(): %s", str(e)) + _LOGGER.error("Connection error in sites_weather(): %s", e) except asyncio.TimeoutError: _LOGGER.error("Connection Error in sites_weather(): Timed out connection to solcast server") except Exception as e: @@ -532,7 +532,7 @@ async def load_saved_data(self): json_data = json.loads(await data_file.read(), cls=JSONDecoder) json_version = json_data.get("version", 1) #self._weather = json_data.get("weather", "unknown") - _LOGGER.debug("The saved data file exists, file type is %s", type(json_data)) + _LOGGER.debug("Data cache exists, file type is %s", type(json_data)) if json_version == _JSON_VERSION: self._data = json_data self._loaded_data = True @@ -562,7 +562,7 @@ async def load_saved_data(self): # Create an up to date forecast await self.buildforecastdata() - _LOGGER.info("Loaded solcast.json forecast cache") + _LOGGER.info("Data loaded") if not self._loaded_data: # No file to load @@ -1253,10 +1253,10 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): self._data['siteinfo'].update({r_id:{'forecasts': copy.deepcopy(_forecasts)}}) - _LOGGER.debug("HTTP data call processing took %.4fs", round(time.time() - st_time, 4)) + _LOGGER.debug("HTTP data call processing took %.3f seconds", round(time.time() - st_time, 4)) return True - except Exception as ex: - _LOGGER.error("Exception in http_data_call(): %s", ex) + except Exception as e: + _LOGGER.error("Exception in http_data_call(): %s", e) _LOGGER.error(traceback.format_exc()) return False @@ -1350,7 +1350,7 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna if status == 429: _LOGGER.warning("Solcast is too busy, try again later") elif status == 400: - _LOGGER.warning("Status {translate(status)}: The Solcast site is likely missing capacity, please specify capacity or provide historic data for tuning") + _LOGGER.warning("Status %s: The Solcast site is likely missing capacity, please specify capacity or provide historic data for tuning", translate(status)) elif status == 404: _LOGGER.error("The Solcast site cannot be found, status %s returned", translate(status)) elif status == 200: @@ -1359,10 +1359,10 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna _LOGGER.debug("HTTP session returned: %s", str(d)) return d #await self.format_json_data(d) - except ConnectionRefusedError as err: - _LOGGER.error("Connection error in fetch_data(), connection refused: %s", err) + except ConnectionRefusedError as e: + _LOGGER.error("Connection error in fetch_data(), connection refused: %s", e) except ClientConnectionError as e: - _LOGGER.error("Connection error in fetch_data(): %s", str(e)) + _LOGGER.error("Connection error in fetch_data(): %s", e) except asyncio.TimeoutError: _LOGGER.error("Connection error in fetch_data(): Timed out connecting to Solcast API server") except: @@ -1456,11 +1456,11 @@ async def buildforecastdata(self): await self.check_data_records() - _LOGGER.debug('Calculating splines') + _LOGGER.debug("Calculating splines") await self.spline_moments() await 
self.spline_remaining() - _LOGGER.debug("Build forecast processing took %.4s", round(time.time()-st_time,4)) + _LOGGER.debug("Build forecast processing took %.3f seconds", round(time.time() - st_time, 4)) except: _LOGGER.error("Exception in http_data(): %s", traceback.format_exc()) @@ -1472,7 +1472,7 @@ def calc_forecast_start_index(self): for idx in range(len(self._data_forecasts)-1, -1, -1): # Search in reverse (less to iterate) if self._data_forecasts[idx]["period_start"] < midnight_utc: break - _LOGGER.debug("Calc forecast start index midnight: %s UTC, idx %s, len %s", midnight_utc.strftime('%Y-%m-%d %H:%M:%S'), idx, len(self._data_forecasts)) + _LOGGER.debug("Calc forecast start index midnight: %s UTC, idx %d, len %d", midnight_utc.strftime('%Y-%m-%d %H:%M:%S'), idx, len(self._data_forecasts)) return idx From efd10da0b36ef3fcad44e326ad3c60f0ea751e59 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 28 Aug 2024 10:54:34 +1000 Subject: [PATCH 31/38] Usage cache time updated issues fix --- custom_components/solcast_solar/solcastapi.py | 46 ++++++++++++------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 040bef78..c5a297bb 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -213,7 +213,7 @@ async def write_api_usage_cache_file(self, api_key, reset=False): if reset: self._api_used_reset[api_key] = self.get_day_start_utc() _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) - json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key]} + json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key].strftime("%Y-%m-%dT%H:%M:%S+00:00")} async with aiofiles.open(json_file, 'w') as f: await f.write(json.dumps(json_content, ensure_ascii=False)) except Exception as e: @@ -380,24 +380,36 @@ async def sites_usage(self): api_key = spl.strip() api_cache_filename = self.get_api_usage_cache_filename(api_key) _LOGGER.debug("%s for %s", 'Usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist'), self.redact_api_key(api_key)) + cache = True if file_exists(api_cache_filename): async with aiofiles.open(api_cache_filename) as f: - usage = json.loads(await f.read()) - self._api_limit[api_key] = usage.get("daily_limit", None) - self._api_used[api_key] = usage.get("daily_limit_consumed", None) - self._api_used_reset[api_key] = usage.get("reset", None) - if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache - self._api_limit[api_key] = quota[spl] - await self.write_api_usage_cache_file(api_key) - _LOGGER.info("Usage loaded and cache updated with new quota") - else: - _LOGGER.info("Usage loaded for %s", self.redact_api_key(api_key)) - if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): - _LOGGER.warning("Resetting usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) - self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key, reset=True) + try: + usage = json.loads(await f.read(), cls=JSONDecoder) + except: + cache = False + if cache: + self._api_limit[api_key] = usage.get("daily_limit", None) + self._api_used[api_key] = 
usage.get("daily_limit_consumed", None) + self._api_used_reset[api_key] = usage.get("reset", None) + try: + self._api_used_reset[api_key] = parse_datetime(self._api_used_reset[api_key]).astimezone(timezone.utc) + except: + _LOGGER.error("Internal error parsing datetime from usage cache, continuing") + _LOGGER.error(traceback.format_exc()) + if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache + self._api_limit[api_key] = quota[spl] + await self.write_api_usage_cache_file(api_key) + _LOGGER.info("Usage loaded and cache updated with new quota") + else: + _LOGGER.info("Usage loaded for %s", self.redact_api_key(api_key)) + if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): + _LOGGER.warning("Resetting usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) + self._api_used[api_key] = 0 + await self.write_api_usage_cache_file(api_key, reset=True) else: - _LOGGER.warning("No usage cache found for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) + cache = False + if not cache: + _LOGGER.warning("No usage cache found (or corrupt) for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) self._api_limit[api_key] = quota[spl] self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key, reset=True) @@ -1114,7 +1126,7 @@ async def http_data(self, dopast = False): failure = True if len(self._sites) > 1: if sites_attempted < len(self._sites): - _LOGGER.warning('Forecast update for site %s failed so not getting remaining sites%s', site['resource_id'], ' - API use count may look odd' if len(self._sites > 2) else '') + _LOGGER.warning('Forecast update for site %s failed so not getting remaining sites%s', site['resource_id'], ' - API use count may look odd' if len(self._sites) > 2 else '') else: _LOGGER.warning('Forecast update for the last site queued failed (%s) so not getting remaining sites - API use count may look odd', site['resource_id']) else: From 3b68733de1651cb1f132fed17494216b7a987c31 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 28 Aug 2024 18:14:07 +1000 Subject: [PATCH 32/38] Just better: sites_usage() exception handling --- custom_components/solcast_solar/solcastapi.py | 31 ++++++++++++------- 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index c5a297bb..5fdb4090 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -363,6 +363,7 @@ async def sites_usage(self): try: if not self._sites_loaded: _LOGGER.error("Internal error. 
Sites must be loaded before sites_usage() is called") + return sp = self.options.api_key.split(",") qt = self.options.api_quota.split(",") @@ -373,7 +374,7 @@ async def sites_usage(self): quota = { sp[i].strip(): int(qt[i].strip()) for i in range(len(qt)) } except Exception as e: _LOGGER.error("Exception: %s", e) - _LOGGER.warning("Could not interpret API quota configuration string, using default of 10") + _LOGGER.warning("Could not interpret API limit configuration string (%s), using default of 10", self.options.api_quota) quota = {s: 10 for s in sp} for spl in sp: @@ -385,21 +386,27 @@ async def sites_usage(self): async with aiofiles.open(api_cache_filename) as f: try: usage = json.loads(await f.read(), cls=JSONDecoder) - except: + except json.decoder.JSONDecodeError: + _LOGGER.error("The usage cache for %s is corrupt, re-creating cache with zero usage", self.redact_api_key(api_key)) + cache = False + except Exception as e: + _LOGGER.error("Load usage cache exception %s for %s, re-creating cache with zero usage", e, self.redact_api_key(api_key)) cache = False if cache: self._api_limit[api_key] = usage.get("daily_limit", None) self._api_used[api_key] = usage.get("daily_limit_consumed", None) self._api_used_reset[api_key] = usage.get("reset", None) - try: - self._api_used_reset[api_key] = parse_datetime(self._api_used_reset[api_key]).astimezone(timezone.utc) - except: - _LOGGER.error("Internal error parsing datetime from usage cache, continuing") - _LOGGER.error(traceback.format_exc()) - if usage['daily_limit'] != quota[spl]: # Limit has been adjusted, so rewrite the cache - self._api_limit[api_key] = quota[spl] + if self._api_used_reset[api_key] is not None: + try: + self._api_used_reset[api_key] = parse_datetime(self._api_used_reset[api_key]).astimezone(timezone.utc) + except: + _LOGGER.error("Internal error parsing datetime from usage cache, continuing") + _LOGGER.error(traceback.format_exc()) + self._api_used_reset[api_key] = None + if usage['daily_limit'] != quota[api_key]: # Limit has been adjusted, so rewrite the cache + self._api_limit[api_key] = quota[api_key] await self.write_api_usage_cache_file(api_key) - _LOGGER.info("Usage loaded and cache updated with new quota") + _LOGGER.info("Usage loaded and cache updated with new limit") else: _LOGGER.info("Usage loaded for %s", self.redact_api_key(api_key)) if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): @@ -409,8 +416,8 @@ async def sites_usage(self): else: cache = False if not cache: - _LOGGER.warning("No usage cache found (or corrupt) for %s, creating one and assuming zero API used", self.redact_api_key(api_key)) - self._api_limit[api_key] = quota[spl] + _LOGGER.warning("Creating usage cache for %s, assuming zero API used", self.redact_api_key(api_key)) + self._api_limit[api_key] = quota[api_key] self._api_used[api_key] = 0 await self.write_api_usage_cache_file(api_key, reset=True) _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) From e31009961357e37c90c8aaa9d98f1b95473eb555 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 28 Aug 2024 19:19:05 +1000 Subject: [PATCH 33/38] Serialise data more carefully --- custom_components/solcast_solar/solcastapi.py | 37 ++++++++++++++----- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 5fdb4090..0959346c 100644 --- 
a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -179,6 +179,7 @@ def set_damp(self, d) -> None: async def serialize_data(self): """Serialize data to file""" + serialise = True try: if not self._loaded_data: _LOGGER.debug("Not saving forecast cache in serialize_data() as no data has been loaded yet") @@ -190,13 +191,19 @@ async def serialize_data(self): _LOGGER.error("Internal error: Solcast forecast cache date has not been set, not saving data") return - async with self._serialize_lock: - async with aiofiles.open(self._filename, "w") as f: - await f.write(json.dumps(self._data, ensure_ascii=False, cls=DateTimeEncoder)) - _LOGGER.debug("Saved forecast cache") + payload = json.dumps(self._data, ensure_ascii=False, cls=DateTimeEncoder) except Exception as e: _LOGGER.error("Exception in serialize_data(): %s", e) _LOGGER.error(traceback.format_exc()) + serialise = False + if serialise: + try: + async with self._serialize_lock: + async with aiofiles.open(self._filename, 'w') as f: + await f.write(payload) + _LOGGER.debug("Saved forecast cache") + except Exception as e: + _LOGGER.error("Exception writing forecast data: %s", e) def redact_api_key(self, api_key) -> str: """Obfuscate API key""" @@ -208,17 +215,25 @@ def redact_msg_api_key(self, msg, api_key) -> str: async def write_api_usage_cache_file(self, api_key, reset=False): """Serialise the usage cache file""" + serialise = True try: json_file = self.get_api_usage_cache_filename(api_key) if reset: self._api_used_reset[api_key] = self.get_day_start_utc() _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key].strftime("%Y-%m-%dT%H:%M:%S+00:00")} - async with aiofiles.open(json_file, 'w') as f: - await f.write(json.dumps(json_content, ensure_ascii=False)) + payload = json.dumps(json_content, ensure_ascii=False) except Exception as e: _LOGGER.error("Exception in write_api_usage_cache_file(): %s", e) _LOGGER.error(traceback.format_exc()) + serialise = False + if serialise: + try: + async with self._serialize_lock: + async with aiofiles.open(json_file, 'w') as f: + await f.write(payload) + except Exception as e: + _LOGGER.error("Exception writing usage cache for %s: %s", self.redact_msg_api_key(json_file, api_key), e) def get_api_usage_cache_filename(self, entry_name): """Build a fully qualified API usage cache filename using a simple name or separate files for more than one API key""" @@ -275,8 +290,9 @@ def redact(s): if status == 200: if resp_json['total_records'] > 0: _LOGGER.debug("Writing sites cache") - async with aiofiles.open(api_cache_filename, 'w') as f: - await f.write(json.dumps(resp_json, ensure_ascii=False)) + async with self._serialize_lock: + async with aiofiles.open(api_cache_filename, 'w') as f: + await f.write(json.dumps(resp_json, ensure_ascii=False)) success = True break else: @@ -1346,8 +1362,9 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna resp_json = await resp.json(content_type=None) if self.api_cache_enabled: - async with aiofiles.open(api_cache_filename, 'w') as f: - await f.write(json.dumps(resp_json, ensure_ascii=False)) + async with self._serialize_lock: + async with aiofiles.open(api_cache_filename, 'w') as f: + await f.write(json.dumps(resp_json, ensure_ascii=False)) elif status == 998: # Exceeded API limit _LOGGER.error("API allowed polling 
limit has been exceeded, API counter set to %d/%d", self._api_used[apikey], self._api_limit[apikey]) return None From b74d05d94192d55fe800194a5ee268aac4d700f4 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Wed, 28 Aug 2024 19:40:24 +1000 Subject: [PATCH 34/38] Utilise DateTimeEncoder for serialise usage --- custom_components/solcast_solar/solcastapi.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 0959346c..04accef3 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -221,8 +221,8 @@ async def write_api_usage_cache_file(self, api_key, reset=False): if reset: self._api_used_reset[api_key] = self.get_day_start_utc() _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) - json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key].strftime("%Y-%m-%dT%H:%M:%S+00:00")} - payload = json.dumps(json_content, ensure_ascii=False) + json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key]} + payload = json.dumps(json_content, ensure_ascii=False, cls=DateTimeEncoder) except Exception as e: _LOGGER.error("Exception in write_api_usage_cache_file(): %s", e) _LOGGER.error(traceback.format_exc()) From 9402c43cd1825216c6c4071ace2817d1f2ed41f4 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Thu, 29 Aug 2024 21:19:07 +1000 Subject: [PATCH 35/38] Code clean up, consistency --- custom_components/solcast_solar/__init__.py | 6 +- .../solcast_solar/coordinator.py | 12 +- .../solcast_solar/diagnostics.py | 6 +- custom_components/solcast_solar/sensor.py | 2 +- custom_components/solcast_solar/solcastapi.py | 380 ++++++++---------- .../solcast_solar/system_health.py | 2 +- custom_components/solcast_solar/test.py | 29 +- 7 files changed, 207 insertions(+), 230 deletions(-) diff --git a/custom_components/solcast_solar/__init__.py b/custom_components/solcast_solar/__init__.py index 0f9f11b1..d7a3e1a9 100644 --- a/custom_components/solcast_solar/__init__.py +++ b/custom_components/solcast_solar/__init__.py @@ -127,12 +127,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: await solcast.sites_data() - if solcast.sites_loaded(): + if solcast.sites_loaded: await solcast.sites_usage() except Exception as ex: raise ConfigEntryNotReady(f"Getting sites data failed: {ex}") from ex - if not solcast.sites_loaded(): + if not solcast.sites_loaded: raise ConfigEntryNotReady('Sites data could not be retrieved') status = await solcast.load_saved_data() @@ -242,7 +242,7 @@ async def handle_service_set_dampening(call: ServiceCall): d.update({f"{i}": float(sp[i])}) opt[f"damp{i:02}"] = float(sp[i]) - solcast.set_damp(d) + solcast.damp = d hass.config_entries.async_update_entry(entry, options=opt) except intent.IntentHandleError as err: raise HomeAssistantError(f"Error processing {SERVICE_SET_DAMPENING}: {err}") from err diff --git a/custom_components/solcast_solar/coordinator.py b/custom_components/solcast_solar/coordinator.py index e285de1f..aed0a8bc 100644 --- a/custom_components/solcast_solar/coordinator.py +++ b/custom_components/solcast_solar/coordinator.py @@ -48,7 +48,7 @@ async def setup(self) -> None: """Set up time change tracking""" d={} self._previousenergy = d - self._last_day = 
dt.now(self.solcast.get_tz()).day + self._last_day = dt.now(self.solcast.options.tz).day try: #4.0.18 - added reset usage call to reset usage sensors at UTC midnight async_track_utc_time_change(self._hass, self.update_utcmidnight_usage_sensor_data, hour=0,minute=0,second=0) @@ -60,7 +60,7 @@ async def setup(self) -> None: async def update_integration_listeners(self, *args) -> None: """Get updated sensor values""" try: - current_day = dt.now(self.solcast.get_tz()).day + current_day = dt.now(self.solcast.options.tz).day self._date_changed = current_day != self._last_day if self._date_changed: self._last_day = current_day @@ -110,7 +110,7 @@ async def service_query_forecast_data(self, *args) -> tuple: def get_solcast_sites(self) -> dict[str, Any]: """Return the active solcast sites""" - return self.solcast.get_sites() + return self.solcast.sites def get_previousenergy(self) -> dict[str, Any]: """Return the prior energy dictionary""" @@ -144,7 +144,7 @@ def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | Non case "forecast_next_hour": return self.solcast.get_forecast_n_hour(1) case "forecast_custom_hours": - return self.solcast.get_forecast_custom_hours(self.solcast.get_customhoursensor()) + return self.solcast.get_forecast_custom_hours(self.solcast.custom_hour_sensor) case "total_kwh_forecast_today": return self.solcast.get_total_kwh_forecast_day(0) case "total_kwh_forecast_tomorrow": @@ -178,7 +178,7 @@ def get_sensor_value(self, key="") -> (int | dt | float | Any | str | bool | Non case "lastupdated": return self.solcast.get_last_updated_datetime() case "hard_limit": - return False if self.solcast.get_hardlimit() == 100 else f"{round(self.solcast.get_hardlimit() * 1000)}w" + return False if self.solcast.hard_limit == 100 else f"{round(self.solcast.hard_limit * 1000)}w" # case "weather_description": # return self.solcast.get_weather() case _: @@ -192,7 +192,7 @@ def get_sensor_extra_attributes(self, key="") -> (Dict[str, Any] | None): case "forecast_next_hour": return self.solcast.get_forecasts_n_hour(1) case "forecast_custom_hours": - return self.solcast.get_forecasts_custom_hours(self.solcast.get_customhoursensor()) + return self.solcast.get_forecasts_custom_hours(self.solcast.custom_hour_sensor) case "total_kwh_forecast_today": ret = self.solcast.get_forecast_day(0) ret = {**ret, **self.solcast.get_sites_total_kwh_forecast_day(0)} diff --git a/custom_components/solcast_solar/diagnostics.py b/custom_components/solcast_solar/diagnostics.py index 1c129c7b..3e11f35d 100644 --- a/custom_components/solcast_solar/diagnostics.py +++ b/custom_components/solcast_solar/diagnostics.py @@ -25,11 +25,11 @@ async def async_get_config_entry_diagnostics( coordinator: SolcastUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] return { - "tz_conversion": coordinator.solcast.get_tz(), + "tz_conversion": coordinator.solcast.options.tz, "used_api_requests": coordinator.solcast.get_api_used_count(), "api_request_limit": coordinator.solcast.get_api_limit(), - "rooftop_site_count": len(coordinator.solcast.get_sites()), - "forecast_hard_limit_set": coordinator.solcast.get_hardlimit() < 100, + "rooftop_site_count": len(coordinator.solcast.sites), + "forecast_hard_limit_set": coordinator.solcast.hard_limit < 100, "data": (coordinator.data, TO_REDACT), "energy_history_graph": coordinator.get_previousenergy(), "energy_forecasts_graph": coordinator.solcast.get_energy_data()["wh_hours"], diff --git a/custom_components/solcast_solar/sensor.py b/custom_components/solcast_solar/sensor.py 
index 9437173b..2d717b2a 100755 --- a/custom_components/solcast_solar/sensor.py +++ b/custom_components/solcast_solar/sensor.py @@ -310,7 +310,7 @@ def __init__( #doesnt work :() if entity_description.key == "forecast_custom_hours": - self._attr_translation_placeholders = {"forecast_custom_hours": f"{coordinator.solcast._customhoursensor}"} + self._attr_translation_placeholders = {"forecast_custom_hours": f"{coordinator.solcast.custom_hour_sensor}"} self.entity_description = entity_description self.coordinator = coordinator diff --git a/custom_components/solcast_solar/solcastapi.py b/custom_components/solcast_solar/solcastapi.py index 04accef3..2dee6351 100644 --- a/custom_components/solcast_solar/solcastapi.py +++ b/custom_components/solcast_solar/solcastapi.py @@ -34,7 +34,7 @@ # For current func name, specify 0 or no argument # For name of caller of current func, specify 1 # For name of caller of caller of current func, specify 2, etc. -currentFuncName = lambda n=0: sys._getframe(n + 1).f_code.co_name +currentFuncName = lambda n=0: sys._getframe(n + 1).f_code.co_name # pylint: disable=C3001, W0212 _SENSOR_DEBUG_LOGGING = False _FORECAST_DEBUG_LOGGING = False @@ -57,7 +57,7 @@ def __init__(self, *args, **kwargs) -> None: json.JSONDecoder.__init__( self, object_hook=self.object_hook, *args, **kwargs) - def object_hook(self, obj) -> dict: + def object_hook(self, obj) -> dict: # pylint: disable=E0202 """Hook""" ret = {} for key, value in obj.items(): @@ -98,7 +98,7 @@ class ConnectionOptions: file_path: str tz: timezone dampening: dict - customhoursensor: int + custom_hour_sensor: int key_estimate: str hard_limit: int attr_brk_estimate: bool @@ -119,11 +119,17 @@ def __init__( api_cache_enabled: bool = False ): """Device init""" - self.aiohttp_session = aiohttp_session + + # Public vars self.options = options - self.api_cache_enabled = api_cache_enabled - self._sites_loaded = False - self._sites = [] + self.hard_limit = options.hard_limit + self.custom_hour_sensor = options.custom_hour_sensor + self.damp = options.dampening + self.sites = [] + self.sites_loaded = False + + # Protected vars + self._aiohttp_session = aiohttp_session self._data = {'siteinfo': {}, 'last_updated': dt.fromtimestamp(0, timezone.utc).isoformat()} self._tally = {} self._api_used = {} @@ -132,50 +138,41 @@ def __init__( self._filename = options.file_path self._config_dir = dirname(self._filename) self._tz = options.tz - self._dataenergy = {} + self._data_energy = {} self._data_forecasts = [] self._site_data_forecasts = {} + self._spline_period = list(range(0, 90000, 1800)) + self._forecasts_moment = {} + self._forecasts_remaining = {} self._forecasts_start_idx = 0 self._loaded_data = False self._serialize_lock = asyncio.Lock() - self._damp = options.dampening - self._customhoursensor = options.customhoursensor self._use_data_field = f"pv_{options.key_estimate}" - self._hardlimit = options.hard_limit - self._estimen = {'pv_estimate': options.attr_brk_estimate, 'pv_estimate10': options.attr_brk_estimate10, 'pv_estimate90': options.attr_brk_estimate90} - self._spline_period = list(range(0, 90000, 1800)) - self.fc_moment = {} - self.fc_remaining = {} + self._estimate_set = {'pv_estimate': options.attr_brk_estimate, 'pv_estimate10': options.attr_brk_estimate10, 'pv_estimate90': options.attr_brk_estimate90} #self._weather = "" - _LOGGER.debug("Configuration directory is %s", self._config_dir) + self._api_cache_enabled = api_cache_enabled # For offline development - def get_tz(self) -> str: - """Return the time zone""" - 
return self._tz - - def sites_loaded(self) -> bool: - """Return the active sites""" - return self._sites_loaded - - def get_sites(self) -> dict[str, Any]: - """Return the active sites""" - return self._sites + _LOGGER.debug("Configuration directory is %s", self._config_dir) def get_data(self) -> dict[str, Any]: """Return the data dictionary""" return self._data - def get_customhoursensor(self) -> int: - """Return the custom hour sensor""" - return self._customhoursensor + def redact_api_key(self, api_key) -> str: + """Obfuscate API key""" + return '*'*6 + api_key[-6:] + + def redact_msg_api_key(self, msg, api_key) -> str: + """Obfuscate API key in messages""" + return msg.replace(api_key, self.redact_api_key(api_key)) - def get_hardlimit(self) -> int: - """Return the hard limit""" - return self._hardlimit + def get_usage_cache_filename(self, entry_name): + """Build a fully qualified API usage cache filename using a simple name or separate files for more than one API key""" + return '%s/solcast-usage%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 - def set_damp(self, d) -> None: - """Set the dampening dictionary""" - self._damp = d + def get_sites_cache_filename(self, entry_name): + """Build a fully qualified site details cache filename using a simple name or separate files for more than one API key""" + return '%s/solcast-sites%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 async def serialize_data(self): """Serialize data to file""" @@ -184,13 +181,11 @@ async def serialize_data(self): if not self._loaded_data: _LOGGER.debug("Not saving forecast cache in serialize_data() as no data has been loaded yet") return - # If the _loaded_data flag is True, yet last_updated is 1/1/1970 then data has not been - # loaded properly for some reason, or no forecast has been received since startup. - # Abort the save. 
+ # If the _loaded_data flag is True, yet last_updated is 1/1/1970 then data has not been loaded + # properly for some reason, or no forecast has been received since startup then abort the save if self._data['last_updated'] == dt.fromtimestamp(0, timezone.utc).isoformat(): _LOGGER.error("Internal error: Solcast forecast cache date has not been set, not saving data") return - payload = json.dumps(self._data, ensure_ascii=False, cls=DateTimeEncoder) except Exception as e: _LOGGER.error("Exception in serialize_data(): %s", e) @@ -205,26 +200,18 @@ async def serialize_data(self): except Exception as e: _LOGGER.error("Exception writing forecast data: %s", e) - def redact_api_key(self, api_key) -> str: - """Obfuscate API key""" - return '*'*6 + api_key[-6:] - - def redact_msg_api_key(self, msg, api_key) -> str: - """Obfuscate API key in messages""" - return msg.replace(api_key, self.redact_api_key(api_key)) - - async def write_api_usage_cache_file(self, api_key, reset=False): + async def serialise_usage(self, api_key, reset=False): """Serialise the usage cache file""" serialise = True try: - json_file = self.get_api_usage_cache_filename(api_key) + json_file = self.get_usage_cache_filename(api_key) if reset: self._api_used_reset[api_key] = self.get_day_start_utc() _LOGGER.debug("Writing API usage cache file: %s", self.redact_msg_api_key(json_file, api_key)) json_content = {"daily_limit": self._api_limit[api_key], "daily_limit_consumed": self._api_used[api_key], "reset": self._api_used_reset[api_key]} payload = json.dumps(json_content, ensure_ascii=False, cls=DateTimeEncoder) except Exception as e: - _LOGGER.error("Exception in write_api_usage_cache_file(): %s", e) + _LOGGER.error("Exception in serialise_usage(): %s", e) _LOGGER.error(traceback.format_exc()) serialise = False if serialise: @@ -235,48 +222,34 @@ async def write_api_usage_cache_file(self, api_key, reset=False): except Exception as e: _LOGGER.error("Exception writing usage cache for %s: %s", self.redact_msg_api_key(json_file, api_key), e) - def get_api_usage_cache_filename(self, entry_name): - """Build a fully qualified API usage cache filename using a simple name or separate files for more than one API key""" - return '%s/solcast-usage%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 - - def get_api_sites_cache_filename(self, entry_name): - """Build a fully qualified site details cache filename using a simple name or separate files for more than one API key""" - return '%s/solcast-sites%s.json' % (self._config_dir, "" if len(self.options.api_key.split(",")) < 2 else "-" + entry_name) # pylint: disable=C0209 - - async def reset_api_usage(self): - """Reset the daily API usage counter""" - for api_key, _ in self._api_used.items(): - self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key, reset=True) - async def sites_data(self): """Request site details""" try: - def redact(s): + def redact_lat_lon(s): return re.sub(r'itude\': [0-9\-\.]+', 'itude\': **.******', s) sp = self.options.api_key.split(",") for spl in sp: - params = {"format": "json", "api_key": spl.strip()} + api_key = spl.strip() async with async_timeout.timeout(60): - api_cache_filename = self.get_api_sites_cache_filename(spl) - _LOGGER.debug("%s", 'Sites cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) - if self.api_cache_enabled and file_exists(api_cache_filename): + cache_filename = self.get_sites_cache_filename(api_key) + _LOGGER.debug("%s", 
'Sites cache ' + ('exists' if file_exists(cache_filename) else 'does not yet exist')) + if self._api_cache_enabled and file_exists(cache_filename): _LOGGER.debug("Loading cached sites data") status = 404 - async with aiofiles.open(api_cache_filename) as f: + async with aiofiles.open(cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 else: - _LOGGER.debug("Connecting to %s/rooftop_sites?format=json&api_key=%s", self.options.host, self.redact_api_key(spl)) + url = f"{self.options.host}/rooftop_sites" + params = {"format": "json", "api_key": api_key} + _LOGGER.debug("Connecting to %s?format=json&api_key=%s", url, self.redact_api_key(api_key)) retries = 3 retry = retries success = False use_cache_immediate = False - cache_exists = file_exists(api_cache_filename) + cache_exists = file_exists(cache_filename) while retry >= 0: - resp: ClientResponse = await self.aiohttp_session.get( - url=f"{self.options.host}/rooftop_sites", params=params, ssl=False - ) + resp: ClientResponse = await self._aiohttp_session.get(url=url, params=params, ssl=False) status = resp.status _LOGGER.debug("HTTP session returned status %s in sites_data()%s", translate(status), ', trying cache' if status != 200 else '') @@ -291,12 +264,12 @@ def redact(s): if resp_json['total_records'] > 0: _LOGGER.debug("Writing sites cache") async with self._serialize_lock: - async with aiofiles.open(api_cache_filename, 'w') as f: + async with aiofiles.open(cache_filename, 'w') as f: await f.write(json.dumps(resp_json, ensure_ascii=False)) success = True break else: - _LOGGER.error('No sites for the API key %s are configured at solcast.com', self.redact_api_key(spl)) + _LOGGER.error('No sites for the API key %s are configured at solcast.com', self.redact_api_key(api_key)) return else: if cache_exists: @@ -311,26 +284,26 @@ def redact(s): _LOGGER.warning("Retries exhausted gathering Solcast sites, last call result: %s, using cached data if it exists", translate(status)) status = 404 if cache_exists: - async with aiofiles.open(api_cache_filename) as f: + async with aiofiles.open(cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 - _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) + _LOGGER.info("Sites loaded for %s", self.redact_api_key(api_key)) else: - _LOGGER.error("Cached Solcast sites are not yet available for %s to cope with API call failure", self.redact_api_key(spl)) + _LOGGER.error("Cached Solcast sites are not yet available for %s to cope with API call failure", self.redact_api_key(api_key)) _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly") if status == 200: d = cast(dict, resp_json) - _LOGGER.debug("Sites data: %s", redact(str(d))) + _LOGGER.debug("Sites data: %s", redact_lat_lon(str(d))) for i in d['sites']: - i['apikey'] = spl.strip() + i['apikey'] = api_key #v4.0.14 to stop HA adding a pin to the map i.pop('longitude', None) i.pop('latitude', None) - self._sites = self._sites + d['sites'] - self._sites_loaded = True - self._api_used_reset[spl] = None - _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) + self.sites = self.sites + d['sites'] + self.sites_loaded = True + self._api_used_reset[api_key] = None + _LOGGER.info("Sites loaded for %s", self.redact_api_key(api_key)) else: _LOGGER.error("%s HTTP status error %s in sites_data() while gathering sites", self.options.host, translate(status)) raise Exception("HTTP sites_data error: Solcast Error gathering sites") @@ -343,31 +316,30 @@ def 
redact(s): _LOGGER.warning("Retrieving Solcast sites timed out, attempting to continue") error = False for spl in sp: - api_cache_filename = self.get_api_sites_cache_filename(spl) - cache_exists = file_exists(api_cache_filename) + api_key = spl.strip() + cache_filename = self.get_sites_cache_filename(api_key) + cache_exists = file_exists(cache_filename) if cache_exists: - _LOGGER.info("Loading cached Solcast sites for %s", self.redact_api_key(spl)) - async with aiofiles.open(api_cache_filename) as f: + _LOGGER.info("Loading cached Solcast sites for %s", self.redact_api_key(api_key)) + async with aiofiles.open(cache_filename) as f: resp_json = json.loads(await f.read()) d = cast(dict, resp_json) - _LOGGER.debug("Sites data: %s", redact(str(d))) + _LOGGER.debug("Sites data: %s", redact_lat_lon(str(d))) for i in d['sites']: - i['apikey'] = spl.strip() + i['apikey'] = api_key #v4.0.14 to stop HA adding a pin to the map i.pop('longitude', None) i.pop('latitude', None) - self._sites = self._sites + d['sites'] - self._sites_loaded = True - self._api_used_reset[spl] = None - _LOGGER.info("Sites loaded for %s", self.redact_api_key(spl)) + self.sites = self.sites + d['sites'] + self.sites_loaded = True + self._api_used_reset[api_key] = None + _LOGGER.info("Sites loaded for %s", self.redact_api_key(api_key)) else: error = True - _LOGGER.error("Cached sites are not yet available for %s to cope with Solcast API call failure", self.redact_api_key(spl)) - _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration cannot function yet") + _LOGGER.error("Cached Solcast sites are not yet available for %s to cope with API call failure", self.redact_api_key(api_key)) + _LOGGER.error("At least one successful API 'get sites' call is needed, so the integration will not function correctly") if error: - _LOGGER.error("Timed out getting Solcast sites, and one or more site caches failed to load") - _LOGGER.error("This is critical, and the integration cannot function reliably yet") - _LOGGER.error("Suggestion: Double check your overall HA configuration, specifically networking related") + _LOGGER.error("Suggestion: Check your overall HA configuration, specifically networking related (Is IPV6 an issue for you? DNS? Proxy?)") except: pass except Exception as e: @@ -377,7 +349,7 @@ async def sites_usage(self): """Load api usage cache""" try: - if not self._sites_loaded: + if not self.sites_loaded: _LOGGER.error("Internal error. 
Sites must be loaded before sites_usage() is called") return @@ -395,11 +367,11 @@ async def sites_usage(self): for spl in sp: api_key = spl.strip() - api_cache_filename = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug("%s for %s", 'Usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist'), self.redact_api_key(api_key)) + cache_filename = self.get_usage_cache_filename(api_key) + _LOGGER.debug("%s for %s", 'Usage cache ' + ('exists' if file_exists(cache_filename) else 'does not yet exist'), self.redact_api_key(api_key)) cache = True - if file_exists(api_cache_filename): - async with aiofiles.open(api_cache_filename) as f: + if file_exists(cache_filename): + async with aiofiles.open(cache_filename) as f: try: usage = json.loads(await f.read(), cls=JSONDecoder) except json.decoder.JSONDecodeError: @@ -421,25 +393,31 @@ async def sites_usage(self): self._api_used_reset[api_key] = None if usage['daily_limit'] != quota[api_key]: # Limit has been adjusted, so rewrite the cache self._api_limit[api_key] = quota[api_key] - await self.write_api_usage_cache_file(api_key) + await self.serialise_usage(api_key) _LOGGER.info("Usage loaded and cache updated with new limit") else: _LOGGER.info("Usage loaded for %s", self.redact_api_key(api_key)) if self._api_used_reset[api_key] is not None and self.get_real_now_utc() > self._api_used_reset[api_key] + timedelta(hours=24): _LOGGER.warning("Resetting usage for %s, last reset was more than 24-hours ago", self.redact_api_key(api_key)) self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key, reset=True) + await self.serialise_usage(api_key, reset=True) else: cache = False if not cache: _LOGGER.warning("Creating usage cache for %s, assuming zero API used", self.redact_api_key(api_key)) self._api_limit[api_key] = quota[api_key] self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key, reset=True) + await self.serialise_usage(api_key, reset=True) _LOGGER.debug("API counter for %s is %d/%d", self.redact_api_key(api_key), self._api_used[api_key], self._api_limit[api_key]) except Exception as e: _LOGGER.error("Exception in sites_usage(): %s: %s", e, traceback.format_exc()) + async def reset_api_usage(self): + """Reset the daily API usage counter""" + for api_key, _ in self._api_used.items(): + self._api_used[api_key] = 0 + await self.serialise_usage(api_key, reset=True) + ''' async def sites_usage(self): """Load api usage""" @@ -449,20 +427,19 @@ async def sites_usage(self): for spl in sp: api_key = spl.strip() - params = {"api_key": api_key} _LOGGER.debug("Getting API limit and usage from solcast for %s", self.redact_api_key(api_key)) async with async_timeout.timeout(60): - api_cache_filename = self.get_api_usage_cache_filename(api_key) - _LOGGER.debug("%s", 'API usage cache ' + ('exists' if file_exists(api_cache_filename) else 'does not yet exist')) + cache_filename = self.get_usage_cache_filename(api_key) + _LOGGER.debug("%s", 'API usage cache ' + ('exists' if file_exists(cache_filename) else 'does not yet exist')) + url = f"{self.options.host}/json/reply/GetUserUsageAllowance" + params = {"api_key": api_key} retries = 3 retry = retries success = False use_cache_immediate = False - cache_exists = file_exists(api_cache_filename) + cache_exists = file_exists(cache_filename) while retry > 0: - resp: ClientResponse = await self.aiohttp_session.get( - url=f"{self.options.host}/json/reply/GetUserUsageAllowance", params=params, ssl=False - ) + resp: ClientResponse = await 
self._aiohttp_session.get(url=url, params=params, ssl=False) status = resp.status try: resp_json = await resp.json(content_type=None) @@ -474,7 +451,7 @@ async def sites_usage(self): d = cast(dict, resp_json) self._api_limit[api_key] = d.get("daily_limit", None) self._api_used[api_key] = d.get("daily_limit_consumed", None) - await self.write_api_usage_cache_file(api_key) + await self.serialise_usage(api_key) retry = 0 success = True else: @@ -489,7 +466,7 @@ async def sites_usage(self): _LOGGER.warning("Timeout getting Solcast API usage allowance, last call result: %s, using cached data if it exists", translate(status)) status = 404 if cache_exists: - async with aiofiles.open(api_cache_filename) as f: + async with aiofiles.open(cache_filename) as f: resp_json = json.loads(await f.read()) status = 200 d = cast(dict, resp_json) @@ -504,7 +481,7 @@ async def sites_usage(self): else: self._api_limit[api_key] = 10 self._api_used[api_key] = 0 - await self.write_api_usage_cache_file(api_key) + await self.serialise_usage(api_key) raise Exception(f"Gathering site usage failed in sites_usage(). Request returned Status code: {translate(status)} - Response: {resp_json}.") except json.decoder.JSONDecodeError: @@ -524,16 +501,14 @@ async def sites_weather(self): """Request site weather byline""" try: - if len(self._sites) > 0: + if len(self.sites) > 0: sp = self.options.api_key.split(",") - rid = self._sites[0].get("resource_id", None) - + rid = self.sites[0].get("resource_id", None) + url=f"{self.options.host}/json/reply/GetRooftopSiteSparklines" params = {"resourceId": rid, "api_key": sp[0]} _LOGGER.debug("Get weather byline") async with async_timeout.timeout(60): - resp: ClientResponse = await self.aiohttp_session.get( - url=f"https://api.solcast.com.au/json/reply/GetRooftopSiteSparklines", params=params, ssl=False - ) + resp: ClientResponse = await self._aiohttp_session.get(url=url, params=params, ssl=False) resp_json = await resp.json(content_type=None) status = resp.status @@ -561,7 +536,7 @@ async def load_saved_data(self): """Load the saved solcast.json data, also checking for new API keys and site removal""" try: status = '' - if len(self._sites) > 0: + if len(self.sites) > 0: if file_exists(self._filename): async with aiofiles.open(self._filename) as data_file: json_data = json.loads(await data_file.read(), cls=JSONDecoder) @@ -574,13 +549,13 @@ async def load_saved_data(self): # Check for any new API keys so no sites data yet for those ks = {} - for d in self._sites: + for d in self.sites: if not any(s == d.get('resource_id', '') for s in json_data['siteinfo']): ks[d.get('resource_id')] = d.get('apikey') if len(ks.keys()) > 0: # Some site data does not exist yet so get it - _LOGGER.info("New site(s) have been added, so getting forecast data for just those site(s)") + _LOGGER.info("New site(s) have been added, so getting forecast data for them") for a, _api_key in ks: await self.http_data_call(r_id=a, api=_api_key, dopast=True) await self.serialize_data() @@ -588,8 +563,8 @@ async def load_saved_data(self): # Check for sites that need to be removed l = [] for s in json_data['siteinfo']: - if not any(d.get('resource_id', '') == s for d in self._sites): - _LOGGER.info("Solcast site resource id %s is no longer configured, removing saved data from cached file", s) + if not any(d.get('resource_id', '') == s for d in self.sites): + _LOGGER.warning("Solcast site resource id %s is no longer configured, removing saved data from cached file", s) l.append(s) for ll in l: @@ -678,7 +653,7 @@ def 
get_rooftop_site_total_today(self, site) -> float: def get_rooftop_site_extra_data(self, site = ""): """Return information about a site""" - g = tuple(d for d in self._sites if d["resource_id"] == site) + g = tuple(d for d in self.sites if d["resource_id"] == site) if len(g) != 1: raise ValueError(f"Unable to find site {site}") site: Dict[str, Any] = g[0] @@ -778,13 +753,13 @@ def get_forecasts_n_hour(self, n_hour) -> Dict[str, Any]: """Return forecast for the Nth hour for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_forecast_n_hour(n_hour, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_n_hour(n_hour, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_n_hour(n_hour, _use_data_field=_data_field) return res @@ -799,13 +774,13 @@ def get_forecasts_custom_hours(self, n_hour) -> Dict[str, Any]: """Return forecast for the next N hours for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_forecast_custom_hours(n_hour, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_custom_hours(n_hour, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_custom_hours(n_hour, _use_data_field=_data_field) return res @@ -818,13 +793,13 @@ def get_sites_power_n_mins(self, n_mins) -> Dict[str, Any]: """Return expected power generation in the next N minutes for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_power_n_mins(n_mins, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_power_n_mins(n_mins, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_power_n_mins(n_mins, site=None, _use_data_field=_data_field) return res @@ -840,13 +815,13 @@ def get_sites_peak_w_day(self, n_day) -> Dict[str, Any]: """Return max kW for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_peak_w_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if 
self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_peak_w_day(n_day, site=None, _use_data_field=_data_field) return res @@ -861,13 +836,13 @@ def get_sites_peak_w_time_day(self, n_day) -> Dict[str, Any]: """Return hour of max kW for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_peak_w_time_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_peak_w_time_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_peak_w_time_day(n_day, site=None, _use_data_field=_data_field) return res @@ -883,13 +858,13 @@ def get_forecasts_remaining_today(self) -> Dict[str, Any]: """Return remaining forecasted production for today for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_forecast_remaining_today(site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_forecast_remaining_today(site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_forecast_remaining_today(_use_data_field=_data_field) return res @@ -904,13 +879,13 @@ def get_sites_total_kwh_forecast_day(self, n_day) -> Dict[str, Any]: """Return forecast kWh total for site N days ahead for all sites and individual sites""" res = {} if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: res[site['resource_id']] = self.get_total_kwh_forecast_day(n_day, site=site['resource_id']) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')+'-'+site['resource_id']] = self.get_total_kwh_forecast_day(n_day, site=site['resource_id'], _use_data_field=_data_field) for _data_field in ('pv_estimate', 'pv_estimate10', 'pv_estimate90'): - if self._estimen.get(_data_field): + if self._estimate_set.get(_data_field): res[_data_field.replace('pv_','')] = self.get_total_kwh_forecast_day(n_day, site=None, _use_data_field=_data_field) return res @@ -939,7 +914,7 @@ def get_forecast_list_slice(self, _data, start_utc, end_utc=None, search_past=Fa return st_i, end_i def get_spline(self, spline, st, xx, _data, df, reducing=False) -> None: - """Build an individual site/forecast confidence spline""" + """Build a forecast spline, momentary or day reducing""" for _data_field in df: if st > 
0: y = [_data[st+i][_data_field] for i in range(0, len(self._spline_period))] @@ -983,21 +958,21 @@ def build_splines(self, variant, reducing=False) -> None: variant['all'] = {} self.get_spline(variant['all'], st, xx, self._data_forecasts, df, reducing=reducing) if self.options.attr_brk_site: - for site in self._sites: + for site in self.sites: variant[site['resource_id']] = {} self.get_spline(variant[site['resource_id']], st, xx, self._data_forecasts, df, reducing=reducing) async def spline_moments(self) -> None: """Build the moments splines""" try: - self.build_splines(self.fc_moment) + self.build_splines(self._forecasts_moment) except Exception as e: _LOGGER.debug('Exception in spline_moments(): %s', e) def get_moment(self, site, _data_field, t) -> float: """Get a time value from a moment spline, with times needing to be for today, and also on five-minute boundaries""" try: - return self.fc_moment['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] + return self._forecasts_moment['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] except Exception as e: _LOGGER.debug('Exception in get_moment(): %s', e) return 0 @@ -1005,14 +980,14 @@ def get_moment(self, site, _data_field, t) -> float: async def spline_remaining(self) -> None: """Build the descending splines""" try: - self.build_splines(self.fc_remaining, reducing=True) + self.build_splines(self._forecasts_remaining, reducing=True) except Exception as e: _LOGGER.debug('Exception in spline_remaining(): %s', e) def get_remaining(self, site, _data_field, t) -> float: """Get a time value from a reducing spline, with times needing to be for today, and also on five-minute boundaries""" try: - return self.fc_remaining['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] + return self._forecasts_remaining['all' if site is None else site][self._use_data_field if _data_field is None else _data_field][int(t / 300)] except Exception as e: _LOGGER.debug('Exception in get_remaining(): %s', e) return 0 @@ -1124,7 +1099,7 @@ def get_max_forecast_pv_estimate(self, start_utc, end_utc, site=None, _use_data_ def get_energy_data(self) -> dict[str, Any]: """Get energy data""" try: - return self._dataenergy + return self._data_energy except Exception as e: _LOGGER.error("Exception in get_energy_data(): %s", e) _LOGGER.error(traceback.format_exc()) @@ -1141,17 +1116,17 @@ async def http_data(self, dopast = False): failure = False sites_attempted = 0 - for site in self._sites: + for site in self.sites: sites_attempted += 1 _LOGGER.info("Getting forecast update for Solcast site %s", site['resource_id']) result = await self.http_data_call(site['resource_id'], site['apikey'], dopast) if not result: failure = True - if len(self._sites) > 1: - if sites_attempted < len(self._sites): - _LOGGER.warning('Forecast update for site %s failed so not getting remaining sites%s', site['resource_id'], ' - API use count may look odd' if len(self._sites) > 2 else '') + if len(self.sites) > 1: + if sites_attempted < len(self.sites): + _LOGGER.warning('Forecast update for site %s failed so not getting remaining sites%s', site['resource_id'], ' - API use count may be odd' if len(self.sites) > 2 else '') else: - _LOGGER.warning('Forecast update for the last site queued failed (%s) so not getting remaining sites - API use count may look odd', site['resource_id']) + _LOGGER.warning('Forecast update for the last site 
queued failed (%s) so not getting remaining sites - API use count may be odd', site['resource_id']) else: _LOGGER.warning('Forecast update for site %s failed', site['resource_id']) status = 'At least one site forecast get failed' @@ -1168,12 +1143,12 @@ async def http_data(self, dopast = False): await self.serialize_data() else: if sites_attempted > 0: - _LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast data has not been built") + _LOGGER.error("At least one Solcast site forecast failed to fetch, so forecast has not been built") else: - _LOGGER.error("No Solcast sites were attempted, so forecast data has not been built - check for earlier failure to retrieve sites") + _LOGGER.error("Internal error, there is no sites data so forecast has not been built") status = 'At least one site forecast get failed' except Exception as e: - status = f"Exception in http_data(): {e} - Forecast has not been built" wait no
        status = f"Exception in http_data(): {e} - Forecast data has not been built" + status = f"Exception in http_data(): {e} - Forecast has not been built" _LOGGER.error(status) _LOGGER.error(traceback.format_exc()) return status @@ -1193,9 +1168,7 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): ae = None resp_dict = await self.fetch_data("estimated_actuals", 168, site=r_id, apikey=api, cachedname="actuals") if not isinstance(resp_dict, dict): - _LOGGER.error('No data was returned for Solcast estimated_actuals so this WILL cause errors...') - _LOGGER.error('Either your API limit is exhaused, Internet down, or networking is misconfigured...') - _LOGGER.error('This almost certainly not a problem with the integration, and sensor values will be wrong') + _LOGGER.error('No data was returned for estimated_actuals so this WILL cause issues. Your API limit may be exhausted, or Solcast has a problem...') raise TypeError(f"Solcast API did not return a json object. Returned {resp_dict}") ae = resp_dict.get("estimated_actuals", None)
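A little further down this diff, fetch_data() retries a busy Solcast API in a loop: up to ten tries, each waiting at least fifteen seconds longer than the previous attempt, with a TooManyRequests reply treated as quota exhaustion. As a rough standalone sketch of that retry shape only, not the integration's actual code (the `get_status` callable and the jitter term are assumptions for illustration):

```python
import asyncio
import random

async def fetch_with_backoff(get_status, tries: int = 10, step: int = 15) -> int:
    """Retry while the server reports busy, waiting longer on every attempt."""
    backoff = step
    status = 0
    for _ in range(tries):
        status = await get_status()
        if status != 429:
            break  # success, or a failure that retrying will not fix
        # Each retry waits at least `step` seconds more than the previous one;
        # the random jitter is an illustrative assumption, not from the patch.
        await asyncio.sleep(backoff + random.randrange(step))
        backoff += step
    return status
```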
Returned {resp_dict}") ae = resp_dict.get("estimated_actuals", None) @@ -1265,19 +1238,20 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): _LOGGER.debug("Forecasts dictionary length %s", len(_fcasts_dict)) + #loop each site and its forecasts for x in _data: - #loop each site and its forecasts - itm = _fcasts_dict.get(x["period_start"]) if itm: itm["pv_estimate"] = x["pv_estimate"] itm["pv_estimate10"] = x["pv_estimate10"] itm["pv_estimate90"] = x["pv_estimate90"] else: - _fcasts_dict[x["period_start"]] = {"period_start": x["period_start"], - "pv_estimate": x["pv_estimate"], - "pv_estimate10": x["pv_estimate10"], - "pv_estimate90": x["pv_estimate90"]} + _fcasts_dict[x["period_start"]] = { + "period_start": x["period_start"], + "pv_estimate": x["pv_estimate"], + "pv_estimate10": x["pv_estimate10"], + "pv_estimate90": x["pv_estimate90"] + } # _fcasts_dict contains all data for the site up to 730 days worth # Delete data that is older than two years @@ -1299,29 +1273,27 @@ async def http_data_call(self, r_id = None, api = None, dopast = False): async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedname="forcasts") -> dict[str, Any]: """Fetch forecast data""" try: - params = {"format": "json", "api_key": apikey, "hours": hours} - url=f"{self.options.host}/rooftop_sites/{site}/{path}" - _LOGGER.debug("Fetch data url: %s", url) - async with async_timeout.timeout(900): - api_cache_filename = self._config_dir + '/' + cachedname + "_" + site + ".json" - if self.api_cache_enabled and file_exists(api_cache_filename): - status = 404 - async with aiofiles.open(api_cache_filename) as f: - resp_json = json.loads(await f.read()) - status = 200 - _LOGGER.debug("Offline cached mode enabled, loaded data for site %s", site) + if self._api_cache_enabled: + api_cache_filename = self._config_dir + '/' + cachedname + "_" + site + ".json" + if file_exists(api_cache_filename): + status = 404 + async with aiofiles.open(api_cache_filename) as f: + resp_json = json.loads(await f.read()) + status = 200 + _LOGGER.debug("Offline cached mode enabled, loaded data for site %s", site) else: if self._api_used[apikey] < self._api_limit[apikey]: + url = f"{self.options.host}/rooftop_sites/{site}/{path}" + params = {"format": "json", "api_key": apikey, "hours": hours} + _LOGGER.debug("Fetch data url: %s", url) tries = 10 counter = 0 backoff = 15 # On every retry the back-off increases by (at least) fifteen seconds more than the previous back-off while True: _LOGGER.debug("Fetching forecast") counter += 1 - resp: ClientResponse = await self.aiohttp_session.get( - url=url, params=params, ssl=False - ) + resp: ClientResponse = await self._aiohttp_session.get(url=url, params=params, ssl=False) status = resp.status if status == 200: break @@ -1334,7 +1306,7 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna if rs.get('error_code') == 'TooManyRequests': status = 998 self._api_used[apikey] = self._api_limit[apikey] - await self.write_api_usage_cache_file(apikey) + await self.serialise_usage(apikey) break else: status = 1000 @@ -1357,11 +1329,11 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna _LOGGER.debug("API returned data, API counter incremented from %d to %d", self._api_used[apikey], self._api_used[apikey] + 1) self._api_used[apikey] += 1 - await self.write_api_usage_cache_file(apikey) + await self.serialise_usage(apikey) resp_json = await resp.json(content_type=None) - if self.api_cache_enabled: + if 
self._api_cache_enabled: async with self._serialize_lock: async with aiofiles.open(api_cache_filename, 'w') as f: await f.write(json.dumps(resp_json, ensure_ascii=False)) @@ -1377,7 +1349,7 @@ async def fetch_data(self, path="error", hours=168, site="", apikey="", cachedna _LOGGER.error("API returned status %s, API used is %d/%d", translate(status), self._api_used[apikey], self._api_limit[apikey]) return None else: - _LOGGER.warning("API polling limit exhausted, not getting forecast, API used is %d/%d", self._api_used[apikey], self._api_limit[apikey]) + _LOGGER.warning("API polling limit exhausted, not getting forecast for site %s, API used is %d/%d", site, self._api_used[apikey], self._api_limit[apikey]) return None _LOGGER.debug("HTTP session returned data type %s", type(resp_json)) @@ -1452,24 +1424,24 @@ async def buildforecastdata(self): z = x["period_start"] zz = z.astimezone(self._tz) #- timedelta(minutes=30) - # v4.0.8 added code to dampen the forecast data: (* self._damp[h]) + # v4.0.8 added code to dampen the forecast data: (* self.damp[h]) if yesterday < zz.date() < lastday: h = f"{zz.hour}" if zz.date() == today: - tally += min(x[self._use_data_field] * 0.5 * self._damp[h], self._hardlimit) + tally += min(x[self._use_data_field] * 0.5 * self.damp[h], self.hard_limit) # Add the forecast for this site to the total itm = _fcasts_dict.get(z) if itm: - itm["pv_estimate"] = min(round(itm["pv_estimate"] + (x["pv_estimate"] * self._damp[h]),4), self._hardlimit) - itm["pv_estimate10"] = min(round(itm["pv_estimate10"] + (x["pv_estimate10"] * self._damp[h]),4), self._hardlimit) - itm["pv_estimate90"] = min(round(itm["pv_estimate90"] + (x["pv_estimate90"] * self._damp[h]),4), self._hardlimit) + itm["pv_estimate"] = min(round(itm["pv_estimate"] + (x["pv_estimate"] * self.damp[h]),4), self.hard_limit) + itm["pv_estimate10"] = min(round(itm["pv_estimate10"] + (x["pv_estimate10"] * self.damp[h]),4), self.hard_limit) + itm["pv_estimate90"] = min(round(itm["pv_estimate90"] + (x["pv_estimate90"] * self.damp[h]),4), self.hard_limit) else: _fcasts_dict[z] = {"period_start": z, - "pv_estimate": min(round((x["pv_estimate"] * self._damp[h]),4), self._hardlimit), - "pv_estimate10": min(round((x["pv_estimate10"] * self._damp[h]),4), self._hardlimit), - "pv_estimate90": min(round((x["pv_estimate90"] * self._damp[h]),4), self._hardlimit)} + "pv_estimate": min(round((x["pv_estimate"] * self.damp[h]),4), self.hard_limit), + "pv_estimate10": min(round((x["pv_estimate10"] * self.damp[h]),4), self.hard_limit), + "pv_estimate90": min(round((x["pv_estimate90"] * self.damp[h]),4), self.hard_limit)} # Record the individual site forecast _site_fcasts_dict[z] = { @@ -1488,7 +1460,7 @@ async def buildforecastdata(self): self._forecasts_start_idx = self.calc_forecast_start_index() - self._dataenergy = {"wh_hours": self.makeenergydict()} + self._data_energy = {"wh_hours": self.makeenergydict()} await self.check_data_records() diff --git a/custom_components/solcast_solar/system_health.py b/custom_components/solcast_solar/system_health.py index 0f7c816e..5aa85ffb 100644 --- a/custom_components/solcast_solar/system_health.py +++ b/custom_components/solcast_solar/system_health.py @@ -29,5 +29,5 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: return { "can_reach_server": system_health.async_check_can_reach_url(hass, SOLCAST_URL), "used_requests": used_requests, - "rooftop_site_count": len(coordinator.solcast._sites), + "rooftop_site_count": len(coordinator.solcast.sites), } \ No newline at end of 
file diff --git a/custom_components/solcast_solar/test.py b/custom_components/solcast_solar/test.py index e376c78f..4ced50f2 100755 --- a/custom_components/solcast_solar/test.py +++ b/custom_components/solcast_solar/test.py @@ -1,13 +1,15 @@ #!/usr/bin/python3 +"""Integration test - development only""" +# pylint: disable=C0304, E0401, W0702 + import asyncio import logging import traceback -from .const import SOLCAST_URL -from homeassistant.util import dt as dt_util - from aiohttp import ClientSession +from .const import SOLCAST_URL + from .solcastapi import ConnectionOptions, SolcastApi logging.basicConfig(level=logging.DEBUG) @@ -15,17 +17,19 @@ async def test(): + """testing""" print('This script is for development purposes only') try: optdamp = {} - for a in range(0,24): optdamp[str(a)] = 1.0 + for a in range(0,24): + optdamp[str(a)] = 1.0 options = ConnectionOptions( "apikeygoeshere", SOLCAST_URL, 'solcast.json', "/config", - await dt_util.async_get_time_zone(hass.config.time_zone), + "Australia/Sydney", optdamp, 1, "estimate", @@ -37,16 +41,17 @@ async def test(): True, True ) - + async with ClientSession() as session: - solcast = SolcastApi(session, options, apiCacheEnabled=True) + solcast = SolcastApi(session, options, api_cache_enabled=True) await solcast.sites_data() + await solcast.sites_usage() await solcast.load_saved_data() - print("Total today " + str(solcast.get_total_kwh_forecast_today())) - print("Peak today " + str(solcast.get_peak_w_today())) - print("Peak time today " + str(solcast.get_peak_w_time_today())) + print("Total today " + str(solcast.get_total_kwh_forecast_day(0))) + print("Peak today " + str(solcast.get_peak_w_day(0))) + print("Peak time today " + str(solcast.get_peak_w_time_day(0))) - except Exception as err: - _LOGGER.error("async_setup_entry: %s",traceback.format_exc()) + except: + _LOGGER.error(traceback.format_exc()) return False From 3febb2fb2402be55ca6ca023f380586d1ae5a384 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 23 Aug 2024 16:20:34 +1000 Subject: [PATCH 36/38] Update README.md --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index e328fde9..a142b92e 100644 --- a/README.md +++ b/README.md @@ -555,6 +555,18 @@ series: ## Changes +v4.1.4 +* Update Polish translation by @home409ca +* Rename integration in HACS to Solcast PV Forecast by @BJReplay +* Reduce aiofiles version requirement to >=23.2.0 by @autoSteve +* Configuration dialog improvements by @autoSteve +* Misc translation updates by @autoSteve +* Refactor moment and remaining spline build by @autoSteve +* Prevent negative forecast for X hour sensor by @autoSteve +* Suppress spline bounce for reducing spline by @autoSteve + +Full Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.1.3...v4.1.4 + v4.1.3 * Accommodate the removal of API call GetUserUsageAllowance by @autoSteve * Halve retry delays by @autoSteve From 8d0b28d60a887e5d816a4481542a3eafab337e32 Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 23 Aug 2024 16:20:55 +1000 Subject: [PATCH 37/38] Update manifest.json --- custom_components/solcast_solar/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/solcast_solar/manifest.json b/custom_components/solcast_solar/manifest.json index 21b325be..106d7536 100644 --- a/custom_components/solcast_solar/manifest.json +++ b/custom_components/solcast_solar/manifest.json @@ -10,5 +10,5 @@ "iot_class": "cloud_polling", "issue_tracker": "https://github.com/BJReplay/ha-solcast-solar/issues", "requirements": ["aiohttp>=3.8.5", "aiofiles>=23.2.0", "datetime>=4.3", "isodate>=0.6.1"], - "version": "4.1.3" + "version": "4.1.4" }
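Patch 34 above ("Utilise DateTimeEncoder for serialise usage") stores the usage cache's `reset` value as a real datetime, relying on the integration's `DateTimeEncoder` to emit ISO-8601 text on write, with a JSONDecoder `object_hook` reviving it on read. A minimal sketch of that round trip, using a simplified hook rather than the integration's exact `JSONDecoder` class:

```python
import json
from datetime import datetime, timezone

class DateTimeEncoder(json.JSONEncoder):
    """Serialise datetime values as ISO-8601 strings."""
    def default(self, o):
        if isinstance(o, datetime):
            return o.isoformat()
        return super().default(o)

def revive_datetimes(obj: dict) -> dict:
    """object_hook: turn ISO-8601 strings back into datetime values."""
    result = {}
    for key, value in obj.items():
        try:
            result[key] = datetime.fromisoformat(value)
        except (TypeError, ValueError):
            result[key] = value  # leave non-datetime values untouched
    return result

usage = {"daily_limit": 10, "daily_limit_consumed": 4,
         "reset": datetime.now(timezone.utc)}
text = json.dumps(usage, ensure_ascii=False, cls=DateTimeEncoder)
restored = json.loads(text, object_hook=revive_datetimes)
assert isinstance(restored["reset"], datetime)
```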
"https://github.com/BJReplay/ha-solcast-solar/issues", "requirements": ["aiohttp>=3.8.5", "aiofiles>=23.2.0", "datetime>=4.3", "isodate>=0.6.1"], - "version": "4.1.3" + "version": "4.1.4" } From 59fbf37fa0c24776ee038405bff59335abe1129a Mon Sep 17 00:00:00 2001 From: Steve Saunders Date: Fri, 30 Aug 2024 18:07:50 +1000 Subject: [PATCH 38/38] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index a142b92e..626aedff 100644 --- a/README.md +++ b/README.md @@ -564,6 +564,8 @@ v4.1.4 * Refactor moment and remaining spline build by @autoSteve * Prevent negative forecast for X hour sensor by @autoSteve * Suppress spline bounce for reducing spline by @autoSteve +* More careful serialisation of solcast.json by @autoSteve +* Extensive code clean-up by #autoSteve Full Changelog: https://github.com/BJReplay/ha-solcast-solar/compare/v4.1.3...v4.1.4