From 7408cbe0d652ca69bda9e7fef9bfcf5849616d15 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 19 Jul 2022 12:08:11 +0000 Subject: [PATCH] fix(deps): require google-api-core>=1.32.0,>=2.8.0 (#400) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 chore: use gapic-generator-python 1.0.0 PiperOrigin-RevId: 451250442 Source-Link: https://github.com/googleapis/googleapis/commit/cca5e8181f6442b134e8d4d206fbe9e0e74684ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b219da161a8bdcc3c6f7b2efcd82105182a30ca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIyMTlkYTE2MWE4YmRjYzNjNmY3YjJlZmNkODIxMDUxODJhMzBjYSJ9 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 PiperOrigin-RevId: 440970084 Source-Link: https://github.com/googleapis/googleapis/commit/5e0a3d57254ab9857ccac77fc6ffade7b69a2dc7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b0c628a3fade768f225d76992791ea1ba2a881be Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjBjNjI4YTNmYWRlNzY4ZjIyNWQ3Njk5Mjc5MWVhMWJhMmE4ODFiZSJ9 PiperOrigin-RevId: 440589618 Source-Link: https://github.com/googleapis/googleapis/commit/cef116753b4dd5f4f600b6dd0d0de5d54eeb384b Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e0e2c965210466832326c8e5736da461054bca4 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGUwZTJjOTY1MjEwNDY2ODMyMzI2YzhlNTczNmRhNDYxMDU0YmNhNCJ9 Committer: @harwayne PiperOrigin-RevId: 440226213 Source-Link: https://github.com/googleapis/googleapis/commit/c782e45288c42c411c41a2eca30bf4937c1e4edb Source-Link: https://github.com/googleapis/googleapis-gen/commit/d45c242ee81ffcbbb9eb985318ce897ac0d778af Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDQ1YzI0MmVlODFmZmNiYmI5ZWI5ODUzMThjZTg5N2FjMGQ3NzhhZiJ9 Committer: @harwayne PiperOrigin-RevId: 440224385 Source-Link: https://github.com/googleapis/googleapis/commit/afc5066d9c3133493b93f83d39c03f10524efcaf Source-Link: https://github.com/googleapis/googleapis-gen/commit/07cf54aa66638cb92e2db6a6624cb3da9929740a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDdjZjU0YWE2NjYzOGNiOTJlMmRiNmE2NjI0Y2IzZGE5OTI5NzQwYSJ9 feat: add audience parameter fix!: Move `yarn_config` into a `oneof` fix!: Remove `temp_bucket` from VirtualClusterConfig, as its value was not used --- .../dataproc-v1-py.tar.gz | Bin 0 -> 114688 bytes .../async_client.py | 34 +- .../autoscaling_policy_service/client.py | 5 +- .../transports/base.py | 19 +- .../transports/grpc.py | 6 + .../transports/grpc_asyncio.py | 2 + .../services/batch_controller/async_client.py | 28 +- 
.../services/batch_controller/client.py | 5 +- .../batch_controller/transports/base.py | 19 +- .../batch_controller/transports/grpc.py | 6 + .../transports/grpc_asyncio.py | 2 + .../cluster_controller/async_client.py | 150 +- .../services/cluster_controller/client.py | 103 +- .../cluster_controller/transports/base.py | 19 +- .../cluster_controller/transports/grpc.py | 6 + .../transports/grpc_asyncio.py | 2 + .../services/job_controller/async_client.py | 128 +- .../services/job_controller/client.py | 87 +- .../job_controller/transports/base.py | 19 +- .../job_controller/transports/grpc.py | 6 + .../job_controller/transports/grpc_asyncio.py | 2 + .../workflow_template_service/async_client.py | 56 +- .../workflow_template_service/client.py | 15 +- .../transports/base.py | 19 +- .../transports/grpc.py | 6 + .../transports/grpc_asyncio.py | 2 + .../dataproc_v1/types/autoscaling_policies.py | 7 +- .../google/cloud/dataproc_v1/types/batches.py | 4 +- .../cloud/dataproc_v1/types/clusters.py | 32 +- .../google/cloud/dataproc_v1/types/jobs.py | 28 +- .../cloud/dataproc_v1/types/operations.py | 4 +- .../google/cloud/dataproc_v1/types/shared.py | 8 +- .../dataproc_v1/types/workflow_templates.py | 12 +- .../snippet_metadata_dataproc_v1.json | 2714 ++++++++++++++++- packages/google-cloud-dataproc/setup.py | 5 +- .../testing/constraints-3.6.txt | 11 - .../testing/constraints-3.7.txt | 2 +- .../test_autoscaling_policy_service.py | 187 +- .../dataproc_v1/test_batch_controller.py | 179 +- .../dataproc_v1/test_cluster_controller.py | 669 +++- .../gapic/dataproc_v1/test_job_controller.py | 594 +++- .../test_workflow_template_service.py | 203 +- 42 files changed, 4873 insertions(+), 532 deletions(-) create mode 100644 packages/google-cloud-dataproc/dataproc-v1-py.tar.gz delete mode 100644 packages/google-cloud-dataproc/testing/constraints-3.6.txt diff --git a/packages/google-cloud-dataproc/dataproc-v1-py.tar.gz b/packages/google-cloud-dataproc/dataproc-v1-py.tar.gz new file mode 100644 
index 0000000000000000000000000000000000000000..978cc2e1ef613c2daa568d0fd873508e665042eb GIT binary patch literal 114688 zcmV(veXnb0Vt2cp zjE99tNMb^fT)x=W-t@oU=NteeCCiqbIBs_m;+S zCtu>|;&{Hh%YSh!{^jqzoqm6N@A*!Dr}w1W>u+y&pNQQr!p*`_WVuX*c#_6(ep|a$ z*Z)Hge_sF7Su%5iD0t8@NWptMJ2&fpZ|_OB-{0Hb?Qiq>-VXjf5#0yn;)df-uK#zW z@$*J9%O`Q(o-%zKFG1kaN#U1RO9#NdVM_9vEcuX7sCdi z>ePF+Hzeaklb~hWY$nuURe0b4TFfH2lsj^-l~p@aFQ`IF8~}311D1aVL(#nETxGw#4 z-TBtZhcV7)aglo90UW6$N07@L)9h>#WCH)B?<-%(oSB-&egXbMb)iBbaZzM)q{apD zIf_#ej6@bsRU9cpWvn7NPL2_%yBGUPU|J$i13DE8$XP&emjME8k&7fqL=a)>Aw)BF zmKgRJ? zbe?r6Tp*(;{ z)$!@84jASxKSiO+GNhP)6+w!r4rf9p;K$?C70OG2L?Fkh!Zn0taB!IhIRx28ZW!g4 zGF71B+bBFIEXy6{TyFqz$w)N!PsQPBQ+&67dU)Cfjh_$CemME*O#Hn6_U-=h+2N~G zaqGdMUJ=sVSRuOu3PzOqk!TnMAZb!k1C=lU3Xq&euTRP$>7kK^2OWk79UEu_LQFAL zk?t*qMG*ReB2F+aQhkn$fC-$HCJ|8LRvvxd*z_x7@QO4AEyyrcULL12k*S=Ok!K(P zx?~wN2ee?m1TrB>0C}D)59w8uxpkh63mFzj#F8Kc1WCg{#blDc@%vHT8Ya zTS{gkFJOxVQ)Ch5f3v5V5gG!fPEsKoC}6ReM|#28o-i2C)pT_l4T1w{F0xr>zsBXa zY!a%g1_&IEAhI0#U?pup%^K4%8nH^ylBCJt%qBBJjs(e&1(i}I8w5NeHefRLv_Upt z!$~B_po?;7T;{gaNI)(WLKu2JxJX_xX9W~o+Z+XAP0NwPjA^0+;UR)UI2DKRk~XXl zXGkuQ9)SY&2Y3xd+JuC2o~oV|H+M7L2)yn%;Ffvahc09^vjj># zTiD8#J5y;Bc%}FfjWa@-Y9>5Hm?7z)W*)FUauT)%Lv(|P_@br9chJ~**4m_K!ZY)b zIO8d(00IL_*J!;E?--LdYdwvHHQP(CWW{tU)7g@%0X!nrzn5IK#p;qvkxb$c@O;T7 z;$+F`i$GlxY{#YZhVgK)yj5dRj6yli8g#}BWtO=VU&xD~oFQDM*)W}4s?+oY7Qsd6M{>$muTc-OgYCZB~ z4zs4P2e?0Zc`}&GfZF(mL2f5!7ZQoz5@J~H5I zL+c~g(;tmpengiLW?B#i4JgZ|L0karaN=Sbt@8claFcjc0 zVUQy;H8nek>qTQnMWK$8~h#dR*eXM#q-Gqq^NhzzKeta0{U9=<^0Y1 zMs&b0;2Rd(lp9>zW_>$lo1C^g>XrJ8H-r2+;C5ADWs&2h`Dn1abSRNyX1h+Gen7} zr2Z(5IwMR|Y4p{m(Roz{v!{s<&_?%x$F}g}RY-*lpK?djvK<*J%|`p8ku&1QC=aGI znJ{!LCGG}lUwtRO^4sF8U)cYTj7|`l5g(&$K9ZYoQlT%M1Td60(Lu(b@C6_i==H8k znp7CaA4oRD+kAMkOLBH>of#O1SJ5q1r)L&$B2pm;Z1BXj${+>`(M2*4zmggJx=F(q zrkcR?YXFZ;!=^)~R4JFBVoG{wZEUsxKH!5xMKzX+d|guwMn&X7vAO36F9lBTq-5=i zDnc6B(ZJNDW@H+aRWK+SRk>MFAE;h4)UZ-HHh_LLIMoJ7hh7>a#-oY|#*znl$Vsw* zI$#NoBkb^l!KUR%Q+G8ITf#}ExQB_hLP``cqfJ=%OfNnbRNp10 
zv8zwNus}+K(=y~(jv!U!Ql=i1Wcuv&GHMq&8A6Xsdq24&Dg|i>dempWn?^$tJZGwrZsyiZ+B=-5s z4=PL`8VdWOTm_h~PjteqQU;4ni;xi&!8cbA1C`5A4jJDy4F_aQW=0&a=}f0wS0F}o z(GdJw4^AQ0gjD3gzJR`?(}m(!Y>F$jdM^Q6;^J?n-{8mX(-w=3Pq# z&{LNIH7_R|YH-g1K_`3$6fKT|VJbP&(ui+(ztmO{W8|G;=oSP|(On}inFQHF1KL)p z8W?DnEF(3PQN+Z^T+F2raV*o6QeU=!IiCw$WiFyr z58E(xYvtvFe3^1hn{vE~iIgcprTL9Pqu+{)Zwwy$cHU%~6&DooYk6y5<2*uLe8mFX zNL~bEv#uq{cwd?f8fd(b5!;AZ7144CCU|Y2dXT0XY|5aJ7Q{Ag79J;KM! zrC3U!8qUo6pRRq4JuZZ4qgG~n-tpngv|`&r72=XQao5Y`I`-L-q5>5jrlb;gXIYPhr}?Tz=p!!tRRBEd)rs z`tl9ML)n*Wcl_jyA3wMVd{n=B?iD5psZ6dyE%zzuUq_s%ia%tfk5CC_?^+;l()TBr ztBI-K%j|LK>b68<8S~A9RI#sU!`I2O$up_d6&fmz;GP7&YUzvUEevPZrLPaQaj;q2 zK6D)CbH=GJSX~=ftv|ydwj(2=mO?0)|BtlU6hJ^PmE%OL2 zGgA%`w3);?%q$z|zy)44j1*Hi9t~oam}qI}r?*FX&Epadt#_(MK@`B>4H#$Cl3pxW zUYyYO8Bc{5AZm*(thtJQj@O_N7}*ffr-Mgf1O z^f@W$Gsx-ly=>`@QAj^E0!{c*%HLCNoL$gop0-NEVJ_X;dJ+w%p&LR2hZ>{=-tMTc zDKWIsGc5a>siD%FCi!fkx;~CC1>c86tda~fZ5nz9N!!Z;Q?=rwg($9C1Cj_+Z2fi( zfr{{Fbmz2 z1Z!x#&se@Ex`Jg^*%eBt&W1FhB9zyfC@c~|I^$AHZumkhU$>@Bh31l~xh&xCyGdtD z=LV^lw#n;j0q^XcVT$%tQT0K=ww=~5tqxtp0RY@I=291d2!4go0c=VufBl1r}+m6P@+HY(_8D<&IkZRr})@#5)LwIyS| zZVkhDX!>mR<=*ZH#rJ5}>vf-dy}x$9-WkcE??3PEdH!g$J=*R2f88$o>hzjz>5|n+ zW>1g4-r3f*BPQpVQdaMuEokaUo~eI+(gH`LL5ShXRo0Ac*{)qBkqHbjqeX>{mQJ{Q zszs$kOYZ0&n^sUQ{!Puv@#^i{HKJ4mTbjMDjySy@{xZ>CAKz*eW9ZIO)UC~>8g$BX z1mH5rXlcNpwp z>swc88Avab(rqkIAeW(3M|l&O(B^tIB0&&tvJ{;1eJG6Q%Q%*qikElk$;cVNRsMg(#sVfLn^LB}uxU;6{=~8qSe>S(*+-Ox`}=z1_(C0PZo%`C5QAdZvM{7RoyF zjx~oh+@(dw^$@5B*D}0cAmq#%(WY}wm8_bhbw`)gzFLFGZDa45^-Q}b0hh#yvJ(4L zXPk4yxh2(?s~qv2=Ip#W)0^|bPq81G(h}>ruBJ0}#oN~R&sBGcwU+v4M%yx0A?hN^ z2#iu_^H|Wa2OAwz1Zw1Z`$t000QK}6f=tvda>fmfa8T*i+% z{Fk?GNbz@Hp#ToqgiLwfJd2Yi+M!g-*tY6}M3_g_qwYzst|V0(i}!QJCPio%InW8j z)-QT1rFsg)=-Fx1y0}wFU*Ab%c5DpP9EgpkxdX|7QXnWFWO?D)+=av|R{2)5&B=7r z?zG6OvOA+@dDXc$4Q;w)((|;c^K-<4m}q4T@|TmkBUhTxZA>{b30x}8mM}(JQ;x4L zjLb@Qlb}OAZlaqe-aIj!AWrD7CAH9p?wtmytga|~&iSCCNad4M1^K_NoV9Eg$-uqy zcy4Sy(@DZ2yIDL-<;)$2#pn)Vu}!bb<2SbUV{?xoZ9vg_N!GA;9lDwzMN)lRnf3It 
zK~zkW*)7-YwsDdM7Z}~8r3!Ofn0B{oEqS9=hV_>7BnjS0AC)7QBrd$tRoY*(>r|IC zj(FwzE-h;SacOf*DP4Yrw(h4(Ij0rx@;1re`>Bclo@3ebKYDR5%^vD`msQVy_ICRV z&;RW9y8ZR@KaX($BV1Udx$aa|NQ6f&ffO=`On8VR_UL8_cL?Bs`%gEd%h6=pKq_9 z|9qU|jP!d;O?}a$vbyo&PDg`M2HmHwYvIAbJOb!&ZSU0V%F#JNCl&oMGNqR|v3p5^hGuPz#y9b3;{!hVv&i|1KdfRLN ze~e?l((fBq@2S>q9rK_HI_Ie8uotw>SSxhjluly=5Jm{DX?@=rbM@xqOy;b(0 z$149XrS};SV3q%G?=Iy3KD`xU?f;K)77f)Tr>4#AmUzX3imc)N=asT0tRsMfn==YBVEAxMUcftSrJ)Bzm|05g+ zTE7RC1AU>4e}ngakxakDX#cz!{`O;)|KAu&?-2%8`v0~3zrFK(jsG9z_}S?9%&7O? z@c&c%^XB~g#X`c(cKqj#46FQq9$kG}6|mC(yNmh1*I&Q?_i>K@iHCR3{h#7^OW^qz zaqtC0{d0Tu-H%oNzgUTVN(5Nx|GjJezuRB?|Dzn=k$&%k{y)VxmzMKiBnn(J*L57Y z{aAee_b8Mf)YlI?#tQ%M?JnZ~9`OI(+W#Npxbgk3jfFSBcK>VM0Gq8}0s9EAfNj{9 z)|t1J(OZQkc^rq1czGg@PtL?EaQ>IZI~{S(8|Lj*FT|^Uce{tEFy(dgwR^dQxkP|JFQ+FkFHJ>U+JE%o6I~iC)cI>Or3F%)8A(x#`n^jS8D(9GR#8!H*fTR z*fA)`-}C+Nd)=M&{XdU!EZ4vO9~~ZQjQi_<=XtNc*8gK1x9Pw9A3Z*EjQi_DMzpz;F^3is#;K(0yuqzmfl+y?6g=8%Y+&=V$&3UFUlaCTqkakGUUa zy}K+SGu%xeH!wSAPcBy$wg63x<&|s*ch1b;{;jHhsGpK;9)Te5*@=k0W-jBeWXM9EYe0=zTnj&yvAS(F+VSF5oD- z32yeY0hen}F`w{cHs!|!We?M}z@hekWW?E~yl)Eu1JuHa#-Q9}P=`r>n%`_A9G<7i zaF7gc;XpX#_Pd*azyxl$z#$GN+4vT~Fp>Wj0R59yC7+*x{+gYX2QIG9@V~N?c}AiF zUk?3}`*qq2q2p$6=!Z$j;8@(=yc7AJVf$XyiihmvTDz<2MrisK#rMH!_gu9_;~^4!~%gb-iIt3-Uf^#NtuR9t1JDT-~*W130)8RBv=FlB1Z<2Qp-wwD#5E3!{`c0vcb;xvX&BKuWu$dft zH?Tp(y&J$x!2Q3vDGHJRuv}3(m+3iHXVp3h`ET9E5nQBAh|IGD}o~c(*#Z zp!O|x#)#LdUHuwi>8MdX;W|28zZkA_b&$F{-1&<>WYujgo_U^pv>yqVu_PkX3+$jS z^9QG|*4RV#ma^aBPEzA3Q&8nYuPaku58ZUDuZZV2rw`HlE4c;BUxlYnJTaYj<#W{^ zC+Wd(u_<)o9**t^b8Z{`C*56Dl9lFxQmM^wD!G5sDbaTQ$X|4Cj(BW2Xw)BEJ{R%3 z!;K=TTJwy^r%q>w{8iJE|kJkW_1!_;rf4pKAtr@le{?K%x&i1b>v#MH~E8ZP?Oa1*~4E<)0g81X3vs* z#BHRqe|GOF%bX;4v}z+V9-`q#TQ7$-r#v6HiAFro>@Gg+nnw<|gy=_#M|shAYRyZ2 zBZJPv>#YTj_iNs)(%);|aq@ZNWG_`?x!AUjTQYD>Srv~htyAB7H4`-SKvRMyh0c6v z?mJb*@`>QS=PLJHd3cwXo6n6uLM5)e|2MuSb$PI0A9eYEHa0iS`+pm+oBTib@_81# zhvO&7+08q=4EAC7L-6YLM#xK@GKw&Z`;FuNXS^du6|Y$xx`0_!8ZPmuKC|rq=!SJ* z=D+{_{l>z5#qM^f_ 
zw;6vvivR~yQN7Ubo{RFxA>&+8Ha3C^@hLA0?`$aEWYo%VQv~=+R6eR~Nr~xzxmLcE zCzJddA>)@cF(6K1Q}7r2fL7*3qJf;nk@^G^+M$BB6|g*e8kh{>Yzjb})kkr^pG`-T zK>HX_mSAu!pQ>1xk2KycTZd14&>=nT$Qg$h$>cm6gwcEY_f2vdPluD^CBE~wCwxQf z=6RNHVXxynKD&swg8c&^`+x3!oW0NNk0AcSyLg#>H$~RcCs~Z$}e)QRl+ipxQ4_|HQ82S8gz0I}p4zvD(_|{rF zM!W!IfBO~2F#bktmBF9~gXAZBf3`Rb=>U0lHLRd{~bSZC2AJ zVQD7X6bm#feLB0tl>(;g9ZCghpCyy%VlphEgh%p4pyNmVB%ee~b7my2>&aVE`TC@( z%EvH>{cM!zB+u4{S&2UE1)?J~CJ2(p{sPt?*(5He7+Ds85UH#&Et&m38c{H|;vya4 z^(592NFBrM3inZlHivRN?TwWnJq>z49vuAmZa0DfML!?z3Ke2!_v6vt+r6Fbqum}1 zR7NEQmvNqw;Rkv{n^g|hEUMn!?t4ocessA;Rpcr z2ZW>!V8AqK*FcglX)!5EIO0_TlcB}vD&;>ZttDi0I@+3Oi@Mlq80bLNT0rdK3)P6Q z(R+i{ZNABx7Lart4w6eO6Mw$;a%1CVBNLgr{~&kKu}}&8}sC$Rr!R% z8wM79qUcm8>vbm(87DuHRIn2dhe%_eJ{@zT<)@QtkOVpb{d0VLyu}Zi!p$oEv(*R$ zj>&1EZqbi1kbyBS#ex`?_R8=*v0b2~o%47!N`~A5w|5EyxDFEhs|e2H@i-YRa8k~4 z)TkujlhF7|UCT2}T6@|>GCW-gR{!EP8aWOohHeRimL0AA3NI9r+MEJM*(>qM8M~;! zD^4#I-Zlx24GOU#Zb+}2fgj2#3D2C(suHp8by$nYV&J#D!9;=Zt8dT&s#9fQrfVUr<_$` zST6u@7$tRhM4#A=MvF?^D zcwjT}WD@sDlgL@%Y7ruk!ojALhU~V>DI@;4j59Yh5;|8$fF*lOx%5||K%nfQt8_TT zQDLDT8eoCUC1yfM<)E0yNk7FyHx&kLg$n-&y>#foXbFciYUHj(LVptN1+nrDDuw4+ zk=S||mBvFG3mhxgVFif??b%LJd2;Setj?~o(K3uM{tO5@Jx%%(T%0)Q_2fuVxTZt}I!l8K?a5UVS`B^N*zVSz#=wA6}Jd>7>orEcv=U3s2j?S1l3_-392pxx>!Fub523c z0dyedQNo4CMdlH0RoTnu{%ZS9|9tN-bhRJzLdPKjbJurUWUifx1m z?jK{Kn+A~|>_YP59_7%!BitdI`>|7iGGsvzM9BgN2dMLP>SRlJuh{|Xe=$l2eoz!X zv1LUHn1KjfWHS~fy-2btjPrDu#S;#J`8)0 zwXK%;VcV(OV*m0^I?h*V%w77g{elyn?@{E#!-bF0dZi4N=?Ym(BLd#8%l~zBGR{E& zXt#ceDR_=paS0R-Oo{?bCP^5EEl2a5U*?1UWwS7+>+Eg{`3Xd$p_mlU$m1T9_<5yD zwSt@nmXBWfgq>4K~CBzLGbC=7BI#ZP!PMD>s_XO~jE97FOzx_5d#))|OYr+CNRrZ^#n zaZD(h2kDI34}~TpIRw?>Jev*&Bu$;5?rCU!Y&H^EHLFYOT8~WzR4$+`#QJeVO&<_y z;PkB}C2{()$?v`4im%i=89xG=N=w3lP4A*FXq2LJurF>!i*R3pv0RiCiY_%D)e(yX zL9w8Tyr@E@#?6|}vQ7r;A&PDf`2CoMS|hpK`m_F)Fcj|?=I(nBFTtF!)^2%J^M z6QAu7=kw4x*T(Qfi~}6mYCauNICgo;xhRefHPix;BgrjR9;keOSuN+{aKX0Y80I6E z6kErgYh1}QKaRIrI}DYz;3I`S7)QY*zYU`2}s-jsgc?IR%alqB8gU 
z!8*Vnfa?->`M7T@)122`!9XG&{-;eub77UvCQWP5PZDm|iujPbl^&XIrTaxAuO03i z;Dl2Wu(ToLVZgbTJaEpFc#z~jsJI#o7*z!*6_@CdO}fDZ&!)C`uoCn8O4y z!k6_DPO^wxS)ygzRsq{w@oRMqLN%=pD;_i)Cd{{Nb2CM?imyOD!v$}C%6l z7L_jl*KJGbiZ7nwN(z^wOX=2BdBmMg;cQNv(=bnD3Ya3~oAY8#Ix~@Od1-?G=rB`> zv(r842v65;2`6+XTk@df4F&P!Wt5c|zKR3-Q7XKK6%hTtbD98+lz(&B|74`(BQx1) z>!@4k02!*&R*F1Q9nr<3@E%Qa<-p^uu)foQr;1t6b_u^~c>5zJ|bq9&@&4ydc%>@qP{Le9>KhSn%g;znhhM`c6; zP!vY|^BYwcZ>}2+(}L4jQp~O%*{RQOF+$?u>PRd}fjWCKdVj#jy2^yF6vnw)+^1rs zt?T2}jch`@QJBDDDiSVx#2pV(OVSX0TZ$6Op{nIa>DgXF8rofHR^A=Hh;oIvGB*QE z&jLgOqb?va{4ea(7mnBLiY`Jp)Ri)i4~kcU!97-wexqI8v3BK6WE0MIIEc?S+Z2N2 z%nT^%XKzA=Qgn`9CLEL8lS=;1YbV(;u0c7i$fz*{5j`$1AmeCl>N3LapO~V@Hu6(c z^&H3SBXn7Q6FQ$NS2Sv0<00G+xo*X()3~)obTq&yC~mzLhxjY-*IaukO2h2wGdIPJp5X3yhi; zOX#D>%Vi!MSEDaV2AbqcpTa8X8*rNw1q_=m(WVlCJj{v&#X_0Y+(BIG4sfS{DKW@j zWQ7pAS~hxeT+7t|@UxEhmb31bA|I}V94vdZB66qeoxD*x*v}?!F^$(vW(K%X%bydZ zmOr^+t0^1!z&|&y|57Ge_mu*gzy8nbSL-jD`ak#axz+VolWr-cfU4{Ny!ig*#)e-1 z=f%dW&DTx+pZoYU^?#cBKP+v7tl3Jt6c*syslh|}jg+uP)U`O&!(ymrNVHO7Dl-Z^A7{t#eh5U8sWB znLtziup(>2oS_oW+W{8)pGjMjsQ_`S8bG)~676cnFLM`_;eaR!Jx&QUs&vGi5(OF` z1e};X8w1(I`8C(Yk>yFlhyD}gL*vit*JLy)lN_S)|3U=ZGqny=g9BR#Qjzd- z5A`*)uo5-&vta-faf;~~c+3YL5_!Ut=Myg5%rJ;aNv0{~x+X}IY>cPGPe(@|);3pu z;Fefi^fbdnZoIg$$PYn=S-|PQth_@!q|fq?ZQ-^Fc``Egb1|8o6kEj7^aQ}@8v5<` zij?g9Dl7+0`PY5cvGc5uaiYs&ONMOU023>)e8;l4^-Z#}y1`JgBPBaLnriEs7jSMT zYD#%&^A(P?%I5!`W|LT!Ji%;WbM;=P+h6^Qx`>XV4LnkaED&*^ZCAclmL1skt>B6& zxmj_D+%!(V$Yh)xpvdOz!M6hGkFTH47444Rgsz?}hG=4*KcTAyh#Jk6(AA_`dr`KT zKuWTwhAOnW16#8O0o(#P9XBNgTbO)VrqrM#9)FIsp791UcGL5u0aDH8?35cY4-uyj z-9w}UVLMBin7*@UiW&b+M%(d5Bamx(W<<0``d|{Z1AW{!Y~ae|0FIR$mGk89sac44T`hY&u-^%|t6GWD3-@*g|1kXo zBA`T{m@5IR*zY1UW!a-M`ch?7u!uA9B6)J*hqEzT4i1ge$6{XI#NPfKm*!UMnh!da z>)ACQxE%(JaJz6^hTA_M{S+PU9DLY4>^OErm;K`ggL5PxLO}fK;LwN=5od;J!LHAP zzQB2XlS6uulX;Qj*d+xyUqr=9D*@mS;jy#qZq?UNaL=e13UZEgY@!athg*wEYd8_G z4w}rO+O{25+wm9 z=Xo|gbHsCzcto~|&k1S##`O;Cz_4lO4c^!9Q<_oY7)l24vc8eo@t#^!DxJto{slJ& 
zAlvCFK`RtQfrX{Zyqz{q)vg7m7)i5@N}1V>?Po#pa6gJJ*~8dXQZhu!x9i&w#_6#BMw;LV~Hwqf-G zZ20jA1VP&)5MZpCj%TZPp|_Lmki)hIJtMpHiyAjwzsfapu9stdY-a3TFHtB1PVUq6 zY?>#|O~T){;Cc`TsP6SVYgmQX`_>Z`G0hIYtP$osv9q#R)M+Mhi)K+kEBu+S#%K6* zL4lD<$fHdMANCaKWe@R-+oadyVR?eRP)B;X1{2he$tcP3He83CdUP_45;3$1GYi2A z!jH>TTJAC#W@8r`;f);cp^8}!(~~^Tjp8;=307H2l3-yee>qRc4~A-3fU&JGqW}8k zh{t`o>Zl*X~TOE1QLf^lzwpysJLp!eO&mjrS(xKL)X&1-OvtW;S zw65x4#w02K#Dzs6oUD~qCVkU{Y=TL?AQ7h%U@)cZhAbvE1Ux6z^%dvLti-2LY63$v zih9>BJAO#n?b>g;u6A%ogDEH~LZr5ax=`qVHuBRBoG}Zg zN%Hdi?7lrChQcKlY?zL^Y;as=iC~aj_`kEgJx+UpXC1xnqi?UUnJC<9slFfQ3Y3B$ zvS3o{JjSdAmP!JDL~AA=KjWno3bn3gA@XpNp;}jq0e?*2E)Rn#aCcZNX|lC!RNebA zcvo1Z=d^`;D?q6)IYp?h^QkkCoGerm2bbNd=fYG>l!Ce|9SvoBdj;6)ixPMmo0QI$ zBEEE#UsKnPUrg|=AnW814i~4~1G24oVQBK{Jg8WUXr&0$8LVKO?K zoP$)dy0K!zG+JV5!1OYivGK8C*0%1`a4=-m#T(>24rD;C>eC6=DGbVHWw{MG69lSp zgjZw6iJV%|R`%tJucEOEyDVy46P}>P4Ga>l^Jr#sri|HXDrurh1w#zr5|HsSTFQ(A2ASZjYLr|_y8+#48)U@4;~~5;xMo+}G#goZ>Qj`2 zJ#ShLR3_=g&`I%F-$%@vMf;|OMg`e@3)-(j`_)s} zTS~2}xQ`Ybc!nMf1(5Ta-7^-cL3b+?V#JhefZ2CYLIpmfKBKrw*s;Ij8H7ePwS$cG zYLR&7*UCDfaTke;alJ-?6SEjJCpzw4&y|d_oUZK zFU-PXjI6_6{-V_@n4PI$p7&agvUtIRS{_n8J$sse*b|g5HO%97n1>|NIXOq!?rzzJ zpqcr2hCylbGYoBqh00I!un7f6J|1DEAfAOy*ct0;Ifd4QO?Y;`QKm4Jw+@`Fqb~7p zErY0?m_5Ni>ZT@}G#ul0IEG5Hb21IB>D}@Tg(~J_9X1Uv&OIEYxhOBuQRibPR<^cT z*omDr#mi3Q{j6{ozs=4EoeiOcQU^va(vF+Q|LGMyh4S2QZP82)zC04*ZzbV_?^BcE zVa7xe_mC&ziRO2xNZ2rUriLa%!voIHfd5tmg+7|=n!d}C;*zFz%c%Nlrh2!bxtlrf z-OO2T&Ni1do6DMFOwDCY<@3-lYqm8uxF40jzHLr9Ye&!04>O zJ;{WQPcV13#-c>>Y?BLBn5G_-c}V`@Na)?9@?JcZ_pB)yyyu-G&B07L8Pvr!*7i!3c@fu7;u+e&cohczqlY1l-Q z)u730;Jf@^mCNAqrZR9RFlcx~lfR(JUvNwL3(TwqP0|A8vry84KP*g&Ps65~Oa=1u z?PV&cNKr6LhJyP`O>nFC|1pYW?)(26uV1`=t>6FO{Qi4*)ZG8Shfj0=zq$W^hxh;E zlfLWfzf5M-nJEhZyZ+DHjz6D8LR>1AoOkn$JLASH+-FyL^}qDz;bm(4B`P1ymNcO1 zB{*O5YlMtn(q!R;1-Ft>AVV;l0uE>NkqFm+qn9J%5Bc-szUu3+j zs}Wy_CJwoK;b&b$T4sY@B`mE9EGgp8O31;oM=ML5@#&qsQ99VqCU0R?1~-}6iZ2I3 zng0NGZMgmSoig3Q_6dw3MrWlA5oI7;2@j-GT5;S;X@?Mle7N9@SdXq+Lt13w$Le?( 
zPw@WJvOteEX>{b)#%f6+%^jLrd*k2lHNM?T{&!Q8`H|mX7moSnUVC$|y@@wFcHK#@ z-0sbV%;rL-_{@AEQ#8=6XNMRWGf~)J7`)*qL~|9&1212`2-=vh=Ws>v4%`KjRp?f$ zEorKvghw&dzp2#3Kv4!EY`0q_xF=>I5x4und1ec#4Sf z`;u$I=0;Pm1)#-@-GotE?%~|6=*{w?9j8BL$4vwq&a-izs#y@^m9s)aoOu(+5+yWD z<`}tXYA?P3#dd4igtkn5sE?OvHa2T(8J8SQcFhghcEpq#W*DGRY&Hh$zZT^H?1)Nq zeV#wp=%~IyZD?a9gwooW42_j_#+FrLiW5uQoJ0K>R}A7cW5MaX_j3$R(2Q*bsMYcV z#b8Vt6}y=n(pp-GaKN_KiJ_IM!qW;hJdXC~qu02{_8^CslL7kr zU4a2C2H@y=EIiFKrF!SDs=4s1f~u6)Bf9dHW);oMYB6xw;O4j(BdNwf9cQ?HjO-*U z43{GLM-^~~| z-#bmEp1c0s{QMsSugD8W*q{8=#Q$x6|9ZoS|9ic$vC*9W_wi}Y|K|L^v-95w`Wsz$ z0>;rNXxGT#xN`;qTIGMrm_X5Tp*S=V_oS%WACP&KVNL-tGr~zqJy8)9mTf)g^;CO8 zuUC4Qt;#Gw%V^L;11DlEK(#21*&Y5ZdPPPWc*gFs2lRMul8#LOA}h#1AH5!^Uohw> zf}wE(B2wjq-FpzTNO^XxUay?UX_^cNqJ|6Yo}A^RxyUH#$XUYtJkCDIeNGAIEwB>F>znKjf z98U|EW$d{%z&Ihd1!{OoF|mgz*ORZIF)z@8B`+lzL-mH8B z73!NxGb^LawzomzN0bm376%6oyDH){lcH&hgD3(6uhC#OW-7`nrrmm)nd^337V14l zY{AX$Y<0{)Lr>wTNzBEC1AKo>Nln1wz35;Ea>dM5(;Hvk+(5|;4PGmNma7O~*!?Ys ztR`+}lPT-o%|=gu;zP8u>0K-vH_ap%5j#rqtfmVi`NW|`tAG=q-FawOo7nC!wJ%UxkAY&F03DNJ)*+(NPAx|10xzGQU}PjYD0?v zFXFAbo6-wi|AbXFR{F#c9DRxwestXN;jRFmZXl&l8Q^%cq7cbw+LPude2MXQpdj*S zXW9Sz-X7;W;rlD~D1HgB%Nc(9JDT_;~Ap4+mhM1q!W z>bUD20_<-=LLN4>6j6+LkP<0neMZM2<<#)4EVMEcb2kri*?3cwY{Gbqf^s$WN^mzY zbhJiPbKTi$R71GoOW*T&gIUp-@s$;ljaPf+U&IWycJJhWzuvE>F2C_hd+y3^zwJA9 z{7u)t^*i<1+9|aqpd8iOF28N}d#;WaZoWgm{i|LW621#H0G5g$Oor8+j6XveCgmkb zqw@KR8y=!`z?GOcGTcI9SSF3x(=W#HQsM!aEpiq5EXG+Cu9|%o7C}03`I;vV3_?AUt=e+BRruJp;JqUb zg+4iUzl5n75wCJ|O8uL}89x9X&#IMs@Yk>X44MgRCfTsV>)qlJ17)2^Ft{O?NwrLN z38cov8(LaWPxaqdqa}jQv!6-t%}JtY7xm16`-Cpv{dgh3-h%!GMt)W63cKUSM%lgw z7h>qgG@Y7i!o^3m=a+B?D3$Gp)K(JcHUG+Z=urR^@?-f1W?dw z9^-qF&xRHE-{eiys8^1&Ta4Nj9<^R#nR#F#7$ROKquIl->cyN0JLFfV+9%VVkMKh3KGh#+1P+4(EMGBvX07-p7&6)&hj* znRh$IRm$Ox1e^P2|dC{0)$sjazG7ugE3OjP@UrENnG_JQHd zh;j9-v#AGIEUfO+#`*0gUUKT|i)HSn8?Jx{fQMNw<(Wyk1BncQ372%HJuB;|q$tB% zBc;5()-xg-dGh^lHP6g^t8=wrOpt|iC)8m`Z4(WS$wZ_m#7x3>OtN~rl!%{nBqNM0 zgm6SKSEXVO#@Z5^=Ooj5(9^R&;~!TQ&TSU0N^wzOog@y=alz?HfgWOKF^*}(_LwVs 
zOZB;yX!s!Ez?ZXiIu>`iFzPRsW}PqSY2WBNTSn>_7UhrvU! zCgR4w_9LQiD)6%Nsf+r~TsS-RKA&0f>w~5i0p``e@I-8H^`e?~URrimM0ajvd_H|+ zwCk7s3K@D@M1i2}myW*OK<0{Qx^Fh{D;vcna#rMAq7OQE$AmeRb_Gm7l2}#pI;#wK zLD^|x?U3U@@<9evVVzC_^unlk6Iu3Qu&L?Ob|?N!bUyYVlTmiw349@coz>95nB7p^ zV!X=|Liy)kKfHW{j=?}0FTLy*oCc&DDZs!13=_b5KJ`Kf(#FKtBRz&y+F+qoheiA{ zCZ!p;EzJjVj|IDdisRNpq`l7N0Kj_ zDyihxw3$!eyGy_;~$Z|Jq;@YzCeAGN3u8_ZSC=&xHzpLcB z2a45Sv`yQ8&+)!<|5HwLh93HReT>+h$%bOyWUs83hh%F@v&EpeHkz;O;T5uD>!9->jmzrYS z?l!cDi!0n4B~YlJP}~O}yNyR383!2F^CdX&5uLvdth|J@Qg9RFqMswuh%N0BTK)Iw z;c#=%u>}Qz`sve3lhwh~K@j(}eRXL=aQUGs{p*OfOX^i# z${KntE4G<@y@0+M8-iOU*bYJeulS#sZJbKGD6%}e|FCLS`1o~D@E&9FyD@+XQv0>P zVSSRbWQ|UAjLxqYuw`)N%0UUbobB#yW!vT|R))f8^6$4$=QvqLhDC%qU(y8pOM#3cv z&bAzO8KA&d4R|<2ypBx@vCx90=xycYvCg-?ZC>|#{Vc{x{ zz`Pb6Yd||N#rsQoLuaimkASvI%#{svFy&YsqoB=f!lpo5Z?4_T3U@>qfAFg0lYE_&rYy*Xb(L*g^|t zIc)_;7i+tf3>KZV_|zH5S_4)3?_=sb_INgO_6$u8LTz!X8jMIgv%Y7&g=rkth>y0$KL;)15?B1#DR3D^W@(r9%}xI9 z7TvsiwIiu1>08PuxvXjEbm|2wbN z*Z;oyx;X3~v`?M=Aoe#1BW_*rR~NFeCR0{m3P;~08eYjRXOQDkpW@(&6y`^g?p~!R zKmN`gNpf3(cOw7QkzWDzEBqZu6aJHS)#160v33ZJ%KcP$+-5#X``C)OG=H~=fCxq?Z1aw}5P?zX89A@UY!D=Ejfm zN=wDU50~M(TCe=YDi=n4y>N;hkB*VP1CytgxiDQN*BkV8+zhE;1E1dUNWiB0R2_|g zESP;N3!FoYHc5Nz=1Z+5kehfOzDlb`dPwCWW77N*MMth~JvwWM$wqf>g8sR%BlTl zTY-gkCdn-m*wSE(1ntdj^tu*_YBorIMPy}4%da?}cWd76gXz7&1yT#`y3^!FnbPVetp4x3?b3$l)i@OYQQT zot~RyM{!N+ysPwfs&d*~zJ3#p9_3u^VAotX19G(0+}rEVGTxzPyD__VR;*tDhfhIx z1y~dftF%XpYPpoc>YWBIFNZc2#J&9%Lzs*Skq@obwmS;x!)BFEV;7tsP1%%p!E}G|rNCMr@$!QtiTb5(hU}u4?UK_tkAsCGo*|-r`p0 zS79itXeE>}1j=`Ym;OZ$NLpJ0O}E)&hrlYSVZznn4VZo9Mvz!SE_l@ki+&YSObMhr zCvNkU&(6&KwSisH(}tD>-{7$iCSXl26-Fn$JwQJ}M2$7KsNiF_i2VF-^>X-7dVU1A zsCYJLMSVu-d%Of(i({ORAT$k%Op_1G>~lt2yYB`2n0MYY$Q4b8QS3;(bsNO8G`!AC zw?+)tIt99!LYE*gEF7RwC)I zpKWCuptElC_YGoH2v6w)3htB$6BP+UF+Vw(a^p*1Gu8xHRW#i)smzZ`7Z+q}VhPuL zgfbYtDt(l!{a@(MpzR|gX6cwj9vt-!mh+){lm$h{5XK^m59a6PmfmVLEnf0=G$C0W z0w~hlPC0wjFeff;khSO+9z@E+6dPNv`c$wu#tDOqF^>@wnSoxPgtKPLl+}IG)*99# 
zW!7+Nvxy2JAX|I@HUxv$AlM6cPM-3-hU1u>pvCx*gK$=GVh+r{EFkYBjN_;n@8!>F z2r!^cRV_OsuZ|l|!=&e3Wl-1efqFY}#DAvHn1n^cNRxR5Q?-CqsA+kk();W=kUy{Z zzV>jfTFH#c10Tm92C1i&PT7jXZvf+_(eA^cR-MvdApDFxgpK4li;rY+QfW_yPV+oQ z3f^&8qq0c67_@(i2#e+(e}G0H#&faUb{;}bhpJ549yIqJT}py_{})I#S3zxkG#tp6N%9W)Z*>tC4;JY2D9Q0gAKa4d(zAkk4goQZ|DV{fK&2BJU1mMkG?p7J6I}e&X^7;aBphWB5;`*0{wBLY=N{@ZFpkJ-P@*VZgy~EAZVOdb4LD ztnaoUp(!_!f6D|;dLCwG8ZX<_(SJhr*Q2%QDjBa@ zbUS{&7XN<8K(*Zekb&3#Ap--Ytdvht)N z$VWU($VF|zT@Q!|Mc~2{nKM-iTwdurC9DnG8sqv@PMhbk8jsoMh%{ZbRciaqbF?Sl zN^W56d}tiC>^&bkxzuX0@cgNf=TWudFd@?TD zaZ8xZ+n-I*vw7HfRVB%F%NC=jiYi)C`L4H=%x;UODoYwS(|`kV zeUj2O!sf<3da*Vf1sU@F=dcdo*JGPjY+dlQn8CEEOJkQ1qi#!-pmxCyvX?G(PWCzE z>!9J^PDei7+Kf6445x8s>3yITWOkmF6(RW}!qZXU0t`P@UWxdpA7Ee^$3|bE2zIT3 z^q>|BZ5;=CyJof_sF0Z+qW^g@=Je#T7E6@yUN)BLb6LbWOaY^3Q#2ZyY8B(IMzgx@ z^cFKgQ4(9vw@FzPMOE~Kb-U|2zwUGqwFGZI;3EpZCsC4H-oM;ZR=|(ebFP?<7deQ} znG~>Dowcn|Vg;=!x2dr3oBmV`iDrGz526xrgnR~Vy7Sp-BUl-w6 zgF?6E`I(>fRVavP?Vj6gN1=k^ICBY$oci(y6V%ge2h;tU_N*xwuif)j zQFBnv-s*_Km|NqTn|6=bgSgU6Z{0k zinmskA9;ObRGFdJLsgZ(v{yds#OXOzUt-qOk|6Y6gx1^!6B}iyV6n9KR|E(aP%as^ zEOMu17UX6Pe$JSUKN%t$H|3u}Iq;d-h|R`0Ax+=B=niphQ>@RsPxO6t_|GMGwi5C~ ziF`BfiD^6cAvYaTzC%hBY_`CJPpggFOdU<{3j}R3l+ohJ_9Tw?R*v#q!^69~o?`QE zYNwsI5geR)co`E~b-|wG%W6wPXA+o!lS!zSFY(+-mV4#%Y^h3We2C2+)}8jf+-s^3 zXH&}<9qCpYAPAfvt{MfcQcKQN2uG@kekI2To5rc7Xte(oygrPX@AG5#jFRlNXJ#3c zLme70M9m+Tir110fevl2f7mkqh8nEVv#X4)@tl5O(@J6!Ytlr?No8B)6a;idG9nx! 
z))?O8E_eD?4xGHJ*PQCVq=O7PZ&|w6%&{Zw%j`nttPXb6B zv)8rt_%4mgu8obgErCtl5MbrRT#11qgK{n{8O9o#8Omr$&9GZEG6tRr&o@!bK2^CB z-qYi?vqPBcNdeeHq`ySc(eN}0lnf>=?`2KCm=}1>K|S^aS9oqf!wlXM0(c)js2}y8 zw+ByObK6&j4mV8kW|A-GwzXZ~HM<1f@Allp`F?lc0|ivjKjx9>0GL#C1UrzUa>fbo z)j+4XxpJk7f2f{)yCT~|WjVik(P!WM07 zVc-36HX1)+;$N%G3Ry#RF4Ujdf=QO)3}qPpN-4$;?h^YcOZ}|n3UZcM;0rhFeM>#SK5S)wkwHR6Lmr_uA8nYO^4hc7_o^Eeill; z;5Xz27p$3rb@wg7s-~4{i1KEWV_vmHd&h7$Z-|xc<@*7*0fiA?oGuk1BVSb&;GCKY z7{Py)c2zxE4(lIDG(xh|*z zXK1dII9d;~D!STKUo$I6X?xP?ZAo9h$r#q-$qhxMB{xUvq4fyLp%(sB!i~u-6szq# zF9txPoz^*1q^_sb-bGWnlzGiwT`wn9UiNQQ5lXfo%ju$JKJc&2u`klshWzh4lV>== zbo`+3ltZF#j{RqMt$#aB2L4lWkvF{fCTpBQ*Ok~CHI8uMJ`2!VRql(8JV3Y+H)7Bd1nD^l4*? z*Nhm0M8?NdMc*aPX=MSt_$Vaht&Y9$G1|+v!T;!PZzzQ}JZWpnKR&bZH#hC9rtWxm zX_}Eb_;@=bBc5a=pHu9J9>uTtq3CBP`r>+?9BMA*)MX^Mot(F%|01Z|TCk$);ku#9 z5cn0%J_Z7K|9y6aVKHRBQNk-}Sv`YnZOYB`lWBUojx z4U>Fdo@=X(-;o{l?wa~!K8kiSfl?X}Z_|t(UeGs+SNQ9RX7iJy(dxJknh6-F=d)(w zGedon{@awUbK16zjh4EmbwZlxvN(w6{JlA#1+Yvp{;>jWgD$U~F*+U^en zH+f&|o**eo3RqfB%8KvbUrE~TKhAlGj^U>#s{Sz5Ycd{onXGI5CYE5?#U}$!(puxy-zgpqONX- z^(k{CG5F4L)rMNMpvI!MY`XU4rk-XS5Hdss#!r4fj$=o$79s(iL(I~&J}?NA_n3zr zf7s}4X1%K6+}+N0QXK{BC!K;jgzO-Vjz^a9Sxv_?kF01D=*KgRs#qNUFlr+t-k%q5 zmx3Q2){gbNgAp7McztuB;hOIp;SEGt3;QWt5+>0Zwk|Tvi7ab2Eix*jBuzq z@!TLwvH#cW1_c+el4op4O01JPKc3-T?M=1)o88V2hT%_Hw+^=yOY#HBr`GN zKGD(4Kme8K9rRE0z5y4>nbRk~L;Qq`Hv?4E1wR3*$c3^ZDOq0SWue-Q z4A=M?q%Sf;jWhX0}^M2 z9yR|4i6SPK{0p0~nmm+5Gj&qEQl!Hit|Mz_clb$LXJXcw7ZGhZ(mrj-p=eQ#>An+! 
z+)aZ?(xGLpX)(A7D)@0z30&yf$U7=!os_$!7#S&IpfdJ_!8rY29(u=qiQwO!YQP4u zTmmqtc#O`j0{H#tq?XvwTXlP&ZgCvN#Y*b2GrBMae-eI4s!DSgDx+mEj*3w=Z=}SS z3bE4G^ls3?`rYHa&lP+;bK3JXC7>mo5AJL)+(7Qs`ni7v{JGT+gM245y^YFbR0atj zHJu2oX%lpq|G0dnI8*;eX+DVZ%ruqvz$d4V0!xQ(rfAM1rjQm3`ByMSUacFhMzy&6 zBdt?ChNsMt!V=+nKKmi6;q>`;LZ@7*!#u^bWm*uiX)Wns=Dkz${*&$DJmZZ2VkQ zg|XG11+N3HAb+Ristw(>@`afitO)$ABB;b=e7s^?*dJdohNaOllzn=37|V-iTH#vq zE6t}s)+tk}mP5{trQ*+%!~N!BYhhPM-mTPo=X^u>KaM9^<2sm&VGw0U{dmNisgi9+ z+f2>PMLh7DU!`#*O!8Ac<_|m!><8qwabK@*mfjad^Y8*m^;fdvIl~y6FF`o^wXc>w zeR10un}^|Aqv6{{QAWX^?3?}kT|d1Z9mhK!VVR4HF<~!Pyo0oeCn=r%0w}ea9YH;& z>eriXEtpDCKt(|H#uzm4(%56gHgwY9LinW^%7{X5YvGu=ze|V6+MuDLWj3$noi9)6 zPhr|UzV*8B^vZ2`aj!6P3&IKk8?8raJol=rl9QdJAG`NIzk)*;x5$jqlm4EU_%=yd z&T7lFiYJ=b_#)HzA;5rJEu#v#5^8=j^`UO0U!JrgN~}!uh!!(Fiac{uaKr>Aban#FUwha*!+DX% z_`LBb&!F9T8$35h!uYx&Mx_RQX9aJ}7YjssX6ixitlV7^&LMGT_=!#ZRmSRyH!;MJ z7|IO^ImB0~sTs`(?=8g^g>v~B*~vnjBw<%_EV+TXv;}D-Aht_=YEW~B_%|AVOOZP` zwDa(d9JWn1x>q|lMzNglFZO)ItA$wW++%=`6fqS%3=}1|?;OHk^bP(Kb;9yixAxjnc5W4J{-+b`iyLba~EFv!%e#s$0dv@*0D=0e4c=oZ6W( z2WO-rk1YfUVJq^}^n;xJC@nCmk)CmkZ>O1zke-L@BPMWGPI&@s2MY@@(#-Tr2d^-j zN8?zPgV98I^k}!Hbgb|w4+T&qDTj)S10FFGCP@YHWcy{hqiM21mYEBn4!XU03k6G$ z27t#k*D%Sg+>qF zK}uS3$7+a#8!%MQAYdh8l;OnD30`evv(QJ#RK3=*9-ZL4(j@=3y16NAz4DGPUt<$|~kk zUHUg%Y8!!(*nL4meF_2$7m8!FuH+FbVLLDC{*(ZDEeYvR^C+A$n!p_6`_TQztPDKd zx;<(5JsOGq(^bT9@$Q#l_i&tA6clYraOgesh$tuwD zA0GFq^J(40jw+3Xca5-_=Ni4ao*Sm`;%gj+pO~Wa>+9%vBIbHozp}^Y^{!)g$(PU| zG9{Ssg?^@9a>?7ZM_^vQ6}|fN63+tjdVcPN=N~A>3mffU`%ezN?qv%@U*m&E?#k~D z9_+X$&;4_Mj`v(rJnrrVT3mhqOG$Qu>l6Ov9FA9E5kZfuk{gb46PZV^Dmjp@@k7qmcGpvs{ zcr~9tuk}I~G8&QPmyVlZ{x1yKySX3L?>`7n<827~cUjXNs9Q>TdWpuD9-_QO?~4%2`|?je%#teRg24c- zfHqD&MpTo#H&4fVSVsk;fz{o;Mfg`^RtFH{2}cH>;2OjhC+4uag;vze-JfE(FeII$ zq{T)Q)Sr}tdKx8wm<)3v@|$3E=a#w zz;AO~V10w{7d$@%59GW3Q|uG`(_T$W-_uu5kpIPH4{$b=0{9lG{_r8fAge*RIID9( zPMQTM;@Fahg+IYD8+WFWT_ejczrqjm1Ja(~usSl4cA%6455%TpEim3nvs1{=xFGO| z!gZqEVlp8RyD#<|fBz<=6RiSw1^ap@7Fa62SnkFg1=KM0W12l=)I47zMlE8X5qBYczK(;um7WYx?0f`EY=3V519a 
z_iw;{4BXbjAf`N8s{g~l)dD`qKV^_(=|Gv_vgv>l^WM-aYf?iFQ7Y zJ3k_+1wn5A%bfW;{MkbNeVml>0OpOgS>OVAC4=hy` z4sg()FnF>IrX$%yPg$782WDw2A|;&Iz}@|ijE%Q=O{v~D`J`mWw4upbF&{GzS#&Ed z#Y3AU@8Hp}^cWx%OgWq)6#izuM#%icsPNKUI8eeoWm@Xq1Mgdpu{t0y5Q@d-bbE+n z7iQTo!TeuOq&Nq!cMEfKVNFxX1g$K;EJ>!kx`5!5XGXcH5c(wheT+5s2vN_6AqHgv zKmS`1q}{sLkM{)m-j*gKA4z8vI_XS`Ng4|H7)~Vlim9D)kVcqQTBgX21KPSmfGq`0 zlpCmwe_l^9H3Vs%n!S#OKQ*Zdu%klB4T!|LNI>fWci&weT}N6?TmU* zu1h=F_c2bA@sbq{jSe+E5_sEg2G-K(gu;f;jG1a;?J-=FueVdfmphJw<&M1ErXcO& zNJ%#7aZ~r6W+Vi}!gwGF+z@zT@#ajwTl;o#p~K@z{JA+ z+vpwib`In94o&n9Io%|MrZE)BI?{SA1e0*gEtiUYt`3Al37EpxQc&~xeYuav4}U7G zI_HsQ1#;%lh7{tRi;Pj4DEK$ERI~;n0$gWJ(D72mPPX;t=2}!wzJmFwyDtBH@2UFe ziuS$~=+}@d$|UC{HSv$t6Tv^x<@I4klB{bol#B$O6g_F9HT^>kvVoTUErx0(Y-(yy z9PTvH#oKew%MYZv*y5oEl5Tj@OBMy z@KdY=VhV;HuPWXcKJ&%Zlti(t|1T~1b`r)M2`Z>rg0R;?A8$tOTG*Zfg(S=t!Asoj z41>+C-7netSchTcZFO?BL$3grCTp|wY>ibuEAvl}ZQmnd?3IMaaxF~Fa}@=(Q3f8c zKFOq~-QG) zy#Z1!aN0o`B7lRP)}oZk2Dwk}A|x|6w&^EE%o;w6k3$5#+jGochrTG6|HtQ~`1$w$ zBQTRXT*8q{Jfbr7`vwLMX;q{+RYPOhzi>LOj8sfLu?@R@+E&~J6aZVw2^Xu?BQsDn zwgJ#@0FlLOR-CAEi-IClwoMzz@RzOS0%4?#(qUS0CAzo* zGeLa&bM58Mk#;^r|9=LvhiYxoR{T&=XuwHPd-r};dh!dal&nGc+r@b*o4L#*EORg3zn460p|%1)d!xKtm4CS#c^zVR}MG+CfP z0b62RZUV1p0&p<}RvNJK?x;dl#ZZ7iiG6BVA8IQPZ3^p}7JK>CCuS#2<-GHH|563V z_%l{?fbNO%i5ObFf-J6%TSX4+f+~4CtJ0YT<;Dh0uUc+M<{6~?nGMyxv_!sy>*Wg_ z;|@%{9Q>Yx<2WDT224bHHCCHllt9C<#BwDKe>-7>b0=cSri&BoDmqi!=UPzsYKdz+ za#zuPvE31ROXiGZ%!}ch%G~GS=IkXpQ$o~>^dBpkpSo}H0TUN?F32(y*8xY}DGO*L zX@DL%3p+piR1Mgrjd~NV^(yCFvw4aEarvp_RfxT zl02W9z@+qmUKCnH90t00y9|-{;7i7~)+Dq}wTLJWl2+aFCw}{)AIt}lG88N@~ z?kT1EbB^~Po+h?58LtgL)&brel~OX5Gg?~pp=TtNe*A3M5k+ia8zI z5E1~UCVD~g@B?GM*I5aqoW&Dk8j?s%zSX5c&o?@l!T~{$kbt#XSmONhdH!k4ka@w^ z5uN*%+Z&0a#-n#&FDCK=R_(A3aZ0zEfoKg>C5u6#{^&~)Yf@h)$*T>g$r~~4%8)0> z|5s}cl_58zZ|BGM-o@hPK^2_F>6y_`fZXzw4>wnaCpCi+HbmC|yhRH$8ajVRj-ajY zEW$G@^?5#&;+hLJCiLpfHGkE|-K0j?7LiOH@6VUshROReS_srVd2r$ly}F8iH);0Y zWw6+AJ(wqHIVWb3U;+yHkxylu)P=^RVBk%)F+5U2=%_I!w}PR0RIt;SrJ;59Y??ft zrEnrhdF 
z+4kG(S?w#TqLAHn3my>+BW`ZI`AdaLim2cGk{(L)`AF=f;vE^fd(^kstp#usStipD ziMZmFPU9*FJw8L(3ANn3So?%L>cB=srYja%?YZN9;$GPo_ntUelR zv9i0{B>3AskQLahIFRjsF4@l?2{d!z`TB~#9-)aj;>EiNuXQP16Ax^|OOpS5<$CCerIoM;rY8^@Rtu%HoYXx`Y(h?!P z`Zf_zTo%^)b!q`NC+5M!din~NPlc_MRzp@bFHv2m6@)ZR8cV-Z_`=;xm2~aK0h3Q{ z9Us@G+us=<*9`hbwup4!@59eE>_4|ZrrBK&$Lmgy($}-L+*`Ftp$=9x2-Q_UsKM!D3g6! zB=@#TZv3) zGx2rPQ`tNP_F>1S-FZa!>DWst@t||e@k_%wYlob}8ab0A`reRnnmqHBwsKd9X+k1( zYBO`KLDF#BB;JPkH#-(7feyr&CmA}1bRx}SIJNBdT?T4`djjqqQ&EVHLbNB4=Y+j# zi6tQUJRrm5DCX~4ct*_MS5(m{F+eg0hpog{CH&S68#vGA_4yr5pHTc=@!?D8%7xd)lO53A+~3dxE`qE(jvS1lqjKU{%E1;Z@WAH z9mB8dm!fN_Pr8XR7qfAY>3RSA=7Sk?q{*l|3smXp%}wzFivu)2&>H4PcTXM!Ws(C1 z#aD~wqCHgx!^Q_TVKc)8SPI&nZqYb*QV!;iN@qO??w{zLSCG1Tt-(IJeX7|Da?N2(7Ok~g}>I@e9~&~npRY9b-p6K)V|dgW2sF(K?ZVdkjAocbnG8G_i+k2!Ua zIzin7P0-pM?WDE#PFtJW?s9ikc0kf5+UqFx*YIz10SygXh_|T3Te~gj-t+7G0pAQ^g+^PIu{hyW2R9sfhA7!ywJi=hWlYRR3MhWN^7z2BMGRZrIGT)GS z1eL7B=wL#0-i|JFHl+;V@s@dN*vyuWB(rR+TG`C2MVaL9t0)NT%G7Yc=PLqU0N$B| zPVUJJ8+WLOVRQqH0R*LYfY|A2TkF;cC()alb{?V7H%UGbjRME< zPJ>3GU^`JMYBc*Swv555yMwOy8o@ItrHLvv+|yd|*Ep^ibk?^#OoUUmQ;l*P!rdcftYEr53;T_Js16&u=8wK3qXMo z?SF^P)p$@Ec!(rPf(r`(Me!u*g^m!}>hY$XT!~xLEN0`>pVQ8F_6Z{(G3lX#N9MGz zIWwR;W#*$uGv$2f%*QpY0?cF4R;qTzQr4kQ>#FZ@x)nNZpc30&XP#Oo`!euJyo~Ywf;;}1M{mny*ICHVG#Q+96k5h{@u-22xRDVPDp<4vr>D{uWY6; zuuT|x3h=75-U>$<7Q`T9(np*lW<3sm)QGs)(7fESChlkCqT-h*<#GQD@pD$VZ%VGr za4P4Ah#}?po6T7XPzqhrkPfx_ro5}5Nh6mas7)456Vl(6=*cDlYh5!+ zY>4)7gd1;vFJ==^GcG(C+5V(p_+Y&INe;^7_}Q&Lm~gE;;d&bd>W8kNGn?hM zKzw?MwIs?>ca+Bes#ZWhf0-eRG5)@45PZw7{h8-mQPq@hoR=kX%O>WJ3XB^GJFCUq z5f?fIn?u8XxI78pKBVCiheHM@(NSLya1nq<(3WuF06&#RFzZ;+hfcmVg-$H!&Q^TV z{yZOlFt!P96w@sB7Hx!$U~f(ZMGuiTk8ZBmFuz8Q?1&gK=L&WOB$E_D;&cFf0orNU zdlL0Re?x3Zkj-R-Jt}txmQM7OgAS9QEEGgvrw%}cYCd^#!-FSC2gh~7Ngx$v%>Aj_ zQ?tDN={#WZ2hiYA<{mBMI{XcU4gt=EjGnlA+RR`g3)%wEo}oCBk^`bPTrZQ8$6?KF zY%Az9Z@1>q(~i(0hn9=hkC>S69%%6tUAlh1?U(->N3K5|O8uKbj1va>eiu8Pg(?@r z_w33kov52uoUvr8-RQsE?grF}vH~^ue{ptC(Uq|4wy0y)n`!MPMSN~QI2&Lf`B2L&?auNiQOJg 
z%jKwBpx?9THPPm)7~lS`0&q6jrnB095C$4UTtt0i1RtVbOg&U3k|l0>4U-(EhXJ?= zkV{=_gcC%p%UFMFQCBXig%4nP(jXjdtjyhmYS8^Yc;mpaa z5~y42cDm6ciDnJ$ki3LlpRTYd4)stxxuG?WfD{@e?N{UHTbB3QbM*UD7frAmJWrm3IHH%ZM_6tY<3pmwOmzQT5y98M6ET411`ENg7`AzJEZBhs zJsE5qd@m`oMN)ySE<;4@*=DSpI-QwSSa5@SWenn2L7uGuEiqV;wUy@ME)5as@9U1+ zXKc*c0%G&<^hP>(YGw1ttDmIvZ}neWxnJV&ukw7XH~84!Ru0n#KRsL8bF}9R%V*Z$ zm!s)tmORNt2~440iMJc{pu4GsxHzIMk7EF!XBU}~fbV;SIMh1PmuWa#)tC3)g5`7c z!S|$B?#^FB;n}`TttZg85N)H`8($ad=+Svk_w-Zgt~9e>Hqe%|6DH7}|4$A9t?C~RA*=G| zUk-sub7isd_M1bvAH~YWM{{rs=Pa2hpY^BA8oDURtF#Do%@xFJ!FbaOl47{wshPo5 z6Z(n0?^iPi7LXRGpjMb~V!)a7Wliib8ZiG{JT4G96blY?bXBVM!(#}d`xJrMIn2q7 z=Iew7H?9qWm&!_4Crl*s8$$@md{p}vhG5vKCgC(vh+LYiqssLMQdUEe8)!iA_(uwN z6vmU&O-Ug1N-C#CJ5oe;Fqa)0BSancrU6~f98<%aFmclm1D|9NkziA1cPfo@T~5FK zO~!hm6x|Qz6ZZoC;Ap50gaoqGu_8BLyn@kND>ZDZoPv5uCgbIcb+fq!As?pjYV z^o_Ye_?uOg&lfJ5dX@hoY*uYb+Da^k`vY@_QIvdiz;ek<`ALK~pgy7fh#^5Qo^7=H zyDwTSK%NQf(411f@QQkptkzA*8aT57n6!ri6Lt{OX>-s~_g()ROcTrI&A#$T2irx> zFM~#y*#{0}bNA1F;Gyvu$u!c9i1?u0?YORcaf?gD<%hTTaYi`Cy<@NM%Buw`*OhFy zvNd&{biuPus#nW$$daWk-wZw$L#Q%f`q3W`HPV4inRP3Io#3cjLZ2Y9q>5aejN=^= zRg);eZrkwnx-&K0{ds%h-M7bIv7NR2N&hAMto=gGw;d8}`i|{6ZpS-JCmrtRynuV?cUal7`Q zq+(_ks~ucORqJI%5<@iEoIh+e>6qHYD9S)GYP9>umj+S4B={MpQiLmStVjYdsQ5!`k@yNdh{p59gRPZ zL#W^I(SEA1Qrg~glb|O06;x{V4zpIL1^x|44)$LQnvxkFsOSs8W=yijXjf zgMy9*X}=RGoDfmjm4N;-c7$}mYyYf?>u+A7)3xO7xPL;5gc*GO`xXMx1` z9t_dI_a01;V|60dP`qOvS4g~oH!V}!FxXj5&gx6?;-not+F{i2GAER_Jisw-4t<1{ z)VT4zNCwy8{NVk)1ykK~+ zvaUBhZeZ7q?O!he#&-E1FQJsWbJ*%r=hIdk@AAj`x0etjCKyHdEQjepHbF>{X*vAt zBa4V`aM?dD0M*mH>&GLPjsN}8>_;pI2PHz2Q8Jse*nU*?!%4NhM|TByzn+Epk-SU3 z5B!dACYnk45bS#QCWzc|E8lI__QGW-gO{olWTJ`NZrUoFHd<4#HV{Hpjfo>xIs_6i z7EFn6Uk=%Ze7kLP;+1arKF60uLs=N=wVvY-4Y0OR+{(-!317B5ndO~DQ7CSsdh2bi zIAV7^2eFp75y&U6{pA-DDcutgh$4fvSyJQCUs<=&i(dJHCPIZmfp=Q{``|!S zMtP$qZ@xE!V0%@shW!2Uw=TpOvX77YbPyqG>fUb5t}ZqwA7^ajXqmCK2i>J|ez%M0 zTgJ)7tBRLtoZ+o?9$s77)?PZxgxwa6O5OW0q3KWWD8z={z-51S*NsVU_x7A%NZgXr zl8Hi!w@c|!{CQUy^;XoB9zJxcXPrTFu32E1&heo1cot$Mvbx{YbJi+1)0K8H&QWvJ 
z3j3c!K`=l3=yDP*eHZXdbtSCjGms+AduKYvo?iL#ZtLSYaM9EeFfDsC4eVe3`U^yD z5D!xS_zMfmx_|wJdMxaoL{(b7|J`4x9%5>-_oSvC5@7HCPk$j>23ghC_fM~L^8NfP zTmvBLL85}Y!s{GkwCabGLnNs=&_4h}_}$}wECY%3TP6Var4+awS^~~UOD0FH@?17(`nKamXzbOOhT38LcCRNlx{p7uks8K-O}Rb zx=^Y1&U+<8z{Mmtu%Op+iu~+rriFuL1X9D?^fs7>7~|Ynb*?Gaa9aaCN6pv6``GCF z?Rh+ZsW8Rs+5Gx`;A3u+5f==3c#P8;?_rW27_{I3#CzJj(jzZWd&0oWAMZ2OWMQ!@ijiJxzHj$ zjBaw_ZX9vYcUJ7_KteWqmgq|s zmWib)uh7Gt%OjJDK{s$=nPV8kj%yc$KH=bVfRi_^L(-R^nSB@x?@x8o>vuX2Vy`YB zmtt$>%iWK}e;Z+b=kb)-x*CR3)&GVjQ1>e{T{hVMr=MO=`T4fR-oI9ghzN$l#jOc~ zdP4y>hU?ATk!AGe^i!jWiF9yr0(q+fdgNA6C#CkkZfW@yQ>goSPxdk|$x9!v2{6 z%!3Utv82QP3c)O_zn)fy#;b0JdzWWNAvZNdj3U!2<1FDF$uOFpGADbYm2m=ihyEN_ z+p80K^-7G8L8DRb8tX%dnLi`szIx!WhPYP}|HRxq5+B6{w(1Z@&tSCGVvy>eCi7Cu z*o_`D@@kO@T2a@gROG)F!!uGNW|T(ox#j5Uv^L%nO{^6Dlkr$&&xL|{AhzUF-#q+& z=v$8fF0RGj#03a#pRpz{5Nm1#zT`dcwBA-LB>~{f#2LsO&Lda1yz@|E_TlxTlGvaQ zK#pXBDRyO-zA-twf>1bEFf}R{avp;e=M{a>8B1P0Qz(}>+a-5UiFP`#HV{h<~Mu@Z%M0felS zPIg?tgoZWKGiVBdFs|^!UvDDkA8$e^PF(ien*hbs1FE(A$D7E?g*UAsZu!G%c4TIf z7Rn{Hm}UB(LXAroxsjX@1^0^A=@`O@pjitaJd5nJ{IgH#iB?YqO(Xs zo3sK-e=lHZ*{3-p{wqHhyi7x>+U~d&NOQBcLa;8t965lg{Ci|dA#Qb9cq5h9L>Pe8 zZg$}ZtXAP)b)v3llWJu(CH}x^@>`vVh27tpvqBnM4sp_R`D;(q*t|0slM4~ASE(Vx zkVuE+dV<_G>DwrTxvQyoKki2xbA7fSWTmj|Hj-ni)-&Qb^PtUGO+*GbM1VVl^ zR`ndK^Lx^;TEBWrd8q$3E%>`KMMk=y?7aTi1fwKFxh4f3@wRH396~`J$hfYpV zqKGX*ONyT5&Y zQqaj5O;6mL-#4EDg%^v{?x5bW9k|kH5uJaz&^kpk)&4LQWU9{2Jesmwf&uq?amo!* z>)h;L-6h_x7n6pU;XD68FM#!`!b1-|{YMJNF#)zWvwniq+^iyU6k)?LI375ELRmeA zHuXNsCd*{mU8{OBkD_12g#`#%3j}WAa0{6sGSDy1KGRTNMk92iyN!yMhx3Lbq*iFaajDguZ{ah)((^2(VQsWY?y zVAf0e>^J=y5I&TlVy@%~!FerUoJWmgCC6MHINim}) zP*|KoX89`#;s@=1!y4jp*1_cXIl@+RaRNA?rBHhL7jUx+xd?D)5|b!a=zNSqqvB^^s&G$^)Ao+}83H z=B*-Pb#diH`hYy#UrT>5hqKXdOl{)&+w*x$P#S=2^uN%ek1#DFjL7{|p^{{&=>CH)1Etc@9jx&bD| zXjp)&oP?L`N@QZDp`ssd@v>fO)l=Bt2yFubK5+)>tM{-hvyduCO2R1)^P2$NNq^0t zrTZAxR*?+z*0rrY7E&tHlo?PdL3kuHA@PHBCFMD`8)E zSjgQBLbuc*c=Q;6hu)2%l_!4EcR&6e+?mWDP^%9hv=gUY@z}_3NKGqg3gQ#xSTmbt 
zWFOP`DO{a?ha{ZhDZ^}~F>#1Z4O0u(A4jGw{I=;*R#H8tdZK7SlWy<#T0<8OSP|XF zlDX{Zt@nmYe&zEn<}r4wQ#Of7yoy``;#{;#EEmiBX4K2oP=6@^bJv9&nc2(Yg$uq$ zrZnEF7%dqcaHd%ua%C0!xawTTKe?~IVzI~@fx%y0v0Dk)b zDW>ppO7WPiWypxk3A^bXyW4t2Nb{-@C2w;hrEHzT`?|4IGpRNt=ai*Kpt9vcS?YJ8 zXu0y@A{gP2ss?^?EYxt&&`~qQGN;^#-{oj3v-BZS3M~lM`g-{TLd~`ed_4f!`yf{O z55r<}7}n7u#`%y(l_0Ejj!65%!9mQgm9JzP=1+}r!=#cJ8vG#HpvlgZ2#WQh$S)qY zy-=l)Pc~ZeW}dK(07X4E)%aP9T(gZPBVGlYwv7f_$t-v6$gZ6s&M*+6b02x0>CkvfXrD~7qDUI&r9_7~EjRy}*C?gfE@&kJOu7EgyoOs*0gZU*VBmLNW3kIk%)E;DpS(t1 zcCzC0$%(RJfr}#b89}qC@==Rk%F&x_^>>@lQFL@fc{c-&7y3?xwwyT}H(XtF!BRqQeN)qV zt&E|EiX)x9`Vuk4z;as2IOEqagol*ss$v<$YLe>Ca-UENgaWvJQ6-3AnR4+!QGf6P zfFW3`VjG!zx`q)m`cr~1bP8=n%s}kTD_#ADx~^`M+@_=1Qjv)xSorNG-e51w;hh$1 zmNbZJY36Pn@2)uU{V+2KLmvg4smieqDFu2C;#2Qgh?~(EP^6U9d9d25rF~b?+2!C0u;5#b{&4IkU z_2aG8*3eJ{9w2+98TZ)!9^+hv|H2!S&TQ?iA&P^0UGv2H+{RwKn#7M`_&mGT>&mCe zhxn1&_3=rP!Ge3BXNJSZ?y@{zh7>(wuf(6`j>ExxJQ2MTYY3nq z(xZ{*H1MWVaASA}uY!e##Xn>CfGjy1zb6F8p$JkvM~U+s>IhU|B(C8Mv2qx(JodUR zvq}|t$rD=q*-E(ELXMSFZ-j0@Ma;js72QXJ+vCy(+_i$&V|@$HBjcSab0ceT(p5xR zd_Y3q=i*m?j&E%Lp+Ox6=huS7I%-_TJvoxDBqmxi=f%g6gaQ>MU zyUS#6$?83|X>aKsZ!3DFV}-<{1jKVsguX7!7@qw|I?{Bd4H^r)ZmNeT-0Qa?J2RTq zkPQt3s_Pj9;Fcy9){;%iz}~vT^oaZmN8J4UxT_fYtpT%*;_`7;RZ{QZ>OS37TnX&&lZOYSJK$N5i*HKy@s_kDnE?y92FZtZW*j3!0Y(6EeA22Cf5Cf;h)oN#=4)NMkJvJ zzPxzpsEH?v^0`T}PAoTSvno3jMC5DafW6bZo!Reb>>7P~VaN*3pm*nlW3I z%~;(45JY2ixF!E&2!onSH**IUQ^SK%(j)jGH&C)<{$NsWe!a1C``91njlg6X%*4j_ zEbWgSGCC_s9pWj>l+u6ESNE_%qW$hD^WZgXud&?lB)Z}4y>jB&3b?CHQsA4o9oxAF z(3|Bb=uU^{qVR4gz#HPz#Vdr(Mutea=;x?9nWZ5EOXzfG%-PI8Y{xh+IS-+41cOFw z{OBx5#(#2te6iMxZYWNlG@Th9U{ItuDn%-)V}}IO=v8NF)A)&Jm8U|m&#X>AAmUQh zNJp>F^V?93G-cC~&&X!&)?m$cy|pCPJZYjeBr#Oh_;VWW{k&Qp5&=mdeQd!FCNq;z zj-gj#KlC{X8DT2PI+^gT=U~Pf33iF#n35FB21dUQt5?LoNI%Q`v~nZNx$qXpTjnrj zG<4iBt0d_PFUa>$5=HM$vSF*x`q0L~^0$g1%&ETuh!%h>xe=25BViNew$jA~{YgOA zdQSKbt|~~ju*PXzeDjwEpitsMup~@7U2+t_lJH;@2rMnSqL7lYvAlQHE{?Mg?XM%8 z^R72^9nEOy-BN~odN+|B(GYD0bx2&-?av;>X`E{&9huyCE@81D=lXv+KI0w|UGa|- 
zEb$qx;K6NNPOx~F%rpkwNaKt%VD7D(fl46R)+(1&({xbnltB0_0n?W2%ZL~4Yik?) zVULenc*SIhV0=Po2uu$i@SRBk1G+|lCx2^l#Ufm)3OM-$clqQA+VbQ>Xvk-(k_Q^7 znuWP|RUy}I-x(fAYK{nU-^~&10{;)ITQ*?L=8Cx*CuTU{QaU%f-`e>!d3 z*VnynUf=$t(X)4IZF%`tfmdC6+&-t|u8=}>j@4NyKK$IcioSQ8dx-uH3uJA5zz!`< z^4a<7c6pur{ECcJt76}NJ3Kr?uVT}kzMELu^6vWIY#$fr1Zuy3+djj^*_j6kurB|5 z?=3_>pu|nmkC0H$6UYCO_V)?M*FL*7d*4HLBz^s}Tty;20FcQWB54%G#uO*%NFd*{ zJ!BkE0j7*XB-I22jsyh3=YXIQIf<-eVjOn;v!@_W4&WYmWTKX(rJ9&5MikOZ>~qUZ0YYsXRh1Vh0OSH-=?yD^Tw(zIE<%fC$~ksNY=AFBH$lZ+k-* zA9)=cecRiH)sEF)CRb)xdib9n*j86x)XzhkpJ_j0@=5eazbkJtJ@>-jl{d7U^^EVz zn`DODwf(;;Z$Ao|VHR`RrT<-d8w8{fmOs~+&FalWajFxEO6I!_;qwfC(}#0#%Xa-lNps4FE+^Je$?!Q>zJ`a~+=oDjVF|S~mwbSe z%)>!1s;y!BK}J}!3H2@gkmaccVJoJ^7keqMC`vdb(%V-wZiDdy@1n4Qde^|44dCeB z#DF=ViJu}f=>zyR`rTV4K*BgOVD_${_;8d(@BYR&nw8A{1w&E5Ve?9sA{+#ty28Vq z)W_r@_th!+WADVyfd=H@0&)w%XnD2w=a9RX3!<&>DUi|E)JAmO(@4bcp~va6>YrpY z#l2}wDk4J$#%p2Qd>SoN2_riSYD-OP{wK){tY-;mZSLd&3wAFh!xc(`YEn{t@81ju!=G^uV!*-2?)Or?he7^$j_D{PlL(zg;%XSW`fINS*K8O_M zLqSv@%90qVzElIV<5DS;lXj6l6CU7-g7aoBUCA)ve_x6o=LX)9}LbTMD zM~jE$QYXoB>12n(BQQ2TtI@hBWPuerLEtlUPi3SJf?_7)26PTF_ZWB7*T={UTe{!)keKTtU2kwx$vl5wn zALEc40qXfxLYs|w-&A;u$1J%GTsYeT z7U$JtIZcXlL*+}jHT}<{&Ywy`L$1|Sv+&ks3yfm{TseXB8D4)Wk>%r#p3}zNzvZ%e zBQ-P+dLNpDfB_Z#l>d0&0jbMGJw^KnVU*6?f2_9CH|K!fLS6%vTIGxu z&odg974$^GAWt7T!iZo@eM?9`$X0;tCQrF>ZcQRju8*qj284-mO;g7SR@8q!DVg3F zX&40$^kVyytnfIJ3ut+Z#TLIvQauQYihV35koK zE)#g?OlwArOR0}B_yHyQbA!n$Pu)RbRWf6MMH+?)n%J6%Jx<}=tR#t%*lH8yC(GjQ zxsLsl?E)wu06_3b<>P9Q1~W3{?$SAld_;^8UWGv^6xNy|g;G-!_{oqAY&8ee_j?JV zK(XJDUhfFWW&C1ypFb3olPgJ!Ja*yzMFJwP>tV>Xi0Iq1@4yYG6pKDU_W0ba!V_Fz zzn=Q`Kr$U@zi5`YJ7-G)6UE{A^wh<)tnY3vu);QePB0knGfj@&V8nX8#IaoNcgLx8 z5b|NFj;Mx}ZZE2Vjdo8j<6=L7{2=qJ_QecSEaZ>W_oGQWQ&i1NGP+`fa_GJ)sLER@ zsFa&H3UFmc-)(ybn`J~UFb%l+Bq~a8I1fevM)A!&0|SV_M!%(ybC@4TIfk^X#~|5| zUa&G0B9mDi{X*}ml=yJS?l0)<6+XGi!afdq!5E)uAuK_HYdLxCg7{AP>khYhFawG7 z-r(-O-SF6@s++Er zHMHxfr3U15^c?x3>@TZkBmL7lAq%J*anJfW z4o>C_yojlvZh-Z#*;XijqbS23jRcv~*M}^m18K$JpPGcXfCY;&gaMSOziuAp$t|a{ 
zZ^4zY-jZBb>GaG`t$bG{E&V10JD8-@UN$Ka$3a;S(F?f$%*8_^ED4MkcezUcY!kKs zaMUH}NpBBQb{;^hvVQRnX0&2_B81tA)tN=@Tckf_3NRRF81+ba5jB$S8j@~;!d>5F z)mnmUi5zR_nw`bwQW0NpEGt6qP4ZdCU&kNMu4tApYs3D-s zZI*H|{Gc0sI!IhOj4OQ z?E>joFeewU8azC(${3hcu>py#D4!1bXmJpA6Spi}%IQmez5EtBo-^kii#AhUMwLZG zySbUU<`iwFc{o#X-2R~q9aWnNca0Ua@OnAR%CS+4kyk|)2MbJVE%oM%@H*!|CMhL? z>GPYGbxf41FE(oC#Z`FR#-{=|Pp$2~P(ZE{)}CY}ecoNS8#=-_24F7;^w+m*ZLRHP zEvyOH9R&a-cWO3wA*12NOvlX~2}~WiZFn|Lbi4E?1NXUa9=zID+5+(?{x^4*V$ZpT zvo{!UtW3%hJ}2gwKYk?Mt}C`8y5il2|K^Y09pfN1%@Xc!#D|T;uupON+Tx0GDcN;q z^tRtxoDpJ$R(>TtoLWZH9U$1`3uw0vFC=nI5%Liu#uV9H7QI}hGw44HMu9z`D5=zx z#tW(tAx{{zo5lFCZ_3yw>Ao|d&pA_4u^G_|LsFq@ki@;6;Lro{T8osnx4IUC4%Qe$6CeF073^Qlhgk*Uha4 zSh&7P(`eCG#+OpCrqoCHh4n)?VIXZZgNfw^M9Lxi)&U4k7XSd!vBuifd0wn-kVGCY z|Fku`f|(1oT1`Zw$17TNv}%9veh=Q~VSIF9jeb9;B$@h-{ceSU&0FPeD=WFkixbP; zFvf6B`@s6TCzBszv%o~rgU$0IAiN@NFz+AxGR&>Z^UKC428mdnfD^TF`kFrlSkL|th!5Pr@?zO-nq ziw?RbF>_83EH4-n1X3xoBKR*&5u@v7=sjEE-Bi>;#q0?UZ*|@YB)96c1Yk3pO|V9c zePD_*{k#dYjS7_m)*9^cooW?BQcaUmwyvgd_H{|3Fhn5*-8H87<}6@QgWWf%_5+i( zn%bdtFp}x)`jo(XP9#VyNF|P~O_BJvB`Sg%GOzZr<$kF;0BRj5RBNQd#a)lYII8i_ zV@Z)Gax!hFwdv49-kVX?+HFcVZ&j zATDm~5NH-UL9>_EHaS<{jw&&pKYM8p{^P@K)kEQu)VM!#Y%2j(Zp^026v13+NHzy6jD5sNXR)B z^oVsh_okC)%K8q#6|zOwWCZEM^eog}8iGA0jG3e;Pf{gp&jf;=mTl9Rfgp4QQs+un z9tfhH=bXViHnMbFxWKMQaAwT;mHY#N=J6^Q=5cuj3pF_m)i0-LO3SDFn!dLtUO@pumN)#_$@dICnPwv`T2RlVpLo5p)= z3xSy^+%Up72O`1K_7QOe&Y#ihqV0I7c{R#9hDDHuvPiFm3L*amnV*1$A1_;YGs;Fr z5DjS-qh~Y7DI;K8ENBRnx(1T(D8=%{pq!M1tAQo&#tQsf;MPmR9^c>f;d@Qsqqq|# z#J48@=a2H-YRjr87tgDW%e3RZp^Xgx4|Y#+Pw=;n&5l)Z6h+VSN5uH&F3vXy{+on? 
zTWnDqi-~KyIMH%UFLaj0h{4nub>`031Z>N}@NMr40d2^KOi(EfNf#)R4> zLncPAc&;yD`Fm*<-mqOG5yH9rQfeWPrJI|7Getn^OqwdMVB=RJfPID1P9^H?K^n2y zQh^?<9jD#pQXiv40WySZ{#B%4FX56uvj>?!z0e@zxdVYj{86IT?v11l$XSdUl0|By zpax3iDNr^{N%!?u@)AkKvj6M+k{0znp^CX}0y)itBZheT2~tvUQh-uAso8DzeS>+E z!uPFR7>iuEdB74Thi*|2Yai+wsZ6O^T4&I>`kB> zU;X6L*$zf~XFR;zSs-cE9KHKDmm8C_%+C+F2z|v8V1Ra`0g2MfcthD1A^+gYd$4T( z{RGqN%m&)9le*tx9?KvJbADU`B2j#cu3sip9vLCvF$t*PsmohvGsjO=ebSjJJhBM&z_2U#Fy2yucj zkiKWAhni;^xo!5CMtu-9P8A-Y2L|p8?E!Zn`Td=Ozr^xkd1s8mWzwAmP&4Zlapsnv6PFlKjMI#v*M9-c}tt(*t zln1kS9cr+FN>|MxCC#TuAo;z5F9vcEKQtKB;^7~5Xaotg;@|8Lk>Sq&D?4-=o!#aj zV4u2@q3-f~&5L?9kE^UB+5}!9WMJ^<*{BYQd3L%djSPKnq@n8{dT50uc0$@|n45Y($iKe{I1T8@)iP!R^W)$_eGdQV$e5CeqSarYtRc_9AziXpg_00_#{rKjF>Pw)&&5iH8GVXPajI+1Tga~=(Z!XV+Ojn zjFV7n{s%Oavf|5UGtc~=(2&#=^#x7pHe^f+bq?mGD^TUr5Ju1}w5tgC7sJFQQHd{K zO{k$^*vF4{{sBd&U3Am^($Qi)MAKc-+x=d6Q^Ur5Y^bu>4#S44H#=FPvEt7(B>WVC!7y#t~D%hkK(=YT_l&%PSw}%pJ3?R!V8U+<>$kG2lcth<#{~x`fX@q}!Lq-rwpg~x< zgSr6=;6bw7w$)asTm|kd^8-$PJ!s#zPvDv1*!COcP)#XvBaz_|q26)8VHXdnXYiNL znLSyr5D^mWB8w#;rHK3$F2{mq#<{YP}}doAuyxQ_ji`O?Y_%5pFoWOv#*)P0$^LT zifY(!Q&o;|0fc&9AizO~6SqbWbPw(n2u>@{Y35q#u44PShca#EBM;8SA(O$)hg1hJ zVDcw(54H*9BeES^Ej!+jy!w$$*7Dx7UuU6V$tJ<%>mZ&vQEq{br_+b|{A76~L+$mr4r?XAOS}4L!yr{OAbwP2G zm2eLhU;Ogbtzo5eaJjO}8WmragjEM?e#OTVZrYw?y9|tH=Lwi5?lkGY#Yb#Oi<7MJZF_7l#L6a&rs-aAr1}d zGMU#x$Q6)yFa41d-=2h8?AqjcgzI$WCAD{p?A&W@ewy*vXaM0QlAA3iwLvQge+K;t!!7)@xam64u_HJM+Y zF-L^=r|5Y{0*ldq1X<|Ng1s~TYU6<^?B39?$sbv)RdBu<+Sbue%$!533}T_)GGzdA z$&kxnoHoiyCL2e5Mu zhNf%Is6LWjz1#W-=!wQ#o`@>y*Ih6PyZK?UGe|T~=A}%eFp-^y9*mpk8XySb_u&sL z5FG5{j|WLXFmmI%5=vn(DN0a0{0Q8}d@{B0CuL@NuplL>R=5yOA)Wu@mL^#Ev>RWN zOT{%~9Lz;-u~bInRB^uRGEFr@>3dVgk5_K7^imw6th*kKPpUR+BcRp~hIw=424M?R zxf01ZAqH0EjzX$0=V)ez zM&Wy`m??CCqUEy`WJt~%b#LAU{F7G&#klOdwVfQ&L22E9`pYf`4&sphm>(4qPn_>4 znt#JX7Q!r=>e$rjQM>P4xnHYqcJ33HbJ(fwEQ=%~973?1c*MSGBv)Ln(iFOOJ$f&a zxkv**FaQvVMTaXth2%MWfH@y{2*SeSoY$Fzr-3u2*;)|%L&D?ECVOcwQ`9)f$#tzb z2CW@oB5lx3@VOE^cSzR)NfJGcsXHeyIzdPL3Y^p<#xOYU!#+oFn1-T9HXv}4joQRk 
zpB0uh`&BehjCR^W_DrLoG(5!{>nyM5Yg0-!R_mo0Y|VmWT?{cGdiWZt`r;WZ`@NB^wMo0!5IAKXKM`EG}Ir}S^BMV2KYD^E>j9^og( zCtR^d?6_d~=Pz<<2(MhyrT7yBaxVJ|+Cnoy#Dk$9ph2a2z4uZ;sE;v>=SnnD2w%5q zu^RNtsJvRd2u-k@pRfq^>fHIoAsrckN%ghugT)yg>VcD`Y;<*J6AcXkM{vnD@N~=Q z{SN2xS%NKL)@BQgylk(IJ#J4*T$J~hMTMpAuvFEwIp)+TnZ2gdC?QGVR4sYZX$Oud5GGEm|x-9IQ)}5&ec} z*b>Roq~c`@Mha(*lxDsmn!;O=+7jg>MR(WbeMR^B7UfhHDmjM8Fsqq;g&paoeFgK| zVitGhR30i>yhO5Ssd(`Nw%cN_v;`+GK8%AqAB*GSZ-^%Bm}S5*b(iyB5Dm@P2PiWx zKwcf#6NyqQf6pl=RyfjuqYRz0eyWH3A>m7KUcQ{txao}nUmcn8U8ZZ4h$*wNu@d@{ z`Wp8{&?1S@8&v3YrYanZyYDq3Ip14fvd=RMx?HN2sca+j)#J?}#~Yhs`o~Us>@!Py z&WPpWK5JV+yNM$-Vl;h<2C*Rv=C}-mELfe-)i3ITCHA<+@!0ZmeL&etCSU zKk(%2a?aw=F@_OprX zYYq3%J6!Iya^99{%+j=w#*XF#+m`$9(ZaZ0-Qm!Lc{T{M>e_8`bl%puoE(g3`(KWX zy$XE~Wu`8%(R&;jGM`@Q7Gv z)=}X%Ns=Zyv1v1;H74wPsvI+wYzag9nHoa1Fgs;E4@0(AE)Vsu=mrJ@VY723>kW!Vc zU&soZH6W(o4)YT zntc}^s+dUp0x5b$>(*YhRz;kG@%u_h&$tK*(Pcja>uOkzU;Jj3k?Zf*#Gavhl|&M@ z{%9k!v8Q^d;atSf@cPX>BHEL>SGuP`?1WL#mD2dZrP6)D!?iI%-!&jxlrmAC_N z-duY4%Hv%*qwvdr_01*`LCVmEiA8nMeWw^nDP4W_5a)EL*#M!+T`+mllYeL%WMWy> z=ahzD9%e9Kp(bsFu%aNanRs!Li*a5-#Z5CO=*S*0SA4V54KW!24=uLtX)z3aEg3>g zKlV4qMV?n`Imb#!{E+4g%Y9}-`@&^8gv>7mM`A0=U*5yW^=Yy#BI8=&{X2&gQrk3D z91IF;m`Y%N)UAM)h(rLJtBwN|R@Sz(JsBfJvj>MU*TkO-KZO=x5oUGZ*}2ef{@fdQ z%>3%Wt8}HymJB1SXm3O8JZ^Tu)Uy#7{{bW`CvAeEV1s(a#>#m_NL=|ZOU5zr+miVj z{I+BW^hh_pnxgWWHtkuE-BR+=CD}5bH767L1_5l#L(x{f>kbwKSS>%~7gK|6>1!L? 
zPYiX&*u5BCb`Z%dukkNT#ySuoEC^ij?u*N6!*Pud_5M+RXrdF`2~3?7GWkp)=7>U6TKuX)uP&+RWoca{CthOhb+|odOM2t%A5A?hXfU&<00IUn_ZSvdme=^sW*Gi)G zPz9qB8`WEpG>cR2DtO-Tr#C8@_^nNmdFGa?GG)tdJ|5!T?%&;z;iU3wO~Hb@jine+ zl=n?KQyNrSD>^BcEN#wE(QFxcG-YA+F{^C_ew4O&)>*BS(b-&(eFB++N`)}L(Lq9RV+N0{B0YNY;=K})le9d}2z{r!}s74e0Z zqpAwy=KTKjdU!Yn`@bl=r|3u@eQVUQZQFLzaniAE+qP|XY}@YG>e#mJbX+I@wfEZl zTNh`1=iFqBy39q@sQSHgK9le3efaJ8;wJ0m<{w06NA}~d?vQ`eWAYg3OA8Z0vv4t!tk z`=xoyXO*Y-j~`x8e1CN&4VxrE29OYwFdXFz@)o zej32T1BjK@FS?NH^Y{RhRl~;fnvOO&8fK)KuiS=4r9bHnC%f*3qMd*=(K(_|3&6rQ zBrV~0*zJOFp1qK)Qmj)S!DO|~+3&@+Au??KS=0%FjP7s((%mXrH^${WlR!_Z5Whlm zI_8ALTbRs!`XqQ3v?qof6OG%Qh(bvh8Gz;gr|Rm_%Lk^+k_GSfu4%S68@>GZSUE_) z=D_}YHon3tn`&O%w;EFe2gxW_XAmh`H4vB>R~pH}Wkn1z)}aJBxL3`ctY8jrQ@xmfg2$ci}$jgEo6MO}^N=C!`0yyIZ7 zqJ1hSNLd{X`C(F@hl=JQ&bC4eGfg=iIiTP*#HuQFdQ`g-b#cIe*Qf-3pqPn zqTCFa-1W!t=`Z&3tE53!SDIyJ3-1-=s(^(Qcttz8RC{L+#|D$GuGNBL#{P@y!luf~ zb|2GkGX}5vA2Wt~ymQK5uR+TZ&V`)iQV$!a!zn@P!_Ea_#(*cf24@!^ro%*nLx&3m zU75P>YCx;`3AjF05A;rB1RPbzQN{}o)--Qr$oi@FFuvYwaOi~@k6fPu*$q>+ab|oh zrCszehk6GkQoG9_TY>C+>B-jG*M)4p^Udj0l{;rarJWkzk^kc?V>IFCL7yoM?!aei zkpPO~?Mi|qk*R;Wht?J1X@6nx(ay3z;K3djkO=rnJ4#rJIB||Yjfp0C2@^pU#W%L1 zO>O>Zw8BYxKjhBvpuxu`Jad5Rb^cKmukuBW16+@n zf`mq$o`akOlIFYNtONE2alvoI*cQt-qL(ciHGtToPC#6wlT zCzKlE&B#cQiJuj8vG5TgdT%<2RK;OBu)Azg;>d+wFlB-wQo&}8@=QT z+K{G(RdoBT3!Q{fxJ8Kb${Kbj$$|!L{S;Db(p#C?d>}{dxsn{?Vm7cNr4LF7yFN{= zNt4!pYeyp`M-DKzyr&Y)H|=9}I#Se3{8nKW2fNlIbl#hH*Kog87}@lHsW7@3jCaJ} zDvT+~6$CS9^FEieBQ7VzFG6yyTGlX>L0O@?L8&l?Uj-PWRyYlxlM7FXrYn&$R%@k> z#hbC?3T0+az7(^Z5|UKW4;Lx3%?^dk9kI(ht>)=_ro16S&FX5r{D@p-s`B>}f-&=B%C5M|IempRYiYz!WO=w}E;wdkK zzms!g1$Ra3j=A86Xcw6!(C-6ITz z#=YWZrlQi+v#B&ju6HWuH09u>niejWZJH~?R<)sPjSW|&8zS5l3p?2}NTG1gK^$#T zoWqjOw=97bl8vGj8rztLPikNF`n|B;7xntQ?cBb|)4ou)0ww@R{OVs(h+O-7*nklN zRvpAf*jc*0Hk2noa8AufOLqLPmm82AgAt&?Xpeb;l`}bxzJAwo=+l_$ce#AGSfK9} z)KcSBM;aJOvhW2g#R`se&=jE1znU9VlSy|>*F5){|EB|k&;D-*W(=$M|8QU=v3CB$ zf#K)=c3@)u*MSlGc3_-xaIBz8Hi?jl46xTBi_rwk+xbNYT!O#z%CuHIzk<+t(v%DK 
z2Y{)_m>i$2JgJu03wftiNr0pwV-NNNAVXoh7*4o)R>8>OL~zrJ&_5<_yb5QcL`n+$&!{KLLdMMA zDoO+?kQZ7!N@nLc#!dA|iov=>LSAIO?r%~M8cw9R(SE|NW0~}+S>ee~$BO)wYy4;# zyY#vH4uPtf7pU@xHfhB1km#LIIbZ|lyNC2pX;ELM3IW-Dj#u!XRfJL1UmdQOY)`jD znC69H7`q!N+N~8>SF|Vvq3S}rMlU%i=$RBnq`@Xo4R0-R5IBt(Kq{5LnK^LdBh;w_ z_+rD8Y517}ZbJYC#*6e{=N#Tm8t+pU@7~fw=2!wQvAsO^Om5SjcqgJLs#ACXWid9% zB2cN7^=1Jgvh?O3l16ew(gw*ELJWus>kjwh*bShzZKhVCCE~cYoeABC3|Rxu+Bj2eA3Nj336(P>HJ0&{l--`{r(HbQHhx zCjdF-_)pZ6?IA{soLYQ87???i^9dV6(Hf7y0(6Ck+i-r88>U%#q&8VLH%xU%MWf2O$58!aw3E~yq_q}nnGMnKe4R8bFj1D)Zc353P%HYOTRYfeY| zS2njmrbwL{AZbZYoV75wETX+QtTreZZk2Jd2fGweKj$61WZQ5~OTU&9syC&=h|v$s zh>g;v=#+6tkNd>L1>F&%RtuhbmElX=BIGK?)Cep7m%)GVJ?1<{b{{SI!H;)fU$0C9 znJt1ckgA57_x@kO-0JG@<)PTL6b@8Uh!L7|v)O7%Cdqgsy8QzXLzha8N!x_H z-ypr=ukHiEwi}cm;tH_hpg)JV#&}!_Fz3BZI9YDQJv%8iR2_IF%sCNd5$nPna3wP* zR<(!Zy(fedj$IdL_g9y7CHo{KZD(*L9- zx#JYAG$E5OCXeY2YbISZl~%Rw*L4>?m2l!UC5Z6ddki>j^fmFk9BU5jtYueH!^}pX zzq>QDe?xlqMu{=mLvwB_>z&WWELMJ%B4&}pP0uRe6|Q$-7j+OVAC$eFXeO=_@SD(L zT9ox^-%e|d{w2e{0iJG}W03)3l_e`Hvltub$PlWdr2OVj{8U7dTWGP0AlyYHDcF8! 
z|5;cOjzmIFW8M|`PV+Lu9EmuIR2*B%l{1y25Ym;)sUn7ht0cdUVEjG^yCp?TWM2<@iEHQ$H(a{Fu_?wE|^klY-+G$z!9p52^s> zmT7Smk*y3an(azqdZF$d(2Vq<=s6OnaSk=4@ZB8$6j8mwKLm&2yKK|SSZN<=y;my_ z%R(k>|B368R2Q2n1Gg2bv6WCVTBen+@H}UvOx}ZtxYM3r6*>$Y&H#sL(>oKyKLocR z<a?N2$7Lub$_u`jLKo)dOQR)_mvj-aN{6w3oFOG+()toj zPoZo*HfxAgvUyRBfB+6Vm|nPkras?S)w=^N zSDM}*rk!+}z{1v6gZ8aFUggnXPdF+7 zRu?nz&xQ`WMJkhRkV4p!Y7b8(B93>tcGIX0RQ|ne$2zp(guI9ag~jutHj*Nbeq;7@ za_z}wL*R#ZrmPY6X#&FJ^wbV!^+W_=>8gl@;`@_TkfNcaqD#EsGDkI4iR~Y%$CL4- z(MLD)Cmsrg-!%eZB~{r)?+myX_c9`aT>b#`s<5bH6#^&^p_L9Q>+Qj#Qi{Q-5f~FY z@UlU=T(16^%*7eQHk3g%C)vQ&5u_+{6s?-sXOvxL{yC?wP+~JX2{rjJO~Avl8dFc0 zWXxl%t!@%}wro3nRT~jVM1zic@oZo)Iv=ct1!wFOcEBIequO=N(+)Z_2T};%c+)*; zfc$vKsB!VeaMaSSx|UZ_S?fdhzqX9AC^7a+GEP^xD4&sKAJFO|H4w2En&WXo1lyFy z28PojSOSQ#p6r49`T0$a_+2lLLL$5 zI4v!&kIW-0kK&<|J`c@vaS&1kH_Y%7$Qnt253x`%p4fa|$PoD=?#GW!!!$EzG< zSh!y(I!iGZw4;WCn9-Ks_54XRKa;x5pspaxN559wtb5JRiGRlV&^{S>?v|Qwo-OQ0 zxicyTq({ha5uk45KM_`GJ^c40?f~39_ZeFerIVQLJ6C03a zuh}HpEiV*H@x3SP5L-Cpozc(if$V>LOg4En4Ei>kDWiUf_7~igGXk`XyHIHWnZi$B z>;v3W%V(rQ*ChDyQA!=3#CZ&qDbkO_bxT52 zk*B0U>iFC>ZaHRm4wX|os34_113nJ<$FI6zrKijL02>Z7c3@v<$vBMeiNgz88|8G8 zc8@{PaquffLOo~H;h5RAx_e8b~Q4qq@$&<3{ygxFYR;D z4w7DlspYISMG#Oi3N0Q;**pLK-f>IJ`5AjX@1Mi%lm@t1j_cfy{#7A{e~Js^sm78u zhPL)6;!M0VLj1>HD31ae!KGEk>vcb3MSOw?dpqTkK_-*a#W@XjoT{)7Ig)m%NPCPnnOMT8Zda?VtHc52o{sTf5S-JEM67JrfG_ezbI(eP|8S&wNUw-_giJb?JLlw zzh>j3OB)S|Klp#}ECJiF`DE8XTV zblN~@Vx6Q6tGnqvd$L(4wMU`Yr4m_Ep}WwCIpAl&x60S3qRb(~w2`HB`Liy9XS}{5 ztrnkx{&ro4I#+W!(!kcKf9ircpZpmVg`wf|J@nzzqcMJpQgSKj6Sg&ohynFQ()G(2 zf7b3+VCH14*Tw?T7{DP`%fsR1`G-GziKXeqrioR6r-tYGu8EUaonb^!mjW-C*C1*j zvo_~&#H282g^!_;xN*7B4>N9|l!mm;z&Q2bqm+4q-XN6~>@eImofSgcHj@>-d4!8I z%Q0>uiZyWm`%|N6)GRwG^X7nU20x_)S^_I@0mQ-QD8|Oftq?)rm*_7x_?ho~eZ7o3 z+LH$Kv7!_;kLVpJB!&*8Hbq}8q-}M0EltP-L~yKaVQ$04_8XKJiQT$>4KYX zqBYxwJ+IHf@)J@L9bqepQCzDQYQt*D6dpl%m(G3rC^gfL2+<`eHrxpklzX`lDy+Zu z8SQt*zDfN%W6v@|TF$@)f*?{MQDQ$C^%N^Vj#$Wc17;zf#Q2!)=QNKNED#;#5km-j 
zoUL08z1;Q9(hO_mW`m>!GVE$IYj$pG9KdQhY-Pdy8pZ+ngr(ALe4wktg@j<3{UmIPDi_`-Pyh_!B3B+9ET zYKR2rYxHx@9E8{GW);<_-O?-8-?4LG0>@xVt-YSD$w5GyrDqMg-G||@wI;ROQodmb zcS8R(wM0pDRSBJ7*bSeqqYISW90i(*8*yHJQ?%qlsxgvm$QBeOe%HIXdfx8b;+zYQ z@BE)ZRqt+Zb~>u!XAO8*iaUSjFmO9rh15UxvcD^^`vxU3m_cuKXVZakSOtwCEGB^v zC*X*1*g8CmvSw89&X4s~iN#$hqXL+i)zVT_ zRbBW?Lk|AOTlnb2chjy3F0pPR#>6pZ@l z4gc6?*qpaKG5~D#CcqMB2`+>E1O#pRhX4--Nn55Q{Y8umfCQ$Awh9&i$ps|kf>!DycsoC>wEhHqRj2~HJy3!Z_Xk*tWytsY+TB3c!D}~czuWqL2!~m>+zYF>1qo8>!O8v~?n2fDA?8XE|4b}yT%9T5@4h4>yU&Z8pr`NJ0IEK~y#M?`wfVjf3rU8utO^h*pDKt8 zoYiKVt+Xo56m)s7n`+0oCuke|z2k!UwMBg*hfXF)26oH>bw7pYxHL|3ux}g%as*=iF5@$Bjm8Nn!Mxz~zJ#wZ4j+*Pt;CF2MjsPo5^=UH za75Ds)%9PYkZiE^(Euf>nNsiAwfEQjjHf!|*UiGek2hI|Y`F-Oqr$aYxwr5_BiZzq zuGypJn0FW!ID*fB%}*}xyvYw&8^o=9o%SZ=7ewAbCbzmq2!bD6BCxQfI;%?J)B{Ps z6MSvb==o=f5Z18bhA!~alI;h1K~1>|gSn^94FFI#b_Z47kd3$qi9Fj@K=ra;?{0+a7Q+ z2EcAgiKozthcYf|I$LwXGP^^V)`$tszbK;mXJW@21b}@A?`v z&jk;m#s4OLhj6a%<1%wvsI3gNY(n8!AEqo8c9=YZ!1vg*V8t@~%&Luq7+?<1a@m6% znkx1$VhsXLN>!j)H5wx|DYN>^+zQ@ns~92pA07i|Jj;xh5O(A5te^_$Q#DiZwPqwW z$vrSta+#{Z+~MlYmT5G5lS_Vs3}nNz9(W-1D7MCOV5(?u-ylN)&i5_e=7d~( zV8h3RcG*sC-t{>;I-rqcfCQLLr2I2sY?4F7MzR>TNJV~h*>-h@$8O!c{ zQmJ%wy~2V&IjO4RIVOJMX!s4pCF5>riUel5+$=g`VMF6bbHwwzg{dTk#jqeI#{%jf zk#wOjvB+;|I~F}E_Vqu`zNe}@orLkv+t*h%gjdf7`DvE9u~hZDEYr%3aE_EVuvFBR zXAK$G4aEFX!q)#_+deQnp+Rn>CgIp~Xl6ppeUrMBfr#mqS2z;);L7r84ntL7VLCSd zI{WH+t;U>LSQpZt8&w(RgIHl5xpCb z1_!-O)h=_N8ta9X4fg+HGR*OU(?$S~kZ7J$d*KFqphSEJErnC!28Em=?Sda#(r|GZ zY2~R0G0a1MVYB5kV{k3Hl@612LWwxrtvAOpOs`Go)bQn`Q!Q0ncLEE=?%6l9=dZnn z7`a~N^AuWSTvlB>DD6+V?e8I3UM`dw+bw;#IKc}GQ|9}}#mBdqytF3$>jVsC7EHZJ z|Au@}#v2-LxHmALkKIGr3Y*;QOb{+$4x2_h}je-W~Rm z8c$+Ycad7^xwxZnZbKSn?b4kx3mg;Lu-LLFY&M^W0%p94t9ZCeKe&j0O=V`k5*jNR zscyzp+O6IRL-o5^<)H%E&^}54$Jq%%B(ih0GI|6E2;trCDhDM$h>rwbF#NZ=(VARc z$s?o5HVsCNK^wg&Z;GbJ@_xD{K8F7QhbDtCsp>5j#gB|3I+E;B8+0_PD+$?r4wJ8#eQ%Vp6(~csQhjc#yisxDXkWGXxA#Q781t}>DuOUxyv_kf zIx@uQj9s& z$)hx1Fw9OPlDM}ZjwM2Rp`@yvm()+k==uj|h-f^G6ng-rXH0oDQAYt8tpRg&KqGJe 
zo`f(}=c+>J{oK{|D`vqZdR!g#-H=!59uU&Ym+T9xR3*!{wvmnvtz?e!#zkOWlQQGjJ3QpI?JD9%s1ws-t@7bK^~~jd8KwYW{y+*U9r+5#zH?*KDnKcon!Kb zz-JYCc|jAsocxgWMqi<#Ewf9_iG%uNCg&_;)SORE_%=PBKK&UGtP^x&*(`PdCx8c` z&Oi4vk__wmL;57dLj`WlcOY6?vV85EG;N(HARJeh|5y&5S(&kI6JPCvk2ZQizAtO* zW_xY0M|KCDPQ@&!ghD^K+-J8*tOwN)xpeEEJ8Y$gARtclEF;Gh{gz-Xw74qqkIHAm zbRQocLu6K$Z?>!BzlWb%fOUg(QMZ)UZU!6&J=vkQl@h6GF->fU2tC;cbE`bmkf5Zf z%d97#c^O=Ix?rI?FIr0AEUwz_y*6&F&2T35W;iJ^T}(4U?%IAR0z21MsKqPib>ls* z+wWJQf}*dyM@A8j*xLD_jN_{gqSDzZ^nT)nQHl3+w5~pwN*aKAAX(Tj3{(D1#H|yS zL(k*P*2f%qC9$|!>`yVlqps^E+8}>js5%mztQeegHCO+u?X0)eMr&!2x2*I1A}U!0 zCJ&Y>XS({X&C$C1dW2i6I-TyY8bd&Tj zaoAy(|2nvMXwN^cV?MA6ff4z>SGTe<=-O*rJnvhSIF9g_$^nFmS_LONG4cL4^mz45 zl1IX!KzTYY9F{9s`kw-kRGWEtim{cA{eG$2GC}dgAqxan5s`Ym0ZEKRTA5~>4K4Mo z3$2WHzh-8q`b{=T1lz^T9>9q#pooGbQKhqVl2hXZwa3F1C~{iNlbL-9Kgg+S{`x8 zS7eI>OV3S}Bp`{haaF2HgeUOkTMHG3CiubHvTt-dQ}n_jM=iKXY`7K=(Z!l( zXjH;%I%)pLKp3VSnrB3;s4P}-(0H*=<5s)L;v-$gu0NZ&&VnCUdRW{@BaI5pcj*=n z5u2ChAW&PkHed2@LBQdPEp=$ujhz0o!Dm11#M8SEvg|8cfB7LenyH)P8;7v;rR)or zuyjCz7vCpMRqO9K2Uvzst$vJX5jBk?Rea+R47w6Hw%gI1Ndl9B(o38Y&VeCLQ_KYI zC`L^%RKGzOxl-mTHZ%06g>+RrGqrxcr^8qpRpEK{=5nNm+Lp3eZC$U|jrx+`!viA; zL3twyPGwH(*c@N`H+=*UfkB>j!N#*5{89x8u%c zvcQ#{d$?0%WTcOhs9}p3b&H1GwEW(*d^=}r^GAx0Ke(Q`yg zlOQ9UW=xHkar5Lg;J`_0SkImy9rD1U^?Rm}Bv77WkP#YK4s~?GDDt`@Gjfwq?1ow| z`&rGQQTjB5z0qt%mg#AfN-~0x+f`F8_Us*kY25=|l&HRHdnxu}c_}ufOY*T9b#+=U zLnwtpfS1}wQ44mXz&?<^#>TBPy*Hs{QB86xiYx;UoAAsu`t*;C(48`-a4tAfO(MCG zO$2laf{=4uQ45(?y;A#f-vVdX)p%JGO50#HBF|+cDL^upJfg6FM{dL~SnU3ih;J#y z4b;j(Ku8v;OAX!V5qo~fO@v+QBBN*U$LKnyYbRRSFY~)=@3I8uH?t?NV{%8jX`kF# z;b>DD+Y{k9DfwgvyVg?EZtI=QXB&U&9jw9)i^NG#KL?k&OSL66JbT|46l%w4QrWSB z1G^nOB#SDswEEZPJ=fxvEk;&WL+Pr0ON3&pGjW`@i~aal9iiC+(j{q0BY~(R3rQA4 z)nUPDhG{#jdJ&_PX5^uCUUm2*eENJONSJIImFhS=+hi0NJB{Ycs*ujPpF^5674w+B zNy~hvR=-u$Lef(Io;m&vy<2JBukucsWE9aHt$)p>iYBWs^LQ4y)aS`=8xiMaO~N@e zfmskB6hTdafK?nVD5P+kGu@_?ywm#Z_?(iCnf;fwSjEA7Whp!7yb^-xJasdyLpvc; zXX#r#c=E1ZcdGxA4w(NNo0gTusOJxGdN>+!zhGR}@BaGg++Fe|0$SYWLJYdOr^{05 
zR{#4>`Wa%qx4yF-=GW`LcbfJZ#Vz7v-xikV1M{{0&@ra;%{I8We*?^ylO^$4jkm79sPf< z1L%K7Ry@7j_@Oxr9fJrXS%Tbe+){J<>uwDC#Wl9Ti-$v&T-FIjeP6->^Uz=8Au|Tw z8L9Ob;b_33)+;cPj=6GWVFV6C6*JfywOS$k?{pX~HGxhnL}i*sGF#67u^Ee-hi|`L zv{sQ`|E)6){@-*)LnS6$^!3^~lj-P4!`MQ^9&tkX`EUa5#qmq$qN=&r7eE-52|f za(zXnP&%JA=xqgN_q{)t5^1D$;cJH1%O&JXl$0@PQihy9`j&D7V)bU0bUw#OH!d~uTrMjfhPx-Slft(XGy%GSt`6}pTm}B z?ndn#wkN4JH%$4KS0$RsoP8*sD|0U<3w#Qb{#5EktwXw_C9zI|G-|#9ej)C0e}1J& zR}YvjZ4_r-qy@G>!8F*FOl^u49F*f26w0&J8)@ zL}#wtQSlGbt2knYR$T9cF!UuLRW$O-c_(C-lIp=X3Zjy*&LO)tZP1;GX6rlS;yAHF zYXBl)65yNCv11~TzZHN-O*M28MlMAXJ4>^;{=)6_b2fBLv*+3AAOy6+0zVG8fH=81 z{PCZ1iGUN0A=p4vw~fhbL}%`dQk9ZSBH2b}0-*m<0C=<#gy-RC%lfs5>^lRD38%ac z-LVluA3$UQkD(c>Z`Hdi7}q%o>Hs4F%%n5P2GLC^#;xcAq~1?GMVTDRwB!5=r;eEy z-(2+jUrUXvX`5w{*D{aR4Bpy1#Uhosa7&Tu=N(0qrL~^G;!cGa!D$Tm$XrTmAOEa( zh?m^hO>gH30N^NRHT&mJlB3JfCD4g6(aL2~2f1cBnDl8Uri{hmccZ9P6&)isA4Ge{ z&eQPn%Lgy}+d$6Z>+~hjyCJugDsvkHPlYmjlPkd^aHdqHmWs8!6SGY_c%p@+e#w2yztTkW{#i`a|@!&#E0#8zlxC31Xu1+ga zl-|%~8jbiHr&R|q-iwjK_@&rsgwG_`(I`UI7={hcSL7a`dtI|-K>3ML>oKrGM`E}R zXdCZ_%V@ae$Md+t>15#bHT)`a%KhN$X|f2S6f~=y1>U>?)47RfhAY7&=tTV(e0+{f zkoGmIMf+jY8?BUJ8b4h{MpG0dnjMKbo&+n99lJ(iXOYd1@o@g)me`wG+`iOt94vR_ ziw7N%K;h5L#YRFhoMGsddmjCZ z(LrzLFhTFoMDLKxO;TtYQ*pQ>z0bnL)yRIVQg_)+I!YmrAK%&bW?}pZ?7L~<&s#(r zRX}tiF-k!r?DM;&yg3XN_#%Itj+-HR`ljcLhjtea_#oP^J5$857xBEeahXz+W2nDp zSgWIDuEXohgWFD8w3WtXo*HZoB{Lz4Dh${fU|O#<)M1Vsks0)Gon*#99Z5e3f0Q7$ z_@*X8PpqB%pu)wUWm?y!To^q{H_&x&#p6a)*mB^KKYRfPzb<+e?x5sM!U zRX=N3a@cBd_`AWY5=^!fm4)QcgElM+G~wzoIh_qAbimrw6kh3n>-8l{rz6iz`5rJG zMfjR~I9jf*q3uiAnlR5+Gjy2JYL4Bq7onxJ;>4afX-dX92Hez+*=1=jBPWO4NT6f0 zjodK&mpvy9o%`$J?ezE_O%|cH8oW&dnqWcsSIAII$tt5wOP#YDl6YHnYP3FBfS6>BK|diLX}N%ElwcJI$G zZte`RNUZgcj8Fscz0o5)#t0pXL7Piq(ym0vwIzv3r}W%IM>`!YN2&ThQoq1prC%pJ zg&7!B20`F-!;wE%7{z(@7Rcq1_#HD|J#m39ZMZW}KUx+imN$BCRCt68IvN&{yD}P!NU7U#B#*ct*1PL_ z7iCRifT4A~gU1#5L}MHvN1KTB{hhfa)cUu`$5^U9k{xycw+pOIR6!fLx3D?Sf zF9Rk57+AMren8-BtrQr|;FAv5iZ2Dn6_^R)+b``;_pHNuOc$h7qAbFD 
z9p`*M3v^osBu8Tx=hrXVw0w0m#OvCk*rJdvx^xQc@rR~&?lLTxH2Qi&?(@ce$1{>3 zA-6ri^z$><6?m@eV~2Ei_kda9_jjB(zie8CD2p*VN#KX{91IN_=nv$Y44;n}GEAqV zG=6;p2$?04q{iAlkgs$z9hTQKuJP{*z2j0N#dn3CP1jH$cfqPgk6*E8#-R3sJCD20 zDuLc|f45QlNhh79bhaf@Dot>0TMxMPjm}A0z(cA{0$HD;m8-NCzZdFITWEQl7>_{? z8oXG6cw=5Pi>UP}#(eo@?Am2m9Fxp3&eJ@(liyBpm4u@0L=y>lqZvV|FwLcBv1W7M z?cqlSvVhD3bw^${ZpRi0MUd)G#k~lIfMceGLniDT*=Zpf%`Jz+HfF7$)nCPF<1w-E z@Sx4E{;|Qt@3dw=_mFWxooMppem~kPxP9LeTX614p;Qi_>I!2~f3y_VTuN`h?f?b$ zMNzEG?-UJ~ZJ34$f-DWrFl0-!XW;(MSXZ$C7I-O$j1mf9*%6Q*+7dbnW16HWsLV7` zJecN({yFjp^|V3VA#ji@Zd-4{RH|1KL(Ng?OZ9*u&v-`}i;DyPTe+Mm`_0Y*M}&+N zW~-i#YIIbsfehno?u3+%l*1z?BF8Jl{H%bB{BeQ)9-DE}7~U`Td-FV(GIdoE}v zE8lNa{?W9RY{y(f(XVuN#H}Ae(_C+)f%9ZH4E1?G*EW|xrPt9?E{-pTKnQ7UC!Nbe z>Kr=Wz;Eva{3FjpU-Vts=e-a73&$RHK5oqQ#(So!mN99UY;I+>49Y-LIRoF0SGyvj zXYjc`-E*@hrUjZ>zY?zYEC?w<#oq~h8=7%jyfxUW?xQ& z9_D)FL~q*lozj&5EpYrnxa~_<+wn28sZ+~c^XwBd4I6Ff>egzG0ia@JRdwhQ{d2e2 z*xx>DTvP!P5R1CjwHMtsh}o@eDz|(Z?kvAK_fv$^AYsYHSDLpm>>2YY9erZYICF_Q z`@}4B33|UvMp@4n{-5sohr*$Ejx>r7(T|kj2Q4)oOeNdHFGEt*o<*j`4a4$nTQ~8@ZD+jIjUaPJPvogx(|Ppr`mh=2(^4m`yffXe zM-+7%pkJGYUxXQdD<}Xn|E#u)edSF(t<+_H2z;+b-viiQ{d<(Y2#Ha)mF5YL8Xs)d z0*7{_1_*b`Czdis?@zja2}low9LM=Ni|fZ!v=`O+5WL}kwvLFYTSUDreaxOO;w)`?wA#&9 z(bcsJ@8}fR*UoaPpJvfMu5)v!pXQCBzn#NA-XLVbK2CwZ9fNe=w{O8dHi|k2=Q8R# z2luoK?QG`T+s$%vo@U`bPQkxz{1yV(s`1{o%OuZ$Ryyb7Sentg%%X8wz~F6mi{@EA zXh^=niot{atAxIA&EF3L`CURMNBF0NF6pJPjQ3qazZ6z>FuV(bZ7Myu`!1oQak3sA zDE?DI|DKgLADI~6ak|G{nL0e~kx05L9rt*UHSKCQm0Kxpx5PRhG$@y~%c5wT`aEuJ zH&uH}M_Fg^I&1!C782DYax-;4&4nH_MDlW^*Dyg3#3;Y$tL8 zAKX{n0Hr!%`6n`G*NPDn<5VAF1n0S)*=E3drBHO!P}D#f{`X^c$LnJEd$BVEfvhfY zGDJI;rj+$XPSbVVRpY(q9rr(n92=BO=~$%NhrS>XRV>}HaHdKFzQsPG<-*UJ4qp|c zhlSrQvf{9%JEq)S@)grSCU|(PFJ;_4+F{|pp<(OI3||^W8&ii$ghiba8n>kK$|M8r znTO=E_rEBlURP5*ik&(++pe>;|L(Is)OgPca)J3`D=PBpJI-$CTvU-ZKHD{VEl)%L*Wp@i(%+LWYheeBy+Dy<)gd?b zK}FV;l#vD#XMgfNE0wCikY&`vUHR-bw53gZf}+{<{HJf1OjCoV_QY=-`$p@0Fdh5P z&5?MVY=V4jyYE(cugnF|WrV4_Z?3?~X?c&3t)LHi$oy&sTx`lwyUTGO8f9whkMK9< 
zTC5@KZKA@5?HOh+A=Xy(e3n0mXu-^uDwk$X64sW)ESfDXa&Rrm=(%d;d!9N0r-^N} zdSDa*;i89^-7WjW9NTE>8pVWS?^Jmdv~nE#hYic2+TE1oh$ZjqWXk%B6G_@qJB0UO zHG0Yn$PddT$mKtxP`D;gVPqEFJUxuP&{YG0p^yn6kK0Et!OiS>`#UMmM1RRrZmnEg z>~(yu%cyt9zSps#*;{OpJlumk%tTjn>DmoIGy7b1bcevuCJ4FnZJz23>CZ=>fy>|> z`+grv*#?icVsAx9!J-KB(|{X1pxOB7;{zgUv%NEtT=mHAfJzfD2S~eMPt0S?r0tMj zJkj(=s9C3fYGs2K3+bR~5~}72on<&{+zcp~+^)}qw}7gQc@9a_{G?b= z#z%@=ge)fr=B=7ITC^7X!{F_xfOA$e;aA-yIns+Oy}!fk(lcnFR@oe|YP}(x<_wZh#dH%mcFIVwp&z)D^46 zZ(Z7FP@;=w{Q3Q_lyqyJqr7lmXGJqBIl_fFhe=ORDcz6!yZIfJ@!kBUQKhgItM;iB z?Z0b){VFrf-T=&OXSuZ`l6_xm*Ln*HKC+E((T(mnej++E!vD^)gwyHUy3vUztVn<< zZz!HuN$GRR>*@WBC1<5=qe;PFvXeepl~Bz{)HX2@F`+YA*RvOvG8}uwhMmaCu9)un z6V&SZcXc58uz1-#_XL1cE|y8%WdBsb58%}6@-#dF=wf9 zH~qS#1|Zo*_F5q3nJY+}k^s%skcrGDZ93`qb(HbHV3wMOinvI&MdMKwuv`p@&D0ic z(n}CmZicgmCf;B%;`CJK5#vvaRPPn580d;&w&bLElx=OIOv7PO>eO^v<=t+$Lu^>G zRp47_I2uC-*?f>kw?@bazqgjfT(QNwYh_R*aT$)NG7lXhn}#_cw#CL=qFJ;;x9ldZ zX)~#?6FA~+1e4$lpv4lG^+AZ^L!D~WWtq7<_Kc!$=#6ImZ~^c!TjglaZN1s$QDnJvi=Ubb{1lF*ok8Yj_W zRCKGoP*l@Ms;~p=CVOSW$+YCyqRR}+3{F#&9$dBIz+Dj4W&(~*w3CI3rddn)9VjXE z`e;6rGZ+5*MXOX!j!{&nqxnKlPqTu}_1LwR#Tg^ZRCBr!s+o?HCe#2iv1?TQF5t4G zn5Th}t7hzv)??uWW$kIWIeN0|>ClVZlC+H|c%pLbDs$P>a8?hbj=v422x|}jm?}c5 zv^w@eZlY`z+-e=ju6OMllXz~U&r)~gT%9AL0!qz*ttsh#kAm`)xd!7k zi=u*MlMNkuR_Q4B^fzX~GWV^snyBJXNJIo_&$ERATTic_oA^Dp7;xHiMQk`Yd>|oC zNM~#I7rlU|wpc*Y*H@dltXv1BS(#{=XBGGkQk7Os82d|#<0LV2!wyV17C%(;Q) zG9ZF?KV~IihpaQ5{$&`v_xCR5$q?iGQ)s+38=LYD#03@!dk-ZQ zmkyb4sNN=E@qlIxbn=c}$dwC$jvoA9@q26A#NT2Yyt({{P|Zonl03 zqpjVxZQJH*+qP}nwzb+`ZQHhO+qV7p_wSvPlbpMgj8xs#RlP~om}AW6DfT<^lm_4A zZyRn|K+&AFLB#+|=Xsb`-yGxRPtJ?nMa!6GMV#$Qg^b1F*bWWf+tFqn;I4Ndr0_i*XCv}AI$`1ty{zY%{3mC$I!rYcMgwmJn*oSDWM`MvB&==vHV z6mUp``5_UGtNML5L{l5@0upNJms;YggNfq}X}Hme3j-l^-k@$96>X0nb=?EroP5s4 zBubQ>RJkRTIAr}0cSqq!H9Qar+e6BoBD=^jm?pa-t(6@x>=#KR*^f?FDrcoouy|#e zc}k1IF=}o}riB?8Kqs+}em>VRV-rg^qYFT{V4%UqZe`Ij6fe!F7N$ZuObHnTPi~Xc zQ_+-hoDSX*A}qZZ3+8c1NoD-hSa8cQEGCFp1%)y>c^Gz;IDFE+FyZ4KZHp|Z2^2Cr 
zqt7>v5^Vq2_2W4kdYG7a4=NU$PWVW4G>;^91@712x5Tt?Og=GV6MfwN(n+~LSa+mT*rS|hy-0T1RSTv3ncQ<(7zc2&@k)v z!yWLW32dGN>*eE}N;tL&);@No1J-qk^J?YEr-p+KSbfx`5CCr*f{zH~GCgxCqZ5b0 ziTR_=O>fP=r)>d*;J{AdIEID&Awy)h>@rb^b_{+f6uQ3=hf+?(&c45Hyx!cbk@|0PT% z9t0IQrA-tiM^iM=VNNy@6`lYM1P(S^97CQ+2p~6rB3M$c ze$PFqqq1iY3N%|u5%gg@m{u8_vPv-m1kF}L@R-F_|#3R`w z_Elp0f{#pCbtIJ>uLhkk5ALOAg?X@|zOF|X`bj3WeKoXo2xvymufpb5uAb!Ct7}1T z%?GG$3^REii?my|CK#45(aoELi4_j@M0eu_^ja^X&1e}UX`uygifZk0#`Szyj5oy)Pl zV!OL~VO^tkC+qpH-pP`f$ZOi!EcVq)D8>WYY<}H|1-B#EX~Kcb!FGE6~D45Md;7cOF{1c=EVn7Mhl} zYk&M%{3KGM_5G}R!~bc^mtpQs=y}byYpzS$;G+8R3qT#z?A71YgUdR<|vET|W&7U3V13u*pF`dh`JmDwOlyQwqpf6)A zNKaVBVxepJImSuR7L;{p4;4bwG&o8Rk|yOmJgv;(^ao2(c=_;^JbYEAb07LdncfE` zR_9gnfUDXO!Bu}7Y8mC&H!Z#5{lm<}L-k$zAP16Q+4KKEiY&c)G1#8*)6zlX4TIoLsH%~M5Ez)V{ zy21>{;+58fgs9yB^e2dIg7%1cQn&LwCn)FNOfa-`yrLgrj4Ue@Lph|}L$_Cx8|0Dm z_&{r_0UD!pp>qTWW)K6sF_w^p9148q)@i5`M)&5byG9v8W93j5h$b2wMzDzq^(wB3 z$2`2Ti$T|3sbVl$Oo4G?PI-j1xbu4V6`8V}`$t4*Jdt0Lm`Xv|7M_=Z?rR;T-84LY z$lF#}R0j^O$Jxxt?=%&}X6Cc7-n$VjaP;Ff-v~oy={$9Zpz^ELZBmx=x(K+o5dL3yRE`kv;qLmwWINdSi!|y3;!9)u$IBh#;C)lv({Q^ z!ltF;_*Es$!a61}RAT@1svX_SGPXZ$$q>NCvUST}2Zm>l&>nqVN_0D~}o|;CF zKo!suB$`Qp8$H|NXIh1Jv{P3Xa=Dr4==j#$QX@LB zMoJ(m>4|QhfQPUWo zm}KKg84OHdc^1B_ZL;;FUAv;*X9Z-y#M17}GrR~l-T(_ZF#q}h7{aBUJtE5KCJiwf zH!KN$|7ApTBg$=gHInY8DfOfKBB*0OWFs+|erP#o3IQfI#3;KdGRi;%4kDW{l^HSM zlxrBmw}65r$KTOjX-{tpV$)6H$?FxhwG?ntXl?)+FG}W?O3{l!A(~YV8>}#YK-O%s z+zQ)7&bYB~>x^X^*F+LwG`Asutyt7w?=WyLvWX{_lTB}p9HR;n@*>Am@arbchHzZ$ zmE4GQ0CxrG5#9`XVvZuatS%$n(yP}o!~c05itxR+FC>gse~>LJyQCj)!BHH>!YtUb z?wFZ{@t%5Sn57Brz?gOJTu9?j!%Wcj-_Herlhg>5`B!w*kCyq+b2Ob$-+a{0yPe-V z_+bw7V7^(}gD0YyO0z_&uP4m)=gap;c(UM&*XS$Zg zRq5nFDae+a(}^0TDHcHs@V*?=MgmPuTlzyb5NtrYj8~F+8?JBOWF}A+&#rX{Ey|`v zJzSB3P($IJ<$3@p|JzA#kp#6)u>(OTj{YHeoHCd-*d7_$F9H3@2fvQuJ((kjqTknr zY0Qoj5x$)BFIX~Rj1C9a4P4aJ8@^9MtLJ0I9J1#Ih`UM~bhDvMSX7`{g`9A>ITzoX zS4Btk6<``o97)XP+5%#62H?mcZA)^lDt_v~21fr2%+}es4>N-UZw6R1XJKb|9wU51 z+M>qPT9m&t?d3#}qJV#Cimsqj6yO}VT-vVu-O5|O+}XyZXNvR^5Oh?q=$@ZoOK`44 
ziCJ9E>pq2KJ|WtoI;op;u@KazHip)C=JZItNjW0j5O_uuyj*3&L++&_ur9P=^qd9_ z2-4@h!Oo^#9f>dnHYF#;oQ72*n=miYpyNo?#{bPDc`uJCKB&{-@bQ%tp?r5BL){Z& zaYUOOm15q+7rJb>~483s-Du4*vj8o#W zL9T0FOT&YE(!B*m=OvNm)wcrMdZrMzgq?&|BJ&`QA>4vzl0H2DClkYrRI}@pG+Rzf ztM3*rl!GB6_<)nXtngUCZkgYBTu_>Em1=|%KG_S)ek8u_cEbsJ+gr#*Z?{HM1}$UwQc^EJy2JeK^td0Cdqg&KI>* zDJS;)>e>w6-OP^uPLH=BIr=gqkhg3?WYA=Y~ua?X9e=b-$Wwh; z!0DQSm}djVS=SX&QIoX*Nd2n@X5dCerWir#8d=;@;)Qu2Vrq`TW2uYihUpTI?%rdx zRT&O!uFYU!_7Jz_@~?wsDo)SWJ%-0C-|!h?^&pKWR_(}K;EXYhNOKM9IUI&T5h!8X zuo9|y(&Dc>+ieynm1c9egpmaWs2K(gvKfUp>yc154OO5icjW3;Nd1DD3oGSw+h_~< zsIzG{8PW?K&!9NyIKPM*UQzi!MDJ~wI3f%Q%K7%CSLXKJY9rIN?iCcq5?Xdvks&ErhOsq4V?UnsD_mdy1CG zhEH<`$F%|Kb%8}2KvbxY(I~*iBY5dVEW>tQta7n6iK$(o^4}9-#1cQl&LNunC-0n3 zu;c?`eo?6qXO(8ATt#8V5lUkx#sI#cJgz|#MM=?`c(Z*5M&!7|9iSo{$x%E}tkAg8~9QLj|ql#FZlQjB0j2WBw!6h(2J{LALjLw`dk zW%#5d@VBVn2WH0UpjanKa2ovfh&h9RDp{QuO2UhQ+ z;ixV6207T(nQVkA{eE4!&e6ppF3v)WR-vmpq2)wyWulaWW5I}5a3vzu#26&f{Q+~Z ziMrc4D=;g>0}GZQZgM3Hup6noKArRB^I5^{JsY>YY4uWOJn`&0d80|G7zLSw9y(Uh zCv`F9hr<|^Zeu$ z_V? 
zyP{{utf0nJqo%k-m1*nT9bUFq4DNtliM8Fm(=MBU5IdudfSF?Y!fdYh;ccnA8*d=%rxw#zQZGnaCSbEll&ZkpqJ?P6g0B5xyE`gC>i?`*@mW11ANb75kvQ zb*TPE21pnGlM#%W3;II_IV_84;W?u;B1?I2Hg~tyH4o zgaiE7HL~~iiCcwN(;T$moia5D+0Ckgt3F?FOySr#CO-&Y_Cg^R&F6n!fF)&i51%0m z9LOGonLgL1Mkz3!s;}#bfP#R+7zOUh)|s&VMJa1J7h~Z}&K_~_L}pkMG9=5ac9Q9W z+kf`Ki4%5iC0UHQ&@VRnNwY|(r;4jceC6&$Br5Fd8VL+IWRRKEE?g08LrGZl$}w8N zQnZUplrCMw9Gd8hqnWM2?bOY*C0jOmJEDB!O?}OBBEkt|IK5j7yxEZqQ8r;uNKJqh zC=zcl1M>;JUWImCW!JQxrR$BLgdm%K+sh2inigT=We#A4Z8}pSvQC#SVr#GE>){@w z&UP;H@W{JP?7ma`6enS)us5Wn=qMnT7hwdqAl9f51=wq)GOk<;x4_IR$*F-sQqz1-n>MTR*5)(> zjf~f69!drOO)pio)RQLBB4oHWI8KQAQD*f$iK*XD+GqAF?ZaCE6rtobkjMWVD>kx9 zVxe-Vw?y=8MpKX>Nfs-srYInOglkS@_072C=1PQ6zDu9QzwMe)B|-d=Mk!0vG$VI2 z^b8=L^l|7PVMyFjJp!7lq8{WIXuA6*&B`>oMg&4?R2u3rbMuM;#;m3 zUMP_zaXWE#8eOa-Y{cw?%o!NZmSyr|Oh8n)T#Dba0g|4X|3flLb5V*(GU??IvRtP$ zjl3wW+W)SEjS@i$1>_DISs$KHrzuiGEUL>kdQF;6RGdfJS=$iIF8T1E3T2ul(UYW- z|B)EA96OJ={-631R{NwwJ4V7w8sdR9i>7|vnkB&{1Od(RWkvoRwSS*yh(3qL)!WvU za5uuT+Gq?bEU>*9ov}K|L@dMjV@e1*`OY@l*cZ1R59g>~A#q>-4_togKf0Np=8pH< z`_lv;XbqkrkUon67Koo@B1sJu+x3^)d-}+G*xFVu@xAq*FgIE3|EW3G{0vW|((Cqh zOiiuO>)LeZo@Cl>SN6Oq)qW!&-^SGZEc}NzqYArAf);%{I?uTqL*7l}w-!IBxZqw5 z9)k2Gn1q0Xcgx`pX$kX(0hX2$IibXr;2J0={=j_(93r#6zBR?$uj#cu4PgCG zNBi%CyN`$6)a-?Y?1G1&uZG=MmQ^0!buM;3cHTfeyP)g>KM+1#oGVu2{=-w|W6^>} z4n|pGD#pLi(a~_>ERjqBCg8)YtyqA>!229H<-qf5q85D9OygvrG+_K^0ln!B8DqF8 zEDyo9$eiN%oJ2~fdSo~y^6cl=L)o7Z?!jkV_atW~_khyaB|V)3hX(azsuraeIic;v zh#sSR@O1%&V<~^5AAZJ=zak4rE()!uM&pjED5TKRNB`muvQ_OXn8=LjDWf3?s*<57 zpba8gLn+LPYbJx50&Vt=l@kP=w1i{Bjnw zmz%cdc6ekf9u(?rxY||`h?21zIi&|Gd|N@IS#rN-K1!Z8j7=8BS}s%?VkHnYauiwZ zikdW?NmQ4sYTcdU)YkxZ6ygGeQdyy>2V+J^ToxH7H6@uT5PSG0Z!`y@rp6uCo5TT1 z%)^gVpgBkec0Cm8C@mN|EOi^XC-Km>K}GLbt7Nj0fW82suJqG`j6n7L6C^ODEScs2 z%KYcq+ioDK(D!A4fiNs%vjYW9DHPiwufXw`X-pMeT(UvMw&`$^YmEZG`c$~Xlcse} zC*Mm{);_g zviHPY?t>j*iPBu?b+lp<)|Yv&oulWc5yOooNI?#$2FQEygrkiCHI6=GI_o3knUZhJ z_=hMB*izX&r>DQ)A291Dw_sJ$U2^jq<$@i;2AL`wv1d*YY~0%sg(a 
z-#b{*nMD$}t3!p4S0HWql609;0%eYd#gVauz&|=pc>Hn9hAE1d0;$JYOKLv$ui~o2*;sddiKD%G%qi$ER zJV9!n)LB29R+3unk|U$c1$04Z6v{V9%98^8k-jL;XEDqH9?D1Uk>q8f4zep_BTb{q zkf$2qK~Mc)lieN~ItC}6kVUbjVX1a;yZX5e!V0vwy@eSSLZ@uPsdP~WnxnhH>02Md zP!VC7Pe%ca0pm`ab$B8Ndc-EML5z`U<%()X$`oy!WCn#8>8L+UL*=Uk^H;rB8 z-@J)c>qnc@Q<*F>;CkFv$b~c%sr_MBNPF;__s0o6za4vCs71}~!{aNKYT0e&iSJMs zdn_FSWTWm$#aGlA3$ibL&q;<=PM-BV{++bpQ&&?y+T*cA2~xiA?ZdqWi1PdK`hK>O!evg%QB8u}an&!dVbQ9M@r!{)v!htEsnZLq~!pUu`>lKWR=$9|lPa6~v3t*OzEuYI~ z8%jnEqCuIz=vBwwV`nzfRgmmIi#%nqhr$`0HD&9SJ%}?Ey1dFf3q^yr|CM9KK>j`KqD&5$G z$*yu9Qq0v@Vuec6_4|*K(O(=pcowaoDhD(OfZl>QI@Tchbb4_#x1+b^UCT-q;JqXi^|3S?7!q`8Z{>)^W{FR8)Fsnvw(|Z$4r=i z0`PO#WbK{8Gaf8UaO*ZpN>jDkX2v={2!{YOVG0fVp+SIF<&BO2Z?HNh^@^c==TrL!WZvgivQvaz zx7g~63AE(_Nf6;EP$XU4Vhp=yrr~^MO$?d)w^l(ZmK!zf-tA5447hToR3ft-cH_^! z2ryTY(=k?bj~S}FAs-R_cwOiOE(MHSS#SYk(ecbAAS)`I431LGCQQiho1eRaC)QMV z%2!it9?U3v>Z^S0P!ee{*t(I#o@I;&QD{coYmAQO#@}Ta;gy6pm2?$VbcavnRlju~ zO)BggP`BR3y;EqFLdYy&;*NwCGBwJQQB%!>dl0{~DcH?HPJ@7!leRu7ID{V6B*lOm zX)Qe?)3?(zR+O|Ro5~+#HMf467FiWFIVxQeWt&q(QxzhSo4~j~GFEW%JsR$mqm^V5 zF4uR~tCoY_RkXBBe5V&p295D_7jc`2xoJHW=_2o^4E^$zRX&mM7K@$=yP|ImHRk;$ z+GS08Dq7Dfm&Rrl-8%?|-epJy=g;vr0PL8;B(^$(eqR~QQ;5$bVky3pj z*JSn*aArx=_1Q*Ena2xE8`=)bszo;CmojkP$K$;9ShhrlBA8KfquWVOlxjOg zsN$>sAlyBxr#=MnSUgfyI=eZr%N8Z-rWN?5Scm)2u%7LdIUo|2iSBvTf5uo6#0|-$ z?X$}MDWfv(F1@-NF}u;mcy5X<=x?(OeA`FE8_|~7uOhSBgxkA6I8g#p-RMOv8aESH z_Qg?ZyydL9aM7(AX(q3wlcVC}0>)Hfj0vV0!<$)cQ0ddREQz+05+%AMI{TMwSk69M zxXZq0JDcGA_oiW1Ac3Rfm>An0?F5WcZno1RxH*eAmcUuj-P@@SZz*pcfuxPFWPe-x zE0xUo$<0Igpn)a6qGy^1b80z?HK^?~_3-UT(`F zSGdhaK>$3jaLIuIDqv_$OK%WzdmbLbG5p_cKkPP&MDzyd6zjI})zpD#>9uBvQj24{ zx-U{gXG_=irupN*jDz3ej&Iktw%uc%WOTLg*|X?_$iT22LBGy?3#I>^$sWu?{sm7@ ztC8mvOUT#9hk!fsuye^(y#J=y7RBIObhBQikzI~5O6oys_r*V~cGHUc44eD#s6AS7 zxj3Thefn*PDBvrQPMp3uT#qmV{8eLI(NU|o3G|Cv7#rvpo=y#tr;0s7%v5J#J#U_6WcHI4H3D_DHPJnk5xH1ZEb%0OtW z&e=EL3Qj$;mc*&FL99y75W$uwcf*M7oPBJDBPuHIXm76WWn-RLkNH=3v+Bb9I*s8S zwVCDg+Ir(6%Jzi3a`^Oh|mh~%i#0wDiFQ8~@Of#Ih~VWeat_K6!d5qh&Z 
zDUOFD&Nn8a?^n-b({I;twVn6TZQ|{2`3|<}G05O^RWL~>XJA}&y&PwqYxmp(Gj^m< zGo*NZ^Qszv6#Yh9xLOF2{lYk-9_s6wo+{O5UtNbWKW{hJrPKSqo6rl}*&#GojxY-l zw7y1-b132a{$*k8fAZP&>GFZt>LI$F2Og_g@uYD0H{C%~ZdFGvGQ);|Sj+YBNI7vx zJ%XPnC{1+)3dIXon#!wG9`j{?zOkNumW9MxE^DlA zn{3t6b>NMsKCX7$*Lf6nnPB3}57WsWVO{hjL#g zhB|+yH-^e;4u`#LQ`d$S>8k5t%PNxgrJdqsG!ql7E<+A`{(k|YhPpUay-qdz;UPtr~ zJ=)QIgY*dAGnz!YP1lB6EP7b?hBT)`9u03}P>p@C?m~V~jYli3V0NbbhKrY$ zIa0TWOyK>x5~fe)$7>ZV$`=)uu*K${aSsEg23i6i3IPC_y;x2vi)PX3tpvh03IA+B zKxA3N79zRUZ7^wA9AG^66@Y%Egas3~B*U8<`K81PkMJY+U0+|bktUVT(ctP5qBL^? zK{=L2ymY0Yh5sk$kt_9`o1%tAp1tDv7fNmY?gxWTGkHd_n@H`|b&5((LgY@e%fvx1 z3WMS2*X3(zFAX8bfH)+9EFC>K+CuD$K5l*hUEH$>>-PT^RiT84U>v=zaz8Pivo zymLnWNp$q>AU3Auj3I1V+QV~h(T^cu;kn5=|@syG&1u>if-2l_?%W~_0O zV>C_**g-*X6Nm?_iw{=H+~;623w%5gckbnK7Y2WWK>q0S3W}mC?V5eTog%(YIiHow z$ZXv#%A{#B`70bj9s%|S`hv~2Xs=Nb?-0vu19uZSLY=FByG}|YK_>n*&z^>y^-Vjk z6E{|llrVFODsWx{7q>);Wy|>TGe-G~E0t$=1aCqYp2bS*1fflXBHi;FX%|fNQ?TRY z?aew?>pC9)D!de*k`fCoqG#-zooOXZspO;PXnrgG$YW?MX$yT} z*#%v@L3c24>!u40(k&~9kH>FLRG*(5)1?Ru8#c?xER-ct^hm9Wa=7uXPy-(Q%(0*w zpSKI&e7xi8>sQUM5w|+;{3`P1)`D`S9ka-A56~((Lab>@2sQv;w@D>P>0CXoaikE8 z1V)xythOy~t~2ny+atU!8{lH^HCqc`*-nztK|RVTI6Fy~Isfo-9NW6S27EN4ijTj> z{sAm3W;nsSvUdX3G1b9O4nt!&TB^8dJNJ2vW(?a~pNf!o-Bm96v*-_(C-z~4UWq!S zn5_+z)jlg-F9AR>l1>qS*yY)V!C#eJZinZsilLGR2`r@)4d-;3mDnhrGp zXG1yH+!a7LQOI%40Hz>sh8h?+h$Y_W0d%y*(Ea0s zxIWnz^_`uC;eN7%qQQ){$*2G7`s2+D`F3Q>2Y25gPqx#*q_HYz zDtWP}5Gc@49IT?RJot%s9uoj1*LS~nLO|{W(^%IPF(?dOA;y&1^`jBrcSVFB8Ydc(mUMmiV5gQi<@J-fXz($(7=oq+O!&H9NATZA&R$$z!=J(L zkmdHqeL<_L-#503mC-)Bf6%NC5CN`CMizWlhE`TNp(57$@ zMmqXg>|SQHB$&Y(9xDG@XvBl%aElb#-^}%A>?qzW-yZBRwIGtraS1(ToiuyiznQ$4 z&XR2sG&r^H?+W)OGFXMk-#J8Hz9W^VCB*WgTnT#j;l$nq%Q_h3=!~S% z)^%_P`$g9t6IrEp7F(vlW1QwI{{(}kI z-VhCQd<-e=RI_54cb!5&%kfURFW!ds=zcVogNA=Xoa+2!YUJWhzL;5wq_co=$ZAnb zNV~!s6vGqpI{h?;Zd()0cJid+H=mqutx;Ghn?^*YL0P420?e@L1d;sO~ zt81%!JC^Hv|9t>k_qSZo(n*Coc|dbXA4QM$7c;L1P`)Kor_}$tz47a#0|Q&+a(h>V z1i(1I5l@P?*7Y^aP=fq%wwPTrUW~x0W6#=X(3HW7qU0L@piubBFt@=wrBD7}9<-6U 
z5DsBoD{-U(y>clX$X_!5*D!#1?cHQ}T{HfiqIc#0HG4bn)-<&`y1Hya7uQS^bgQ0P z^_yMYYdid$n|q*ei>YqFyC{rMVX1}#qqvYN{WZY0|D1-jP>h7BS%BQ|Qw9AxzJ>O7 zAR3P8JNd~c3;ej%|5?6$X-`0qcMeiq|9>4{z)V0T&?(^@g{cGs^08{q7i-c~R*VwijPkWl-=BtYK^e6!Xx<1;XMPY1qZdCYV!3iIQL*7`sWeuy>X z_Qn3R*fih3ohyPHAw$7hXfY1IjVOgU!ka+s^XX>9zhISpFPn1zPE(B)Nl5{vuJhUv z6w5D4!Hp);3{}upUbV1qArO?3&H*xVb*V2+N;Ny6wBC^qCE^h`p$vBe06DA~u;9bD z66I&=x2xfX1Mh{g73A;l{=gh%`jaSN75pQ-9DD9(cffYACDr*#&j9Va#CB=?(3;7D zqyO`jZLJ%et_c!0z?J&2bjz3k6v&$l7mUAK6bPhEN(uEf6rRn^u3D5j(Lv1qSyWU+ ztt=3p(f2!^5l$$k5$l)%ByJ2VHF4gCB-FxOfOrxe{s#fP+9K!d0UfbNhoC?}*Y~{( zHE3OAF{8{}I#FVvbf}8hM<|3E1wt2C0SBnm|0oNwCVrFzH0~{e2iHaNiv@=e{@wS8 zLM)bm@?_GBi)MNc36 z22-bhVcvI?C>oeL0G@QWPmxUBQM2VtY9v+A;oc0{mSEtvbd>W$1r6!D9IvMI35`K? z4!20Te1#XU3>U(bOzv<5<%Lg?=`ijXu(tasG4Wu}F{L3**ZG$W@RFCsj4JLtK*m!} zHL^)?X>8>UE*x0Si!#XB=dbN1jnPzW*2OxGP31cVw-&2MUJ3z($6ck6!EPyk7@W-J z5RmU=Ib_2nf$Nu4S`zAMJq7X>0(8^w>Cp;1!uFkNjoRejsPikkoJ z=rk&2tu)}wFJw(F;EeFnjcOs7l=;GS0jk=j4F+|N8>T&WiQj5fa&IfrEQb9CJyBkN z{n{&0;CH3@4w?%IXWR6K2*bG3>(w&n_64AMfuQteJ-B~rWX~H|B~W31sA}5Gzr9-a zbe4hoSKe1_wxPIsKQF9&;z@WeFSXLZDyRnu(qHQM72w2!{VgUmQsu*|R8)O*$wrT6 zNz{bhI*nt%sYMDq;DieCaY!?dCu$`WKvRW9M!f^_#va zDpL+=BONSJa>UkbZ_pMRL7jJIG1~CpA)3!tvKHEYpOK3WLZHg~K669Zr{RH)c_aT5Om8 zMhT~%opN*c3VjxHTl_5LVe7d6h;N)OfE7+{uQE@FB|Nol=RFCEpiM`l$rzQ+(w)cO zSreq`O5+sY!KS>TY=PDTL!K*u_lRU_7nyD2Xz#&BnW;9w2_ev=GJu#H zSWGu8v?pyy3{dEIWLHbjO=!eutkgJ8bzU54;HP4wRTFstoX5~xPsCFkr#U&EVMN>VJLEB@>5Ut4MgOZbnBrv3`;f$!miNaNEPn zU!&ummR3W?9&*OFPWP+IwH!<~!Z&~E|DI=yUjmQaH)?Qj=9T=@zsNj#V@+IaH6B7@ zoJ>(Lx?$3|Ud<=EW}D(lv&aHD+PX!(3C%neC@$#)N|&qWPrQcZL}AC5`D)@ZW@2^w ztcF`H1?w^|xY1*;Ghn+;jH}jkvIla`V%}(eF?9F9!cKr?6UmwC{zLbc!{EdqoZHkq zPGYzI*M?!vEY=?cq(8*AHtSg5S`WuXNOm5Wi=?FcSulVYgtJHhv$)HHk z6osX9VO=Ph(rJKNluQydH(1f)xQP2$PbRSQRo4k8Y;YBc!9 zCP!aGV{YQc?TpsWRY^y$ieX}&owAV zVl##76EFXkWE>~{JxU=pPmlA*xJHeZd^Z7<;?syc4x~3sYw+~Xmo=ma_XSV+2&i;t ziw?)AjwSLz(a&1$WE;O*+tVXbBBHNbC+MNHlu^LVbCIBmPpsC2CD^!}a24d$6pHQ$ zOVw}lvL;+RsPwQ_N}x>$uD^6TDoT$~@TOupjQuS`WBq6q1$(VYg(b*p 
z;ZULVC<%gHY6R|7*UT}~30N7LE}M*-WptTLVP*8kRbX;@oH;a+=L->Pt`tu1*(wch z(iIu9pwjH?Nt5M!=#uf0Tuiki1nnxG!1uJ5WWqfIcuA+*brpGe#6gCnfeDKxZxmfx z?j6&UsR+JCV^VCDvC^Bz>e`O&8lm#{-t$0oh<_-DGUuv@wvmkD&dw|laZPUC`&hhb zOivHQn2Tr2Sh&u^m!YKybu^@%}+;hC&ug7e4Gd)pj2XK2J>fYzU{I} zT~eR@zADO~z}(OJ)%<(WSq5d`(y{)ipNOLUO<5zMDf1eJTSO(wh#nsy3(@DnRR|_k zPAxr7&HbiY)awE%@1sH4p>UOF1D(0G6$L(YiS-`;LxIB>6pI1%HjqJm*cA|XJ~L{F zVm{Y%>n!4ux6Lnz735FPpu)#2_y*D3EKv3@{o@gHuGw9mBGg-uulrSBO>`+%$Ty=v zAZ@$p)Gt+&hgyV>O?o~fKpbyCnz81Wrl8QSBcYa2o(qrW#nc>G25xTxQu2ldK|T^+ zi&+SiD=I2=ToauZ8@g2%e&$vFqCc3_p%IqmID$5~7<*GJ7ELK#+H^aC-)Mh}{{@)c z*yS`FFn$+@wia`cl1QY@elU`raU}ua1^ehYXDzUbA6lEqVkFpatI}=PZ%Z&A9K014 ztd~{0@t`KU+Q5w|E4w_CyBvh*a%qU05SvX!dWwvYTo>d7-bO$PFQn*fo&7f#L**tj z%bbFRkkKLjCMprh>3AGcBJFrV9;x)%zmSBc6RW}k5+w_s5tEJAEeqQ6UVc$nXf0wa zLigqSOd>!885so}5Fb#4k*p@V(3v>enu|kbt-uprRJk?Nr4-7|n$TIcy04pT)b-hMMeTvIOA{9``_EB1x2rK$r_RZ%GL(_{Ps?byyHXqr!jKpO1P^} zf%VW)bz_lR4FkQ1caosm@^+E56B(CSE9^C@rhn9yNlB&@DuAEk64FESEF zxOEv>z02&?#cf$Z_2@UM0xH|>cTMM^L@FC@6GrL?Yr>{67!I2?^OU8G z`$}Y(o?d~gK|MHZ&wu*}ad0X^b+0EIVPz((?Qw|om#|WK;T8T1&Z)x`2U<^ql9A@$Q>)O z|F%4KK7h1Up~kD}XGaDq;7f_Hni9-6}) zQO*@nu9@VYwFtqQr-N|`G(pF!_fihW$d5?NJmkTTZMWywdkFH+%hQF(=T6uw&HfMF z{?E42=gw{Z0rXdeBM6J72;lK;PyY3Po=Ukw1Nru9r)sW7MY#Q58O)bf>x;g%iuVE) z-V!vHwh9E?=OY~9K`MI&r~aKb5fnLusZu{;a5rhei}xFQpPYVm+SFa}%%C~cQeH09 zu_B6beWT5;034_~&3=k6y!o{0Ix2~a=1{GWZX$rI_vDMjoO890<`L^`ZKB`XL`X_@M{ z&;2;8z%z#^n80FeRdxSXBLSbm|6yRyv}}@FcN2P#d??Btbblp}lqB1_MU^)F3fk^I zt~In0_>lh2u8qCWONvB`MoYNNNyv70y>{pIl07UxiqmX>0fn zwtqoAX(#6u%-W8H=M>(Ol4Ckq^#NeydeD@j{}l3bLO~EqI02?)j3MR;V}vSK+f+IW z*KERoG&)_HkuHEXIw=HSUDwl8$V=4bG%jb(TzkIRIE0~bj8z|~y5$1C3LFtaeos9P zAQhbF0pNKM=k|9g3frx@cSv0TG!-Gr`evzFRpp9Pr(-clJrCxv7*U5R#ueHJaWdftr@)2D!x|VuT?@71T-! 
zrv9ZaN-u{141&T-|FE;|(jjp5k}v^~phPDSa$v_0NivzwNVB{vi`?DHJu=FkzG3r9 zCU%VPE5~)*`$o-5=0yyza)g3GvaKD99*cJetZA*3upD3vJ#_wRzP?A%#!_)Ek=e}u zodZ`!-!IcbmM+Ki(=lkyJx94*b}0=j$xKV(y%ZQHr7l&XCd~rLT^PQ1$~4rI1rLq$ zrX(-nCKhX`b$!2Xe+2bT&3a5R8@6v-0pN9h6i3{%e6bPIxGov3sbn0TvP_h#{AjK? z%h8PUa`%h79$nV!Jm>=qfN4BON1IS^`hAW2aVs;?0G0@l0rvuBcRM)OPx%*fy3kTDeW2_u9}c{+Pf;(JFx)&$?R!U{bFHzRrUP zk4G9QbBC6pLS$*mh|7!xO$Mp}QVWlklI8ZY8k?J=eDdjvkX$sxCVcWAW5u*L?xji* zGo=V~(7+C`d%z4}a=FLdJ&Z#2$AIaYx}D~i)wjROTT<9AzLjpo$nm1SU?5*uWkz~M z(mp)N4Dbw1ysne&n0#kl^%NPT7y#?q{B(7{8Ga?3{B+gb9Z&Ute-QFQf82z8z4)y9 zISD%!!uKypufF5`7r4x?YMEikvgw_<37wyBm53N0+;HrtfKqzjt?v(wOH|E6HFXP; z%~Dp~>|+8S5&0r-6nPgCUj6HR%iKvPcBph%`&L-w+o&9G^M5$I$L>lOZc*2N?Nn^rwr!g`bG~csy;nQu%Q>y3jUSK?15fYw)xD9D)u23&i2%uDdqVDJ zeLhZrKXx&rgF7yVB0C%EGQ%ARuDGRLyk4bNuFMXLE}qc4EbG0G?7!gsH39HV+r7)= z9$&j}4L=Xt+`sI)d_aBCj=zDueqQ!Ix|{E}9p11uSb&PRIl`qOah!C;YWr@_y_g5U zgXB^xIkNEMAChN-R*{KygY2aF2O>>Likp4|ygH{xR@28N5Vkb02dOYdG#ax^*#K}B z^x@i>{hkA|j}RPeDh;iZr916XJF<;%Qh`lElwQgIS%GJT&==3rnqkY^6`PpgkoU(C zqeh}_T3?f=-7h8EpQzu}A9EjjgCxms3t!=!S%<*XF;WjCxNtGjrJ4Q13uQC3vV{s4 z=SCmiAgf5Ts$e^HRR}T7Pr@ZJcJ(6OlW5kZ9fON`Mc6NVTQAM!FO(p>R%$I)R-I?u z0nwL5T2fq6@mCz1cFK4K)LJydkwG$Z17;JD%T0#RlMd(`SV9QeD6{0lBM}vCyBu49 zPJ%ZoSSGAfGA;k3V5E3vM(QTO230f3w0;!>xnt}PL2!eLYS`AA3WlNuZVu^L>%2Ww zLtW8HKR6Nm1cOv_*swWz^=jdF6NR6ajmFeaOD24s zQ*OBFtkRTWqb|YUQHo(#BEw>lV zkK5mG>qFlXzTfNhq#@@>_cQrSb&COZq2yJn{a8ogDG@|XSMbAecKXV5AvGkfM3P=5=vcZ5%(8is`k~c?o*X`{%^|*Ra(?p7+p- zuXV?Uo5}WfhkNJWmT&ja>Af#A`)^0PXFeHBfZ~DL0N|0(I9bq@|*HL1G*ng1?yi;<#s5%3LjAQT^S^>=td}=jUe#q7!BlH2|L#y&pr9krQ{-y z^%!V{3(C;h$E|66_Ik9d3$5qurI{r}8>B2LRC24nC!_Y*e^+V!ccz=x^+`==-;+Xc z%x2t}1=b{5(|`7_QE_nlzrNkS8+`wN|HJ#q)&O%m8?#g8*lUuS0o%+%H7eQJ7 z<~69eYG$0qn-5L|8jWR}Wd0#iW#FX@K;FmPUqC(3>}FMdc2jn43=zOFDWryr;t2P2 z8V-6E=#a5BjvTgTJ{T;yj0YW8xG&ehaIQfhi;n~V&wGt)Sy(XCap+(IBH>R}lYBNs z(gphHN1|O(gss1k1aKG}2W%auc}zUnSL-Pf`Pq7|+(GNSAvzlspENzdQ?!ZB0Eqdy z^l@Ohi*XscoV2Wx&*ye27#9Gl71nhy0)r-oH5^*_k{a!^422@G>bl1~Z)6uy$PIzgH@LD<|U 
zO$=REvvVVt0!l}SAG-$^PqW@bYVR4TzoODxNhTpsm zeDD;5(?Dgu=xQ?JtS@*1Mb+eraxQ#ilsd0|E5@^q5mhva^>+uvE0=E zABv^&hhibGqX%)!+9~8^C;@=adq)E9yFW~=8v0McG7sYb0vM)f% z&h8vQx4gvf%~o)rD*S4bOve{AqJrSw@Gr$u{)Sfj55t+^qgEb8kr#jc_#$>j6o2 zysulB<7+ql@?_8IH^D5boYqD`v0-TH^#}z&Df>SJ5dy>qcrMa%H$9 z;jnBWbPFuAZ=mI%Py6^*DAvYtDBJ`2z|s_vUb}8J8DyB^92iX^=ZLi|X1ygL`zh&A zKD1a$ZX^-c7UJA6m1chQ^TDRab~H=nC@&&ue}VPDOh|(Z3x1edtQIlp7HN-7@-j3KLzcwL8j7xo3AB|A#c9dQI4rbvUk7+8nxZX_J zC(LwwMdvjI9Dg8F@LGX%`BJ=rHx7&#t!}t_rbdm z?$FL$5+CnFO~Fqy+l!Bvl%$Bg6Q+4rQqc+R)SiG65+uuu$!R_E(T&JGbJ$_nVQfZ?)cO>`jWPao5WGqo4ao_FeNUFj^8QZN) zi8Lj7tuPn)9{?J=k6U{F55U5;@QEd;wl1bSCz5w`JvY_PNIpm~kjnNyO@)>~Q^w)sw@Sso{uKsx+XOF&Ac zi+bV(N>Mx`xR|1QWn%N9t=G|K=3CD8qDYth%b@ zJJeK6WWB{fz+eU%t57at8rcuP8afz4>{zgQeS7NH2iI2rl~X`w$>G@3yT<9w6)?;O z4##xn({{im1zv>2^jTl`59&ng&O99N565T)G@1D*YVc|T3mm|@%`(7rT0V~3i7!29 z#@fs_zV671HgykmSO(5za9gE)rhQK^uH$(MIes=~R3n0psGNf*A&=?6z1z84Tyza% z=SxjYbQlXV%MMDW4yhmn1WHIJM`p19_$v%2|K+dH_zroxMMwTme5nn@mIVa{>xukD}#q!{|3CZ{c52NXFs>_qmzsQr*W$&7xW!EdseU=#4Zhl08hlsO zr!)(|Lg^IJrg|Tw;R+X_iMusmv^$J?!^t`5StyMpR%MAF7Jt= zB4C%nvK15u&<{{DVP%1shE5ppACWOj6fUXs8aKuGB7ujRu`KzSr;tW^ z21#96>7#ZnlG3+YRY^$0wM}xBzKsu>65wPNxySF26->g z^c5ACntSY$gYlhnsUsq1lLA)5_lk_&!E<%^qN+tZ>1p_D0SR&x$6v@sZ1+2U$zVzz z&z^*m=;ZTIr*qBRiDKLo_8LYH=c--fXhc;r2jVAa;Uq<#C^vnCn(#vJmIk)BBA@1W zfp_w^gnY6le?M0Lr5Z75Lo&-X6yk`_!}s2_kT72MMz*4Cl%BT$N4XOSUTsLfuCEuu zcgoK~ay3HFRi8KQJnc_2fD}nZ=DhJcVA&aLsdl5l)(ToRwwc(~MuHJflhr1*-0tIV z=kMMzYs1nNISWvW*xBz9)M#f1Gl*ZaAwIrr>Vf6>T#$c-xV;A!p#ZGgB6yaCnL>pj zT5Z(6D3|hjjbx@_Q=r3?oQD1cCYd8>#T`)qnobBl-RvOo#8-Wb*5W;;>u(|J4~D(c zpX;NhSOy(P<7Q8ubgV$E)z}%(1gzU2ENXt8LomOV0?h-6H0l`zQ5JLrz z>#Y#eK;0Ox;(5}f@^_H#mxhc}>BTE&SH2jqAYy8R(CpDr;6%mTKvv+?!6Q|?kkgRf zG~AHmepRDJt4i&tqwvbM-vW@c*%oLOU9gO^1w@?;GCE2l&qJV~gh8 zo5v2K+tY!qO$`$fJ(>61e%4`#4gk{$SXR*;P80?m%E`bnRFu`6r&eQO7gU8-$#7Bs!nQC^oGi1JV+;mG(P2Eh~8soD2 z^5hj{%6fV#*%5otwRXn$(x862Y-Irdy*SeUZYw>d1SvFX5FIWu9DdYlJeu4*Up5d$ z=?8~PhPOzlEY#9@TM!BUEP>ap2U)9|twOC|K7M=9r`2YxwSN0JgaRL6n6K1tgUEBX 
z(t5v=Ft@tQyJYa32m;90>$1s2p~V*oH4Qu}DdKR6awL^FwO*y=2-GI{#v^erjl|!p z-fa2w8B^|lF#s$H(rohGJTj+1Gg*D&xY4K2%VHzWWTRHq19_b2kA)n(ncu5^G0)c zr*rT)4|H}~{4O~w5+^o*6OScfPRYV>`cL%Z{Z5{wHIf>^Js=~;u-Af0K*MjQC0+Z| z{D8H6RCJUXKf9csXFsS2=QP^RjV{fdFiP6HEX^61O`aodcdWWm4wadTx?I{_szSfm zFXovd?~e#qpm!GDTUb$HZ1obsNe3LWZ%c*OyzXSMjMQ=tbcE4Z`MQlf%=@K;s194* z(+q^!boQF2)s%=-wFUu4_hNmQ4+@~#LXEG6mk6}Na?{YJB-^27AbmE3>v=`Dl+;v` z8Ii_zgD))n^h30Cd4{!ESr4(Od?|Sf%FPvD))&(9O(`#SCJGFHE@d^ARIfQZg^IG# zs+K4e|1`zk$<6oCD>!NmWw&4js}ro`@jApq<6kqj#gY;aq$W=Lh9X?83G-1G?T zkbR6i|Egu-;h?5^s8}VV(QWtIwcJ!OHs+Nd0->yCHEp7$(oDAkJJAD` zRp*tgab4uwq|g~1uxLw0I=tAjm{P9M*tYFT0;8_PfSI z@{=HsIWt5PIen65il3ZGfDbtGNxIEC`QR^CMS%0d6#Vv6!9Gxs`G7vER0(@|p=PtG zD)?XEwU(nS<}L)a?z@1*fM(d0EDt z?*w76i!-Zd}?+;M8`Vn*k8+0Ic<{(YSYxx!aeE4Lm5ILUfEt zaa6MMAa0(G89wKF>*q$ka&UKrSJQ8Yv;+LMOO|&&Kp)e0hf>2i!lK@;9e9wlAs7)NZw9E+4}SEnp^f)5qE(r`(c>k_rAavICxf3kLtE> zEl^$B(_qd*gR6cX4V)H%fk$Zf^+HG|*9w?KLpOp3=^B<^X+f#z*9So+1oHz6JLSQ< z04l99%6BSn-PB+wQI5OE*necF0>z zOqW^3)k#b!US($*#A#SBxsHj3i}o>Z=6g#bpA#u@ z=DA=i4zaNy9JjR8{5ymqscLzu5asQ|Y2Fi2Q5u?Yv_9E*bYd?ZGn86E5pUUTidmq` zAz{{sFJ#IvS)*_b_J_QSu(`GAKy=;a*&10m3ROGYk~OdyvUr=or(A#xwklwm-<$qS zug9aHk6VJzpg~W^H7H3>qTj*h=CaANF1HvyYZ@?Cxkf@SfxyWOOEtNnPR6#G#7Dqa zld_F|4IssRyJ?+_>$sf{O(QfgqR^8J>o|E;$pB%quFR`{8TKhUq+r^;{?I)*XIru% zMv6P2sHzS~{?Bk5R}%qBP#AmceQiVIL;1$q^>aMi*Kv1y0cXU!J)QLI);vENEJv`# zpDqCgPHPBZ`Yi$?)By~TV>rz_$ zis9y8m47>h5T{pUW z9Mt~(ik6}$YE^7>54aXEt#|VZ&FkyHyS*B!2V8LN)CN~_jVGZ;S5cEi1FieLe)!mq zSO=m+|L&=LOLbHH5q=1jy&sgaj~=>hdKpopk$88xU?-DYm+VL=jfrCeN|Pg+zBl@H z_vIF$Zvjol^X4@!>j8Z^*j&~rpB;P>?kpx7{7?DS@CK!%H<0D0+Y4i?Aug+3dzK0X zjO#pSo^-b^{s~Yk%HZ6@wWqC^JwSQGKoIXqKr+0-!7*|Z>qn?hNpflzx{^i4%f8|w zeO^G9g3->YZ6S&@AookGB202*BO#$+!-zyy(|;liG$=tarOHp_>bify{NOvS>Av2*KM^lxcelL?&@Z|te}XZU8s33@@Z7h87}c2JfKLZ(EVw@n(Ne0 zbiR+w4`qs|CaMoOMe0o8Lrg4!)MC|rSCM6>my}#!Zd2|br&@B6clP&U0K2erK3{7IRx=Cw993;c* zKlOJMP+fnET><`2;^ijl8{KDDqwPi^9-+dPZ~GnA3`|jBmy`=b%a{!7;dz&+bZ!LMd_G-pR(v9 
zR0R;G4Z}kxiI!ofi=f2L;r47}Lvnaj38`^x9QjXn`ln`fZ+hLpz%A-eHL|pjShM*` z`lfUCg`sXMH%%$pB{-D54BC<$jzgWeFszWX?Q@roLUNwIZxz7WY?}w={#+vgmIo*b zBYOyL24(+-(Jg($Deuh$;)7NX5#vZ2*?4XAq!tJT#8f)3wr}sO_w>2B=Y4A3let39E9KXE8zZ%zMX1SJo0!b}i%JlT;hlCAHf_z}ZrqdrO zQ`xki1wAIT!s`E9&sE_>9 zcSR2V({~-;{ipA0&dD4h`{}zX!ufH={_VRgDgUp&YYWn-`KRwv=>b?}iu_FHnsjTL z=&FT4?x@$|9~NBk<-P~JlCU}25&4#th3UAr3$Mh4wUx&k@29$;1VW^np@S-A?f5vp zmnfxmeYcn245EC>R{7f}ORrQO9KH9Iu}@M8lty%PEWZl{;s%ur#|O@lMv;qoDnopv zj`0*{a*~Bkn1((Ty0SZ|1s^kR%XWmE_F{s5O<&1G$zS+Dn&aC}slW9EeR!QXrWf=3 z;bpNqTQ)`w;Qh~25L8Pxl==u*BY#-kdj8yU@eNA@pqMmQq*wzZcs~f_HgzIy@A#7&Ko6^Qnp#zd4x{tNSkpH5 zE#bo6CVK}K@v1WIdj46T+YN74)2NU97=dAY5XR##dq zm;QuwsUcJ_%4kH{@0%R=9S*ra8@YypASItSa2KpuFX`9ZABpGiHUwvmhRrEXyF<2Z zf7P<==Czxh5ZVP|Y1>E1^R7tDj`r3}>@VwTg>=aIx=9a@3!GhDEtg=nj*kI1weaSq zNO6OHVU>lT;zr*EeK{HcagpU%(5>ISpyqw~EZ<~2G2KHupY##AbR0L30C{I|&eim~ zwt2PBg}uIbm1)sn08@rb{3?$lQ~=JwD&iY%CEp-0Eg)p-?#FS?kEp$u00uJquvmx? zBsl0bpNX2oL)PQSny%ybpf@!gcwVO$X)+t%$Qc$3`OYP0BMfqt&OOh$wZ_XW*38C( z;=H;q*mq>l@N>u*2uke$8EunCHvT4C`JUM0?aO^GG%;MVc)oeD-u`3T)4#b6e;ptj z+G$jcF*|SFH~x+gj0yopt&WzLzB7ufe5A;bS8H>Lhj)RW)eINCg?DbL1GX*LH)mWl z_tTu!YN3EM2zyWd>_=6kIx#0-5B1OvqyG|Sag?KG&})(O+ATkTX0e(4uYIQc=`8b9 z@c&(%0cxk37_)O#sji$iYL@;c6_+A;V>!7POW^bF$8WW)V8gEH=gRXpsk%%bI<1j- zk8DJnV6(kDx~aDPM;H6NBl$-cb2Zur_4?mej^TV+w?8PIM#Nw?mZ~i>4_1K0&^`+BSB*Br7LYhH|A&yD zIPHz9U^t}UBH0+Ifh_J2yso76zd%?3LUfi5>FJ_o4790L2zdLunAv;9j6wOG&rAfP zDt)ddSxd_=R<CKnWejW>{S+sp6kp!;UD9?XSPO{bLsz3PNPX>JPO@st6 z(gJ3Q?D|?zsdK6sx8TDusn&y@J%BT)mX*th2+(zj3n7?-vbB>I%Itxq5Xq#NoMg}8N9lw|Hji0*ocm8@i(rkGwMP3o2sqjf#)p? 
z6)O4_M;~oQq8VEy$0b5X{*c0D(Wxpti&>T zw*l4)(T8*!a<1K=t%`u|h15z-E`Z&>U395eVqL{zsl>c##EKVIjK!usAf|lzjvJmn z#6#No4~_VUunnEK_@oOuaiLGUBLSgq)8X~#V21`8@Bv5znB9bjyt@OZ5R2A#K)KRP zs)Z)XgUKG6fvem+&ecWOdY=w#luT>EcMuENx=KNW5BK5?V>!nwgh~WgS{+q--mPkC zW8&Z(v!`eJ-w1CWpNo5A=tx{n3aQ$a;s~3zm0}Yb+20Re@r60Go}-uU3;0g}Z`#*4 zl74byZ|nNSNr1MXnX0saG6F!RSCP5kOfF!h{S!O_iVQ|~W#l$_ba>Z@sY3>dn-sw)dN2AJ#@|k7KaL{%21YFQ{_fc`U z2@)Ow-#*V7#!HRgzvQMHPpv0r#;zwT)J&eXAH4SukB%ai?R0IsHeTl6j^2*Wqka%W z(^{*F&)1>LZ&EE^*O~=;;L?Wg4Rzo8+qtH1-`BInp}w#Ce>S|ngFe5#SH2@kC9QcQ zdFS}~ngU)dV|xm@?epnd6+n%(N!{@oC2*AwbCGvcN~A1t3#y@T2!Q#b)W7{XY_hUd zRfJtt2=G4hnn~YsN3jkD(Fs95dvxZC72YJ4 z?-W-^y&nS0;qtMaQJfs+o~Ibe8Fc3A^)V5CMPDV$Wy`C__0YCWa2?>zQ1?jhl10vb zyM*To#REbT1Lt%92@1TCr8O@@11x|*F2YL+y5%TlC*}|KbY;xV6<4neigNXw1+X;g zISpU%*Wuz-w9xMOjbUb`1C?J-ie2;@C=GSagau+tG1dnWlMuNVw(h8^QyRmAuVw_e zB1S^y#5pw$69On&d}lzA+@tnM3$bYTnT5JtGccD`dT1Y)EoV2_L+6h#(n795KP z$z-Ks8MBB24pGP1uLi*}Na0||YmogEFk)=bVNSJUPyx%2_7|Y8zcNc`8cbd@UY$)* ziTvgffx^Lc4XDyhVAJq|ZG^#ypYF)XQz$1&q|Ingl{4zCof{k@q9Fet^DuC*>kV5t#Y_c!qdq11Es;vBZweBUUWCeu0hvZ(fYfw0!M zj}f_!u-ZD{T9i%otzpRFMuCItC3db+uGr^vx~PDL3K$UuAMwI}AL;_XYlWGQ)`uB=;v7=HqBHc=7N)qP`Lz?+3Cv)_7QDtrB0#7|LL@< z{GD*q7>Haq-+JzMf{Sn-Eevh<78QO{BB{2%W-PI56p+m^Cq3Add|%k;=$YlGJb7ZF z-2b9K(@h=Yi1fI7AqV5Ic>rL8nOz|&1IuYZHV+^x?-YTurEgpL5}iJ$^UGlKro5<+ zYOPQ?B{aixPwbT#g1#TC98h zNn8ogNs0KvAsR7?#}gJL;lN1AWZPiF4N^tD<6xtwVI}W^pM-hf5G(~}*8hm+(XQnqBAHQN zekH1AwKS>5GSCoHQErNBqH+NN)6Sw}+QSXZG><5v-bTOduD+i3&ccK4^p^S}7OiJ6 zb|LcYhyG3wTAj%gN6bT->79LzkO7~9#*LmTSLyR;_H}8x@9zS>G`TIDj(>H5tSDlu zsJB#NdABS#9ki#^0s*U90#c?Ox9arYd&sg>w%bsMT3oX4Z%{GGD4C62X%AcbsxPq- z%OMlhz+q=%c}15?Fc}lH)!<7hPT!4kZkD-rC)J{v7cDa(SCPv4JA@+(Bm!D6tzb4l zDBb@+84qaltU?E&9A-$bqNtnZw=Wv)isW^qkFN}n725LdSHcB8#yG}##N%JiB z3NatL3ktqDO>I6FE+`=HBO;LSR(&P`k3cRsB%$&{ELKe&`3t0o&Y*aak}B z0t~P}ZE2wiPNK4<7cXUPw`^Uv5eZteks)u6HMjnz;rY2%(2PtT%bYyr9IrnjDr$e7 z7EMi{P#HnLuSFvlX^m&A%r6E9jA@4C2eLN{DJTwoBR#}A&tB**oxcZUEu0#SfGSry 
zd1)pz%@Rozv2~7N|7RVVOU0L!>^$Zhsgxka)K|*l;7T~7rnVxFuWLFnQmfEWiV8_lq~{;R(lpU(CjL_(bWo;nK>qayU~T)fP4+ zoYQjY;0HS?Fj+GHD2o7F82gx~oBL>*&&0eyDGIw>P%$Sx#aAy$CUxI$(G%GgE5_nn;Dja2T!rF_Q{tngA`c%$%Ddm=EG~_YiI4!R_W`ns;8otjLKzzM22mE( z=A9M=&V)Ov`pjorAjOIHq|m$P*+F#tVFnVsbjW3Y^${ZQ?bUE?CiXxL@arE&q!@tV zvayz6%jB>OUL5G=SSW7e;)_93D?zi@?!+_Z4oc}yBz0QD5!5Ia(epZHa?-(;E#3$$)$n2BAy})$!IOIRlnLU=^w%J9zDt0 z_=)R@x}>442?+^m3x6>&>P{*sS&3`OUaX07inLs^_+fflrfbuSvGKAtMSAWbAPHXM zC8=A9kTiDI4U^dW4Sq z1eGK-mzeX%+aeo82;k>;t0KD~LoFmqu_Ap*+_aN3v`kD%komUq#}CP)4(+%@W(U|M z@7d?@%Te<7RyD|?LO5=?Eh243hOgxvd;0Z)?aqD^;E6-}=ihwCwxNfwrpz9u=ij3H zTi#@PW<-DH-AXTCt(5ES@M>5vz>JOytRABi2+ImRLGZtGjEPz~I3(MbEX&U`Pbs5^ z>PFYfk2v)iWc~vFh04U671^)h@yH00xq@Y=2huzoTgqpu*7uLq7!nMwCXev;?eHT+ z#Bt72R!i$ue{d!gU*uC7baI?Jz%vKp0?QLl5@b+E?uFEPDvbe@mua9@nqk`ef?@1{ zV9p`58;z21t#L7OhrZj#V2@nsTkt9skkZ*|Eh3S)qnqaNs&-iBXRr7nd&2?h&GbuO z2j)Vh@b`gicM(8xYhEkUsv_0nC2JvOH(#q_ z)9-h2;oBjNfDf=nVMm?Kk>J}QW`frr7X^#CJYnYIbUGm?Q_;k~uC<|Jw_Y2tJCpx( zhaC<1!Ygr`Z)n!5 zdUVJqZR~}6^jCED41@omLWr2m;4qI+f!NCXK3YG1DMIezpIC7lV zZQlD9hGXkCHy-2~Bis6H)DbqiD?VkAGc2ANw(bWSW4Ct&fdL0_p;}IhrR4~1+#zn9W`K6r#vA9@gs2hVaFdT(B#0Mj$R)0Lu3xpt~ zQ~?V<8H7K3{m*r~)95bZA6fXOat6&vHyRbSmk}y$PN{g~Ls2b%4z!7IAoL6xJ0#?< zy*R_t=t`EAi11&RswTB0n?_HX9o?(tmM02?zSd8~XRQ4;TM_HB*+==Z-J*vfnaRK3 z;iR_@)1#^DmJ?=zm+&?Pq2RJ7nTZc9ocr*_O7f@`^+hNqj2~2+pb9pZJ*QvAHGrzy z>S770AtT(@i-;mnXm4lfJ!R@BKr(*5VfU3gp~<7pY>lAvyvJQc)qfty&C$1dA*!f& zFCt8p>DMYutImWgSWVAglV#)mJ*ZqyKV`*Q&}|kzT8; zoIFgR3ddDovjr1L$5=*YK?Ko)uGKG`On(QVe+eT4SfY%vl=CZ$on)$1^&~AtTn_GZ z8DTM^!?Xqc>cQKWj^E+I?oM~*Cq4=X4qiBIRHcK)y=n307|7%xf&StlFu3y#bM|-vme8qo%)NN z$x)Or(;vtat8zd{;62{fO&9`pdFL&#NfI^r7|v#<0u$i0581OHP;NqC8a3rFV%SiO zLC;J|rM|Jd6ydRiv2tz3dr!8dk)TgP)jS9xy0S7*&-8miPIR;{2as(`ts~zm-x_c= zdbUle**~-Rm(^mACReWpcz9iU=t9{SI?Un$r*fv(Aa{=CB+$S^(KOuDjJqjxZPhbe zSW|snK!tL#VKZ+Nt#`5OZIv`vksI@j+NI+jmd5Dk$yD`U&-(hxGb`?x7>l8z1)&=o zL}?4=3+$aGYuiI$IXnd8#axkNe$;h~8q3!GLhzOoIrf|~%Qpvv3Dk7jjd@be$Zeix 
z_NE6GUe2ha@{i8>RHbbsU9ND)sID|ID?opS%8Dt5@G*gH@ud_Rh73Vc3^thd7UH~D zMB*RH32_htUj)pM9?YjW{ij*0LxbGoHjIvfMI=GgapWgTP`|q$RwAw{N1wMp%65EWz^(dJYe?5^9v2ZJal*77Gy%~14--pnh%iurN89q_sXi<^q zfoVlLI5dOfaz?Q*dE+)v48#9?+qfX}?Dav1N!TqNp7o$zRb52M{R^s`x4%oa0l`MI?EI`!&v{yDok z`gPU29{9-@sORiwy0FN&@w_71$IVHei&wY76&{PPLIh_itc$?A)7SU5q?MK zIQpFXoT_J=3LnQf*fxSAg*^xHo2M--y_bP9%HOp$EI~n3wI;|Y8pI;Ms?O8|A(^29 z7gG{#JW?88G(3i5U3N-{iOOQ?r9tJPcIV(p!h4qNXPtxEUG;uL2S}8uEIVzUdAgRBE>sElnW(JFc<9Z z*jpw}rxHuA*83+d#7(F9x;h48fk!qU6nLVW?1c|3FIJbI1Vb8P^G9gI%7N}8OyWKD z3kkEq)%yl;cm%&OSP$<7Sf)*4wg-rph(OySo3R-vk#tc*{i6R)M|DGUn05*xjQKleJ^eUE8z=3|X#e2@ z5iH)Iv4)H*wQln`CtGECl$>-I1=w5TmIjMiUY#!0*s7w@a$becam&sm3b$fFFy+U` zxUUd1-=p#NR^v{JI~b^Q2LmXeg$ag(QE?s1oZ|}ngwh6g>~f?qWf*VLW8^>~D>C+4 zylS596+YCm@Ay(e#VwR)Ed=ls)F$r%?nGsK(@MjNm5wZ&;|OM~5fTBoF^{M+aakFk z&T8Pu;Vl`8UZX_vxJNXwuuxVG%Hz zQ${`reEuM+1}b{(BH^@0P%YZ0YE z5E~?m!^3V*wGG?y-nZu_;{m1kdCRx46+~uQ5zXZQSI<=X3447COcg>*6yJ5?LnGx zw?+`7K<{3`_F!Ryzt1?!>Zr@J9crWb4`BUOp?IO8fTD{;TU-0xh_;P>Y?h4xlDt?? 
zehy^fy2)Bqc{e+jjr3bOUXY2FCjan_Ibxp*r-zm+f<;8dh8B(U*yis@8?k^H%|=Ld_Z0M zxZ)Uytto_q2JtQn5I$&+`c-ZCUS+TxQ^Auzt7Wy{|FD*VBP|F0yj2(NpkG(*gVQ z`p+-s*E=6OFAzSRurASGh#yYB8#x244Q&s!56TIcnU7gj;dBHCo#ggM+tuF@B7w*T6X$CJ+F9e*b-(-L zbg^itK|o-<>KVxkf^O-ZD%q>(bxWjJxq2JU2Tj9!eA^Lz`H zXaDa7zbdeyEjPl@K%Ti(tM}xyG4%sO2!2+buLnj^5&*>oW4{EV5#mMP#zRY z^m%|EY+l~}!iRf+tXa=brDm{`>FU1Rv$O9$g2+b#WFH8GvTRF+fkQH~*q&fvlBG z`VpBq`EbEcUQGu8caR^(scckI6*@uoP=}1AGsqsVdxEAUc+&Zr0~Uj5?2Gw16AZ@} z;j@)0F709BGG2)fr~&L~FP&u{w=7|Gg)`tBS3*s})>5eQgj_oDt`7fCt%m73=Ki&GipXU4e%{9d_nNfVEgVUczv} z2s(B-7sEOAGJWx8@q3-=@BD2NdS+Pn&7b0kT>gFg;4y7*Pwj#evvPjT1{GVH$o4XV z{w&h_y$C}MW;2XD4>H1!WN<3u1kPGacj&HK^)}aFkm1Iuy%csuB}n*8QT4{lBXbAD zctIOu7>5w4JwywA$;nIT|a(Vew zRg&)LX;hg44se|o0mJnleUFZ8q0+L65!pSa^m6bf(GkQ6-vK`@rq@|jXEwy6bRj` z(q+;CKPd5eEXRi|vNMig5eETjRIY9_s!*E0H zivT2+7imqSZa-@`$Rm2D#Jia7hbM{WCSO%Kw~+eY_WG2~}*0>J7QFagS0%?#|E${K4?qR zMl2#-c!u_vn9&Th01(Wyhz;+=uBukq={4U4xhuOq@xOLU}PI{?%J(`MQfu5v&r7hAUbFs zn_%Yu3S%^!%T-4=s~f_2cjst#|M1X>Z^n7nV3wk zU?;~|HJKWF;S)a8iqCLFd$H0zrdg)5w1)T@CNa6F1p?`sZttqjbf@_}KFbA{Qk>DV zv_E?=bnh(HJc)g{!adh=(u zJvPPC1=@Lp)YM|D`{T{mB6!u*VBLi`Lws+KTE0$Tb|DW=H2{MW2@zqg5i&Mj_|Qji z?sE%+ZekE~`Fgt^sbRXdJ=qdO+m4i>w_mXEQ?1f#7tJJtKifUf8+SN5*J|HU=;XWN zs=^hWggX%0ptjkZmx@azs}gaE-sv{9WCTUN8acY?tg%Xe?TRlFTEYow?cbxg^ND_t zHXo&7YM}OI5jrik7F*Cj(P z?Q%y5oxxBgOJ~KK0f+8eS62}k&;yE7e}{Cj8)qVNSCDi%qf|Bk(}Q-vc5l#vtRu}q zveAVw8P?VR#=6y>AzD6K8(2e9kMY{kGkeMm7!q+WF{}`xS2H*p4Tt2iB32uPE)1Lk z3yQ;sFVWSN?Hh$!Ab9g@Jk+(0{|eJ72P4M;5Q;&BT~Ds0R`3>7Dr%2u77J@{VIR(Hy1gSkmw~>Gs`nXUFH^2-Px71KsLAtbAQYN z_N5MXJ|JJ2GpeEJEKr?5WAi)0;q77^kA(z?UBLBuGmBO}bsQE-FOn?P8^Xh)r? znJ(gnownNnh4pBr6xi-w zvrcU9h_TC5K$jHG=m-1uBU&W(;fli!22Nk`Y_(23_FikVBV1jT)A{ziG_wPHDMi z`>`>zgwfv{4#=5q>;C=D`RjaHjyLEC)M$Mj-EE!!aGsdWGx@p$w&3&5${#kE8ZPQuAr@z>04gX8~qAvQeX;&XzktJQzf2OZ0H8J9&+k*ufGx$+9Me z3+8mqg@e12f%Oa4gy7qrPP2P<_`NzvY~TsRBVTxROzFX7Pnu5I-yi*Sbo|TFRK81l z$4C3*we+Mw$jRx;hDBc-OKBaOW+wd-Pi7A2RoA0r-ZZ_8$V|;;*C93?Zmd-++Jw$? 
zi;7+kMj!Vq+Nl+YLd?KemR!)*=}2W|2ZW2XxxYr4-Sp)Ckp8|@`b1JtF`4P5yhk?k zUc*InN+#TZ|2>D~mod(Dt^8XKGqgjoD)kqJ zGot>&@f0*(N^Zq{Mc3%UGw+O&|F~sNA~^0~k-}c)zSJCSja&ksB5^jmRel=5i86>l z6sb|>cm^w10~7I7bNCJ4KOaJNOIzdV6?q^b@}1Kp{WE z{z9>QgN64W4}RWn{j&f4kGnhPD#Xa&-=F;BoFUThlhggP^J5Ajfk^XK|EC6`;mq?+HjK zIbTyVf)P>PpYFGIc1c_9o8zNn@m9^j-u~hK`93K)T%Y*qJarYN;|;%S-1 z%!v0g0JPnR%ua!M4>XrbWpz1;ZPN~rO=aCAK-?+2CZiJDILSD@7(!PjUXY6zsiiY2 zmWgwzoWRe{!BVY8Its6z6hOI+CLL}VLnVXQNQb4w5IXCeePx>Gc_^bKX) zhlZkN&&m(4<~Trz_PSBxU*!p z^EA8!khm$r*~%o7BA9FnaD8cq<2Hlxo+d$%*V05 zjx!{J!H@9v5)e&ws);buG~|Y~@-<|v`ds|0^u?V?FK##)cm&r{>dyw_qDH_~N`i*g z;8fI^FR#KXG4|9B#~I=k0(1qb`knx#ZoVv%2684lGlDa z{xDQYjz6jt&mZ{9$2BoPc%ol!d=!a|)s}K5WG)E|oWsA|on-Vm?!_TYjM&^ZKUJFv zMdJWU71ok*?dPzz?<7zIikVem>NP78Q>#^zld$$d(mE)pC8OpbaN=U7#c7}Ga<^q$ zuaaQv7Dz~D6ZSES)ETUD-$lRFOG3vGzTStLvwXqrsHwwGrRW!l2T<98fDcb3z z_p?wK(LIg&NghdoL;*AEEz1b%x6uXq0!Ha&*dd?EZm@H5kgW)>o;W2(Anjh88Z2dx zOcNkvjHq$4fnYZwxiyS>(r+v8;$(4Zz;2W(uG(cDyK#6m0OmdJwC=S_MLQtdh3N&R zsbL5}420n$xOd2XR)+|x} zN{T@At0Y)qZ>5P+DUb?t7|+zP@|*bfyLP7hCcpk(@B+Wdxrw{KKPwh0=|-(#lJwM+ z)wPUqbou4d1x~Omn~5a3MLs{&JQ@{Iu~4VFmFr@%U0D`i z4Mg^WXF}8YEC6vOY6kn1Mi0J;jaHIy2dd zcnrBquZs$LQgSaMn7zAJQ2nAMAVH5tBlAi1idcMLh{o~0J3&zpZ*N3sq+)np z>QsZha29FqH2Ner z8Igu!((0h@aLcZQE2R$Eh*!F>R@xc3oFS;7Z&)m=XrXt{%q)v|uwhmaL}+`;<8znX z*ss{bwAXE3o;Ks|{C#xVELiP|QH=?utIrIvi2&A|HPx+flWv89P&zYyne=+eEnWU# zS5!0{?u8JcS#lQFb9PJ8PMU%NqN)xLEj3QrFVNvaDsr!?A@9R?CNm%RfqNSwEly-Nq>b> z94m)yaascP2)OlmG}8>Nl3VesK+AgNNPr$mF9`D-Q_B$zWS-fwcaYEuo7IBeW!K@5 zb5$dSR_0mqql8mpb;BNtCwGb!XFgr<-c}mID-4U2(-n2mnh;`GmZcv?rEYwANm=gb z>|vh0PT0BI`H?nPD5R(W{1y!!geDV9pivaN9dVJiV8Pf369s8^3)oa- zb;k>fPTspCEOGXt5DXP4Y|0Y33VnyEXOWg%V*V+&`lx_}=u^-@7Vao*dhSU@=WnzZrU&%$L-mQWV_mp?}YPj0f+JP6Vq77zvD&&djP8s>6#+g$nL5*3{ zeD~y$K1meMUW{!0C0SUbInMsz3iw>3Li<-B7gbzihwJhjQ{kuK4N!K}c&TJ8^22&y zISbD+8ds(aQE3p7eL4R8`B9QbThE_MpU>W1gO#>O$$lk6El$s>$s_i$5*E5yTjMH& zekjC~$CN266bv2qwsV~zjV{1y@i|9%l6Ar!!kii>@X1aAs0ND2W_|V&u25yv)@=!U!FZ65pZJkfOH%dV 
z>}(HRm_BiQ_dLgI98Y9g{NUg<285i5_}JiR$)Dr?sIN6ZU>ztSitprt1(GF`lw}g8 zce?1s793)?oLbP$+S5nvlHk=_h1tQ0TQO%2ujYHG(?J*KX@n{d0VzQ8OZzx5mtsd9 z2<_`I?UJ*ZGgEpmYmWVDILe%(06)g+k`XR0r#UTsjL$wCwT8Vgr)=3{w{rVkJfKzI zJ$dgGQ(z`WdpWTo{NH~@b26@yAehjnQsMlNiXWQ*i8gQAj zZ~&6z_i5a{ik3?9*&0-Mmdvu6v3rqORwr^}fq#E|eCP+g?t^Vpva#?Fj?Q0fdK-A- zgKzAlG4epsNL6Os4J-L?Ips}X=8>mW0|7n-89DkqOoSk9g{#Mpq9dh^S8+VV z?P)DiStAd<2s0D8G~|bq_ZgBSlRH`|+4=5t)Y(C=U)JCK&Y0(xxw-@;ZGOBGCH)nP zaO1i?pcbvi5hIZ0jiRo|b@MM&!q9mT|Eeyo3+d3}k~$(tj^v$3*v~q=GwgO!B#g!5 zzBp?Od}ekuwzX9<6~*6losAmnpd0*k8`(9jS&I)a-c@$VVfw~)$$?`|KvR=J|9h0= zVQ~XCLT`y!#Ac`Ea$t>y`udiglje-iaM*ZLuv@b?19%VVp0D+-LCB35EK0>OQ-~BZ zy@Iwg=M_V&iefy1@GSTKnJoT`wgRnT(!lCIDxc*@d`5u_!`>lAxG?A8Rh2V8r-l81oDb~!j7CMn60O?+`>oDP)U zR_zHLG@`1(GSGo_M+k3Qc^T{lvVD)og4iG4(=@ygBa7{;6w+OFng11{4By4Tl9JMX zIb8j<5$vq~&q_n^NbqcZ^)D+XH%U7@>%azt*R2)K`gMIbypHe+Yjvd=?1lrR;}_8t zHj$!BjeGfCfWabMUoiG)F)!4a7g2s2MT6i4werf8KRVYTr9VqGWBpp8R&gqp4AFT> z9uzK)n4=Ffb+6&r43_F8!(p7QK{cRTYqD83XKgyRWN#_apEUhx3U|?F=EQ^U-R?}YEWxf!M1*L2dHF&$1+5( ziPiXFO0XA5^Fcxo$3cxe4(7#>YKgSIgUyRP(g36lUrj$L028$ zAr2kNY{xK-`{-TFoz;T@N6_&uylUP1IEtHy>BphE>yD&$lwR|ypKGBGNgCZq{ul9} z7?`OHpizn-zrpk8Xl?uaxk>#u5a{a{8XYBKZYpm?1=1Wt@%HK4U8BIp`iob^l?-yz zag<*YFP&a?@E?|F=AU!Aa^wyf5S~A011#(wU#Z~Dz+EGN!>c4+^38}Y*;Bx9U0kh7qh9@tfol9J0kRO$?C3S z97`>t?HWX2UYegtm~i3FUP6|p>d~ut^yoe!9Xt7(OBbn>1*aN8fl72cQvj&2&bkB1 zsV=oI7qv>ys(Yw#!1)BSfLB4%{}p>{AiJeHYQ-!j@5h?7-b z^KiOFm!n>*8)5!fR>M#p4Ggp}6=G__qW~lOp;`;{)mZ?G2#gzSSg=$es>0zuOme{M7)59U5XHYp zM?;DUfh~bZdQ6_;g}x45WC3@4oRgN;v}9wfXS^{A78Uuz)@aze3^C7kuzcA|Li>`d znCQ32qe#oy8))q+;t3yuq>P;g)*gen@U@o#q!$uH!t_wT$C__{DxFAQPVUQ(k$(NJyCs|*bOs3giSSFbn$R%Ui+~SJfU)e{X_Zho1I~_IsEV4d&z@eAc)gaC>PdyAz8_P1OZ;Kujtr}oZBijBE zc>XfR8s~NzUo5fL0oGt0I_%k8l8x*f@dAij^P9j}Agy@KE;#K=iIiUC)v~q=3kOA0@@rVYYrsg3_jaiv zcb?>yov5_DAaf0ICdt)EI-L<>?AyW%wc(Pa&Gl98v4jOesn!QXlja7XVmQbw6@x2+ zr*s6PCN-2^yT;SLyDYO%WnF9sWF?!+c1Mf7~Falbki>bS!Paxxh_q3)Cx^x5|e1j^BTOSSrTo0QEz58jzdbKKeGtG|*p^ba-CyU)lmw9c@Ob 
z=(Owg+vVL#I48nFD%0#MY=yfJ4`53Ws}koKd7vk$h~Q5EFpL!4kW-zi;1f7Eh4Y2S zjLjuTg&|u}QANa{2A${#h88?n)x_}z*@7uRhG1?pv6c>6hqGP*N2AC_8Q*9NCh?tP zwpTt{9|W)dKR~lR7C$cTq^oX2(l?W>qLWIRj<^bVvmy1~VUlp7V!lk0#*qcEVn~=` z6k~SPT{r`88o^4lSRZc}Dy#aI>NXaxMW?Plx>M1`n$0y8 zb7z>tk|N4t#WiONfe_ZYdTHYUxh5eKA>T@tD<2QaS{zUVJ{>18a&|fc3+I1 zyc1=_F~+(^G|ETmKxuYl{^i0Yoy<(oJUFnE<&lyI_Q=Ni`npo&X5vZH>~)@eipJzNCSM=4 z$Dpze(KF$hXw*@LfSA9s!Il?d$6^SA=yT41th-S$1NS98sl}O9%c4!#T+-f*aY(Bq zghe*)us&IQBKkOvG|D=)Lrgg=S7p8_F9U@=*RxfA&Rb&;)O57MSmsoeRR4Gt$8o-G zVxgvvFJREz2L2JZR_sKUUoC|Euc3$(E;u;Yy5TK#ayg57mwIdu-n;lzV};|oK|SZg z6#J%eCUgBc1|*ow{~t{&$NT%+pGyD#jTf6QUT;|b{~NF1(Zc`#0X_@=|AqhmgTp+BUomDaP9JKxj)9!2RL zGAIVj_$Kl}Jo|Ug1{z=a3ZB}<09EqDdI&#HdSNH(1N&z^KZDgDW!A&f=n{@`*sc0= za+%<5gZN@qvB7d=MVLpU$D<2CH)_C%B>+z`5iG8e6$imz9%mBX_`(vfesEkhBSrze z+eHG^^VefVnDfqvZD3$dWfbmIq$llh4H?ZzP&~8^u1K$(#+ZH;s>K-XUWi)TT)!Dty|F*NW%CB*{D-{wP9fx&%V*l}OOFa6{~%WH(#5V_ln zuoiN93OxwUK0)jBWCS!eV~B6U9zK9|xk$1&zvJHegZdxt|Ga-#8r6U2@X(0pe}1~N zyKe;Zr>Qie_~YzT?0)&+7xXpQ4qmAT`=_TA-v70FLMsSQ-l!+XZ{Nc2Ka0HC4yI4% zAXzJF>Jqli;?EA@uD;fAYNDd<7`j&1s-!wO57toPV|Go;i7p;*u zoNQH4GbRu`g``1J(GNIyO0X>Bsst*oig+blecnpT8mibpE9-tDvSf*%3LIuZCgq6o zAk2$wEJ$=2(iz)akug#cjsXi2Xp;ERGYy7&B)k@%(s|%plVW)_!wf$pkY(% zUt!up_PZ<%Gg7l3ci)~-0%m#QgekM^!b3myBtJypOfSm_^s_4*^<|Q*UW_`QqI@kJ zguOdV*t~iX55g4H;UGSv5#lOH3YVkaYCnQe!25pMgg!ho!CiMR0bt%AuE8kt)r{MO ztpOZPAC30A94OMQiDQATtX(JlXl<0GARAmbTgKf(!d5C{i0K-8J z_C=W5H%DZ%tkaH+sQOSPKWm`>Rwp76wbzSDE(Vsz077mj5)|Nh#`f(%nNv()jQ}%b zgh>!9X33cPy3_S{i5Qq0Uiy%FqV}ux&m^uf(HB$7sq!%OQPH1?`$9xzS-{0`#UaNC ze~YOE@3K7VuNXC{%g`H2H7J6&gWw-Was*^HXrp>J##uxUK&WaAf)oB}Qnh(*I2XeA5M@P(rB+vjy@%0rl$eIs zM7ENfBT5oZK>RIWWI)`q5@h%Ma#7(hlDC4$k@DF_j-ZZHf(JgcOVnhdFucCuh% zVVV?DOx?T7E6i{&gdY(nMx0@$Wv)5|XHb_{w?SvjEFp>wwgT^SX2T|lk?ZwB7In> z%ShnTLWC@2p+_$Z$+L$t2RQ7u#-cw;Nx_MxpmHEgBrF&ptaQ(RP+?(}b3Zpc(trV_ zmSo_S2JFt2iHRz9%0=p zStw~B8wSnc9Ti;gYZd&OwM3IK7lk$#){B=(u}VfgeeNV~USb+wWHfB{!Q`t1D>1wS 
zb|JdD^05nFisuv~UND^trgJu?Q)~&H?X=dpLbGhMokqY=oIElhohVFaZ51g21Acn}}tD$DpYa$OiFn80AdWnQJqEs3oH4iXvrq z7zp!QD_zT$^&3A+*IaaIEBMWM39tVx8z?Ctioj$+fe+pS63WJsYTM4hIqZnNli1EwOE_h= zGtJNao_Q*9LFhUc!*4}N%hbjv>^!iZLRrsIDETOrN(zsqR$=SC{O)u`8f)Pc$~0I6 z^eiat`WmoBV>77!H;6CZMP_yA;u+om3{gC`1r6| z0CUL6T*Z)=46r%!ciczcGen_TeGRpw$`7=RuP{U%1{}V7Yj@A1(hUUJw2pLh?+8wy^2r5$7LEiU6q=%8d}S+&6do>= zMPD;JVxI%J?B5Javd|ZONnQ+5O;Z;U*Y$y_e!O?4tEQp|y7-mqiGX5rDJ0#xDbJ(T z6Cpb6a-fE()kFxRPzT{>!76@vR&k-B`Vtx{{C^(I=bP#L#!lzuICeVDjIo7^mVGxVEOPqTkuSd#HP{bydya`SXK!C&#DfJ4ffwpKlR{w-s;! zOKt$Fi2+)G_XWXZiuB^5Y9@N6x+5KmqX9CoU6$k7--S0D0X~{vp|f`ejv(WsAWoD%Stx}VpNkEO1p6e9PS;; zC^GV~AR4e8h|?#5$l+i!#R4*L(Q?6haNNXJ9@dvlM8h%wC)PxjP@YqgSX(OuOJOeD zJ*xhQk!R9sg+~&7Zf9)NZM@SuWF0r%3nVl~KuWaM0N|?vkSUtV&%U7-1@go31cOys zEx7E3S2FH|=-ck$!J}vU78`M;tT=Wi78`>$*C zy=hKf3)8g#>Bh5o*-8V^H1a<*&*iV`bVV$m$^D8ul{$87sdiy|MXA1fVe?G9oT5tQ zigvp(Pr00-0Uvvq*v@2ei31kOwoD*jkVb-U71}$%ia}oNZ~QARCJ=gc@cIH(^ce7G z=R4>7r7_@7_K)^pZy9mm&-TyXpS1RlkM=1@yL^25{^*E{76IaScaC=V4_gJ47xpV? 
z{`8z0ePg{kq^CtBdC_21B>A25^Zj=x=dHIp2Z!%ZvGiY_=yN0%tj08nlEzGx&C%KW z@82Dq^B{^@`tSGq@9EK|KGlc&IO6m7XEYb|hVi13l6pheYaAqnt|tu8X{)3IGZSfN ztIkVxRYHJtVxZ1aRi8>Y zAg5R7?-;5qh~vd%;8p~;B+@oj6WH-avT_4eD7-1bgm#0i)WMjPi*`<+0n!gmQp73B5alxGWs%~g zDe_fd77Au2n%J=u{FXH}u%~pk(sQwh&!2w}G&6V$eE~6-HR!QCgsJ5lpr~XJMk#P$ zdr(5HQ94ZIMHZj?2f4gY156oQ;mD#(%!POvBQKCfy(q-gTBY=%o+PDDA=a?itFg?C zwPIclwY)`(?;K*7D@;+geA!DvTi0rdfTMntqGG(L#U#$CqczqDC{@^olWi5Rhw(bN zHA;Kq3DS?}=O@AY(?eft+C;nyQ7UN!KgJneFrU#bY}kQN@xbcEgNR=<$rYP9D;vkE z%FL!AHTMm>HL~wRkqE@dtRmW6Cs~fm%-7N)lU>GXnLnF4Vd0fXmq}5U^hk|`)fxfd zY8GGgFm-vS6J^Q;Jsi6~ak%(GAT&(4Ek2s~YpWY2)25C|DL(jTZx z7Wi&*cBEqng+|cq3v)9KjfGSC#*C`~)1{r9QXN}o8kO$0^ngWmj;?1AS_Q*Ur} zJ7DT@JWhmCks}g`xhcBHOjQd9JFbr^U0Qj*8djo)GIL8ID#=f+n^$ZOdt|s`qODP_ zZpw(D<9o@@dUNdL@UDJ0i_oRxJ1PS}&atax>{DyiIgC$@HAf;l&&iCh>P;m1jn$aM z_-g(Lnbt?i)A~4hI{%Jl|L)cADhCIs*;yz&*JY>c;(9ADO|WV#zA{0Y+qAMGx3*e& zoO)Zknb5V%J`uwd2)G}*C@m#aR;LtMsN^sxJG2f1Me3GecrMroYqYIvmYXzFin_yh zZprNN!@A$Z$tW|fXv#ZSy9bs;GPWOR1Riy09svn#g+>P3369ax(Dj^(GY@*P=HdBs zse7oWc`)^6LJKjW20~UBX;#JEyySz*Y{0B*E3^a~Ch zChS=zgZ&OUShmwK#L_hBk<~?3t6K+`L?nPyY8V~DX8Up0iF&;dw(iK{-MAfLRWB5m zJdwm(f)oMNjQUO(80E0Y_C|pj?_%2Prqla&hqt@>bG`>BbDr#}&2)hgify*-z131}JOjWJy8u`h}(-W$scIaun8Rj&%BV zH`rKz@v1O9y1l(^#!-Grt}y9k2mfJ-X8t+nT(UY0>Kt^4&7ZJKJg-rInBO%55Ly~| zPpK9`@38@>PVZ%p&5D!tJ~&E1j-)$$%UR^brd$mn(5MyZ$_SYTU!JKH>$Q#;Rl4#h z)PPFogRn4z!VgL?HLWhl+JYl;lHMPrJ7L}sOJPV{1vrW?dG(NxdJRQsCW|rochXi0 zIhdzuCxIRWtFSXk)GZ{)rcicMX9S9*5$)=tg@W#&jPJGDG>FWgizJ^&g)rePtVGa6 z8WYBMg*a*MZP!M#p;rb957Ix3Q>tbVhyg%aaW=edGUm41;*2ECQM=72wkZN~J3J2D zCvlGV<#_Fn>Om058)RaNAmQu*yhPIJn3)uU`c&708}9YsGhJz57^M{g^Q>lZdRt1e zHrmo8->DjV;n?6w{>o>Br-WxSB8d_gr|QDXiMk-=O>@TYX6(bOiTY4Tz2;g-X>`^b z=sGLG0f^_;GY`RvbV-exh-wP=xPPf-xyVSA$U|G{#jY0&R zZaOC0no5i`r-S9HxdjgL-!^W_=2zQmFS~O(}pmspDLn+)c(`kS~ zq7sscA6i)APxRjb7@|t_IVP!^nEydCSp79h6LOC=zzE@9FoDW!koh)C*|0;!6ay@Z zk_SKV)7RwgW3-=9>jn&?C&5cGVqwy=u?_m|Q0*^{eeQy9zFe%h<4W$M zJC%g;;OP9trh8E6)YgMcQ-E|Vz0SzhfW$L060EqNmu7^zqwg%Hk)|EdJWV52;WWBJ 
z3>(|HLNQT6`7v7Ff20kJbfZc`ky1x-Q`ryG@}U?7H8s{wb4S`^LP2?ZuKZ8*KKpbS H;&MBUSusYV literal 0 HcmV?d00001 diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py index ba982e5872e7..1207dfc7f61b 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -231,9 +231,9 @@ async def create_autoscaling_policy( from google.cloud import dataproc_v1 - def sample_create_autoscaling_policy(): + async def sample_create_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) policy = dataproc_v1.AutoscalingPolicy() @@ -247,7 +247,7 @@ def sample_create_autoscaling_policy(): ) # Make the request - response = client.create_autoscaling_policy(request=request) + response = await client.create_autoscaling_policy(request=request) # Handle the response print(response) @@ -353,14 +353,13 @@ async def update_autoscaling_policy( Disabled check for update_mask, because all updates will be full replacements. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_update_autoscaling_policy(): + async def sample_update_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) policy = dataproc_v1.AutoscalingPolicy() @@ -373,7 +372,7 @@ def sample_update_autoscaling_policy(): ) # Make the request - response = client.update_autoscaling_policy(request=request) + response = await client.update_autoscaling_policy(request=request) # Handle the response print(response) @@ -470,9 +469,9 @@ async def get_autoscaling_policy( from google.cloud import dataproc_v1 - def sample_get_autoscaling_policy(): + async def sample_get_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetAutoscalingPolicyRequest( @@ -480,7 +479,7 @@ def sample_get_autoscaling_policy(): ) # Make the request - response = client.get_autoscaling_policy(request=request) + response = await client.get_autoscaling_policy(request=request) # Handle the response print(response) @@ -587,9 +586,9 @@ async def list_autoscaling_policies( from google.cloud import dataproc_v1 - def sample_list_autoscaling_policies(): + async def sample_list_autoscaling_policies(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListAutoscalingPoliciesRequest( @@ -600,7 +599,7 @@ def sample_list_autoscaling_policies(): page_result = client.list_autoscaling_policies(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -715,14 +714,13 @@ async def delete_autoscaling_policy( delete an autoscaling policy that is in use by one or 
more clusters. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_autoscaling_policy(): + async def sample_delete_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteAutoscalingPolicyRequest( @@ -730,7 +728,7 @@ def sample_delete_autoscaling_policy(): ) # Make the request - client.delete_autoscaling_policy(request=request) + await client.delete_autoscaling_policy(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index e52f792ce63a..b0c7a0963daf 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -426,6 +426,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_autoscaling_policy( @@ -570,7 +571,6 @@ def update_autoscaling_policy( Disabled check for update_mask, because all updates will be full replacements. - .. code-block:: python from google.cloud import dataproc_v1 @@ -906,7 +906,6 @@ def delete_autoscaling_policy( delete an autoscaling policy that is in use by one or more clusters. - .. 
code-block:: python from google.cloud import dataproc_v1 diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py index 4a5a9c4f5820..e2bf3b599959 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py @@ -55,6 +55,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -81,10 +82,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -106,6 +103,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -118,6 +120,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -244,5 +251,9 @@ def delete_autoscaling_policy( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("AutoscalingPolicyServiceTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py index dc3800d9b772..2962dd25f89f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -155,6 +156,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -381,5 +383,9 @@ def delete_autoscaling_policy( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("AutoscalingPolicyServiceGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py index 38bf8786ce07..87267eaef5e9 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -105,6 +105,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -200,6 +201,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py index eebdf5de8f0e..7b5b5f4304c1 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -223,14 +223,13 @@ async def create_batch( r"""Creates a batch workload that executes asynchronously. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_create_batch(): + async def sample_create_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) batch = dataproc_v1.Batch() @@ -246,7 +245,7 @@ def sample_create_batch(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -361,9 +360,9 @@ async def get_batch( from google.cloud import dataproc_v1 - def sample_get_batch(): + async def sample_get_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetBatchRequest( @@ -371,7 +370,7 @@ def sample_get_batch(): ) # Make the request - response = client.get_batch(request=request) + response = await client.get_batch(request=request) # Handle the response print(response) @@ -456,9 +455,9 @@ async def list_batches( from google.cloud import dataproc_v1 - def sample_list_batches(): + async def sample_list_batches(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListBatchesRequest( @@ -469,7 +468,7 @@ def sample_list_batches(): page_result = client.list_batches(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -561,14 +560,13 @@ async def delete_batch( terminal state, the delete fails and the response returns ``FAILED_PRECONDITION``. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_delete_batch(): + async def sample_delete_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteBatchRequest( @@ -576,7 +574,7 @@ def sample_delete_batch(): ) # Make the request - client.delete_batch(request=request) + await client.delete_batch(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py index 2b71958df011..e1f162e3f553 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -431,6 +431,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_batch( @@ -447,7 +448,6 @@ def create_batch( r"""Creates a batch workload that executes asynchronously. - .. code-block:: python from google.cloud import dataproc_v1 @@ -785,7 +785,6 @@ def delete_batch( terminal state, the delete fails and the response returns ``FAILED_PRECONDITION``. - .. 
code-block:: python from google.cloud import dataproc_v1 diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py index dd9dde35add8..6eae6f776959 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -193,5 +200,9 @@ def delete_batch( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("BatchControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py index 2ae7edfe6814..d76c0ae07815 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -158,6 +159,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -352,5 +354,9 @@ def delete_batch(self) -> Callable[[batches.DeleteBatchRequest], empty_pb2.Empty def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("BatchControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py index e120908d0ca0..5536f3a03227 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -203,6 +204,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 0d1876c76803..5e2e6fb8fb84 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -225,14 +225,13 @@ async def create_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_create_cluster(): + async def sample_create_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) cluster = dataproc_v1.Cluster() @@ -250,7 +249,7 @@ def sample_create_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -330,6 +329,17 @@ def sample_create_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -370,14 +380,13 @@ async def update_cluster( [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_update_cluster(): + async def sample_update_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) cluster = dataproc_v1.Cluster() @@ -396,7 +405,7 @@ def sample_update_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -553,6 +562,18 @@ def sample_update_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. 
response = await rpc( request, @@ -586,9 +607,9 @@ async def stop_cluster( from google.cloud import dataproc_v1 - def sample_stop_cluster(): + async def sample_stop_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.StopClusterRequest( @@ -602,7 +623,7 @@ def sample_stop_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -635,6 +656,18 @@ def sample_stop_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -668,9 +701,9 @@ async def start_cluster( from google.cloud import dataproc_v1 - def sample_start_cluster(): + async def sample_start_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.StartClusterRequest( @@ -684,7 +717,7 @@ def sample_start_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -717,6 +750,18 @@ def sample_start_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. 
response = await rpc( request, @@ -752,14 +797,13 @@ async def delete_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_cluster(): + async def sample_delete_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteClusterRequest( @@ -773,7 +817,7 @@ def sample_delete_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -864,6 +908,18 @@ def sample_delete_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -897,14 +953,13 @@ async def get_cluster( r"""Gets the resource representation for a cluster in a project. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_get_cluster(): + async def sample_get_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetClusterRequest( @@ -914,7 +969,7 @@ def sample_get_cluster(): ) # Make the request - response = client.get_cluster(request=request) + response = await client.get_cluster(request=request) # Handle the response print(response) @@ -996,6 +1051,18 @@ def sample_get_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -1021,14 +1088,13 @@ async def list_clusters( r"""Lists all regions/{region}/clusters in a project alphabetically. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_list_clusters(): + async def sample_list_clusters(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListClustersRequest( @@ -1040,7 +1106,7 @@ def sample_list_clusters(): page_result = client.list_clusters(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1145,6 +1211,17 @@ def sample_list_clusters(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -1185,14 +1262,13 @@ async def diagnose_cluster( contains `DiagnoseClusterResults `__. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_diagnose_cluster(): + async def sample_diagnose_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DiagnoseClusterRequest( @@ -1206,7 +1282,7 @@ def sample_diagnose_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -1288,6 +1364,18 @@ def sample_diagnose_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index b7f031ebc047..39f664630211 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -431,6 +431,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_cluster( @@ -449,7 +450,6 @@ def create_cluster( be `ClusterOperationMetadata `__. - .. 
code-block:: python from google.cloud import dataproc_v1 @@ -545,6 +545,17 @@ def sample_create_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -585,7 +596,6 @@ def update_cluster( [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned. - .. code-block:: python from google.cloud import dataproc_v1 @@ -759,6 +769,18 @@ def sample_update_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -842,6 +864,18 @@ def sample_stop_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.stop_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -925,6 +959,18 @@ def sample_start_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.start_cluster] + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -960,7 +1006,6 @@ def delete_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1063,6 +1108,18 @@ def sample_delete_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1096,7 +1153,6 @@ def get_cluster( r"""Gets the resource representation for a cluster in a project. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1184,6 +1240,18 @@ def sample_get_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1209,7 +1277,6 @@ def list_clusters( r"""Lists all regions/{region}/clusters in a project alphabetically. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1322,6 +1389,17 @@ def sample_list_clusters(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_clusters] + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1362,7 +1440,6 @@ def diagnose_cluster( contains `DiagnoseClusterResults `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1456,6 +1533,18 @@ def sample_diagnose_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.diagnose_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py index 2fa2ac05846c..3cc68102d7be 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py @@ -56,6 +56,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -82,10 +83,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -107,6 +104,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -119,6 +121,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -308,5 +315,9 @@ def diagnose_cluster( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("ClusterControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py index a2c8d97727c9..735222933cb6 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -475,5 +477,9 @@ def diagnose_cluster( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("ClusterControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py index 6ebeb2a3be8b..182d7278a297 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py @@ -106,6 +106,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py index 866e6ab90ea6..eac60439e6b3 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -219,9 +219,9 @@ async def submit_job( from google.cloud import dataproc_v1 - def sample_submit_job(): + async def sample_submit_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -235,7 +235,7 @@ def sample_submit_job(): ) # Make the request - response = client.submit_job(request=request) + response = await client.submit_job(request=request) # Handle the response print(response) @@ -311,6 +311,17 @@ def sample_submit_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. 
response = await rpc( request, @@ -339,9 +350,9 @@ async def submit_job_as_operation( from google.cloud import dataproc_v1 - def sample_submit_job_as_operation(): + async def sample_submit_job_as_operation(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -359,7 +370,7 @@ def sample_submit_job_as_operation(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -440,6 +451,17 @@ def sample_submit_job_as_operation(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -473,14 +495,13 @@ async def get_job( r"""Gets the resource representation for a job in a project. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_get_job(): + async def sample_get_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetJobRequest( @@ -490,7 +511,7 @@ def sample_get_job(): ) # Make the request - response = client.get_job(request=request) + response = await client.get_job(request=request) # Handle the response print(response) @@ -569,6 +590,18 @@ def sample_get_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. 
response = await rpc( request, @@ -597,9 +630,9 @@ async def list_jobs( from google.cloud import dataproc_v1 - def sample_list_jobs(): + async def sample_list_jobs(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListJobsRequest( @@ -611,7 +644,7 @@ def sample_list_jobs(): page_result = client.list_jobs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -708,6 +741,17 @@ def sample_list_jobs(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -742,9 +786,9 @@ async def update_job( from google.cloud import dataproc_v1 - def sample_update_job(): + async def sample_update_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -759,7 +803,7 @@ def sample_update_job(): ) # Make the request - response = client.update_job(request=request) + response = await client.update_job(request=request) # Handle the response print(response) @@ -797,6 +841,18 @@ def sample_update_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -825,14 +881,13 @@ async def cancel_job( or `regions/{region}/jobs.get `__. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_cancel_job(): + async def sample_cancel_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.CancelJobRequest( @@ -842,7 +897,7 @@ def sample_cancel_job(): ) # Make the request - response = client.cancel_job(request=request) + response = await client.cancel_job(request=request) # Handle the response print(response) @@ -920,6 +975,18 @@ def sample_cancel_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -945,14 +1012,13 @@ async def delete_job( r"""Deletes the job from the project. If the job is active, the delete fails, and the response returns ``FAILED_PRECONDITION``. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_job(): + async def sample_delete_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteJobRequest( @@ -962,7 +1028,7 @@ def sample_delete_job(): ) # Make the request - client.delete_job(request=request) + await client.delete_job(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]): @@ -1031,6 +1097,18 @@ def sample_delete_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. await rpc( request, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py index 62015b2b2a33..45a6941e3b57 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -402,6 +402,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def submit_job( @@ -504,6 +505,17 @@ def sample_submit_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.submit_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -624,6 +636,17 @@ def sample_submit_job_as_operation(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.submit_job_as_operation] + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -657,7 +680,6 @@ def get_job( r"""Gets the resource representation for a job in a project. - .. code-block:: python from google.cloud import dataproc_v1 @@ -742,6 +764,18 @@ def sample_get_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -870,6 +904,17 @@ def sample_list_jobs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_jobs] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -951,6 +996,18 @@ def sample_update_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -979,7 +1036,6 @@ def cancel_job( or `regions/{region}/jobs.get `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1063,6 +1119,18 @@ def sample_cancel_job(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.cancel_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1088,7 +1156,6 @@ def delete_job( r"""Deletes the job from the project. If the job is active, the delete fails, and the response returns ``FAILED_PRECONDITION``. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1165,6 +1232,18 @@ def sample_delete_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. rpc( request, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py index 52f16ec8a902..9e6d02b07e35 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -293,5 +300,9 @@ def delete_job( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("JobControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py index 420b7956a356..e2bff3e09de8 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -424,5 +426,9 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("JobControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py index e97072a9a814..c9454d3a51f8 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py @@ -106,6 +106,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index b50c39f0b7b2..0e3c256e70a9 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -236,9 +236,9 @@ async def create_workflow_template( from google.cloud import dataproc_v1 - def sample_create_workflow_template(): + async def sample_create_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -253,7 +253,7 @@ def sample_create_workflow_template(): ) # Make the request - response = client.create_workflow_template(request=request) + response = await client.create_workflow_template(request=request) # Handle the response print(response) @@ -364,14 +364,13 @@ async def get_workflow_template( Can retrieve previously instantiated template by specifying optional version parameter. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_get_workflow_template(): + async def sample_get_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetWorkflowTemplateRequest( @@ -379,7 +378,7 @@ def sample_get_workflow_template(): ) # Make the request - response = client.get_workflow_template(request=request) + response = await client.get_workflow_template(request=request) # Handle the response print(response) @@ -478,9 +477,7 @@ async def instantiate_workflow_template( ] = None, *, name: str = None, - parameters: Sequence[ - workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry - ] = None, + parameters: Mapping[str, str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -507,14 +504,13 @@ async def instantiate_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_instantiate_workflow_template(): + async def sample_instantiate_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.InstantiateWorkflowTemplateRequest( @@ -526,7 +522,7 @@ def sample_instantiate_workflow_template(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -555,7 +551,7 @@ def sample_instantiate_workflow_template(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- parameters (:class:`Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (:class:`Mapping[str, str]`): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 @@ -690,14 +686,13 @@ async def instantiate_inline_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_instantiate_inline_workflow_template(): + async def sample_instantiate_inline_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -716,7 +711,7 @@ def sample_instantiate_inline_workflow_template(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -850,14 +845,13 @@ async def update_workflow_template( template must contain version that matches the current server version. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_update_workflow_template(): + async def sample_update_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -871,7 +865,7 @@ def sample_update_workflow_template(): ) # Make the request - response = client.update_workflow_template(request=request) + response = await client.update_workflow_template(request=request) # Handle the response print(response) @@ -966,14 +960,13 @@ async def list_workflow_templates( r"""Lists workflows that match the specified filter in the request. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_list_workflow_templates(): + async def sample_list_workflow_templates(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListWorkflowTemplatesRequest( @@ -984,7 +977,7 @@ def sample_list_workflow_templates(): page_result = client.list_workflow_templates(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1097,14 +1090,13 @@ async def delete_workflow_template( r"""Deletes a workflow template. It does not cancel in-progress workflows. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_workflow_template(): + async def sample_delete_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteWorkflowTemplateRequest( @@ -1112,7 +1104,7 @@ def sample_delete_workflow_template(): ) # Make the request - client.delete_workflow_template(request=request) + await client.delete_workflow_template(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 30cc058ffd1a..b141ce374a9a 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from 
typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -453,6 +453,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_workflow_template( @@ -590,7 +591,6 @@ def get_workflow_template( Can retrieve previously instantiated template by specifying optional version parameter. - .. code-block:: python from google.cloud import dataproc_v1 @@ -693,9 +693,7 @@ def instantiate_workflow_template( ] = None, *, name: str = None, - parameters: Sequence[ - workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry - ] = None, + parameters: Mapping[str, str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -722,7 +720,6 @@ def instantiate_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 @@ -770,7 +767,7 @@ def sample_instantiate_workflow_template(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Mapping[str, str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 @@ -899,7 +896,6 @@ def instantiate_inline_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1056,7 +1052,6 @@ def update_workflow_template( template must contain version that matches the current server version. - .. 
code-block:: python from google.cloud import dataproc_v1 @@ -1163,7 +1158,6 @@ def list_workflow_templates( r"""Lists workflows that match the specified filter in the request. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1283,7 +1277,6 @@ def delete_workflow_template( r"""Deletes a workflow template. It does not cancel in-progress workflows. - .. code-block:: python from google.cloud import dataproc_v1 diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py index f220cbc6e33c..779f15b6a5e8 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -316,5 +323,9 @@ def delete_workflow_template( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("WorkflowTemplateServiceTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py index 4d373025f487..758128b86e46 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -158,6 +159,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -505,5 +507,9 @@ def delete_workflow_template( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("WorkflowTemplateServiceGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py index 139fe9dba3b4..686b55a8b3d0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -203,6 +204,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py index b1685e6b850b..56e4c9429e26 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -71,7 +71,7 @@ class AutoscalingPolicy(proto.Message): secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): Optional. 
Describes how the autoscaler will operate for secondary workers. - labels (Sequence[google.cloud.dataproc_v1.types.AutoscalingPolicy.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this autoscaling policy. Label **keys** must contain 1 to 63 characters, and must conform to `RFC @@ -116,9 +116,13 @@ class AutoscalingPolicy(proto.Message): class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. + + This field is a member of `oneof`_ ``config``. cooldown_period (google.protobuf.duration_pb2.Duration): Optional. Duration between scaling events. A scaling period starts after the update operation from the previous event @@ -130,6 +134,7 @@ class BasicAutoscalingAlgorithm(proto.Message): yarn_config = proto.Field( proto.MESSAGE, number=1, + oneof="config", message="BasicYarnAutoscalingConfig", ) cooldown_period = proto.Field( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py index 761b3fb72ebc..e014bff1c705 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py @@ -225,7 +225,7 @@ class Batch(proto.Message): creator (str): Output only. The email address of the user who created the batch. - labels (Sequence[google.cloud.dataproc_v1.types.Batch.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this batch. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -547,7 +547,7 @@ class SparkSqlBatch(proto.Message): query_file_uri (str): Required. 
The HCFS URI of the script that contains Spark SQL queries to execute. - query_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlBatch.QueryVariablesEntry]): + query_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: ``SET name="value";``). diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index 573d2302e417..cd201379b766 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -89,7 +89,7 @@ class Cluster(proto.Message): Note that Dataproc may set default values, and values may change when clusters are updated. Exactly one of config or virtualClusterConfig must be specified. - labels (Sequence[google.cloud.dataproc_v1.types.Cluster.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this cluster. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -327,20 +327,6 @@ class VirtualClusterConfig(proto.Message): buckets `__). **This field requires a Cloud Storage bucket name, not a ``gs://...`` URI to a Cloud Storage bucket.** - temp_bucket (str): - Optional. A Cloud Storage bucket used to store ephemeral - cluster and jobs data, such as Spark and MapReduce history - files. If you do not specify a temp bucket, Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for - your cluster's temp bucket according to the Compute Engine - zone where your cluster is deployed, and then create and - manage this project-level, per-location bucket. The default - bucket has a TTL of 90 days, but you can use any TTL (or - none) if you specify a bucket (see `Dataproc staging and - temp - buckets `__). 
- **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** kubernetes_cluster_config (google.cloud.dataproc_v1.types.KubernetesClusterConfig): Required. The configuration for running the Dataproc cluster on Kubernetes. @@ -355,10 +341,6 @@ class VirtualClusterConfig(proto.Message): proto.STRING, number=1, ) - temp_bucket = proto.Field( - proto.STRING, - number=2, - ) kubernetes_cluster_config = proto.Field( proto.MESSAGE, number=6, @@ -400,7 +382,7 @@ class EndpointConfig(proto.Message): r"""Endpoint config for this cluster Attributes: - http_ports (Sequence[google.cloud.dataproc_v1.types.EndpointConfig.HttpPortsEntry]): + http_ports (Mapping[str, str]): Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. enable_http_port_access (bool): @@ -543,7 +525,7 @@ class GceClusterConfig(proto.Message): The Compute Engine tags to add to all instances (see `Tagging instances `__). - metadata (Sequence[google.cloud.dataproc_v1.types.GceClusterConfig.MetadataEntry]): + metadata (Mapping[str, str]): The Compute Engine metadata entries to add to all instances (see `Project and instance metadata `__). @@ -1203,7 +1185,7 @@ class IdentityConfig(proto.Message): based secure multi-tenancy user mappings. Attributes: - user_service_account_mapping (Sequence[google.cloud.dataproc_v1.types.IdentityConfig.UserServiceAccountMappingEntry]): + user_service_account_mapping (Mapping[str, str]): Required. Map of user to service account. """ @@ -1227,7 +1209,7 @@ class SoftwareConfig(proto.Message): "1.2.29"), or the `"preview" version `__. If unspecified, it defaults to the latest Debian version. - properties (Sequence[google.cloud.dataproc_v1.types.SoftwareConfig.PropertiesEntry]): + properties (Mapping[str, str]): Optional. The properties to set on daemon config files. 
Property keys are specified in ``prefix:property`` format, @@ -1356,9 +1338,9 @@ class ClusterMetrics(proto.Message): only. It may be changed before final release. Attributes: - hdfs_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.HdfsMetricsEntry]): + hdfs_metrics (Mapping[str, int]): The HDFS metrics. - yarn_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.YarnMetricsEntry]): + yarn_metrics (Mapping[str, int]): The YARN metrics. """ diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py index a4161903208b..2e785931f60b 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py @@ -54,7 +54,7 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. Attributes: - driver_log_levels (Sequence[google.cloud.dataproc_v1.types.LoggingConfig.DriverLogLevelsEntry]): + driver_log_levels (Mapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: @@ -131,7 +131,7 @@ class HadoopJob(proto.Message): extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip. - properties (Sequence[google.cloud.dataproc_v1.types.HadoopJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include @@ -220,7 +220,7 @@ class SparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.SparkJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. 
A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may @@ -300,7 +300,7 @@ class PySparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.PySparkJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc @@ -404,10 +404,10 @@ class HiveJob(proto.Message): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[google.cloud.dataproc_v1.types.HiveJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Hive command: ``SET name="value";``). - properties (Sequence[google.cloud.dataproc_v1.types.HiveJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -472,11 +472,11 @@ class SparkSqlJob(proto.Message): A list of queries. This field is a member of `oneof`_ ``queries``. - script_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET ``name="value";``). - properties (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. 
Properties that conflict with values set by the @@ -546,10 +546,10 @@ class PigJob(proto.Message): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[google.cloud.dataproc_v1.types.PigJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Pig command: ``name=[value]``). - properties (Sequence[google.cloud.dataproc_v1.types.PigJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -623,7 +623,7 @@ class SparkRJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.SparkRJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc @@ -699,7 +699,7 @@ class PrestoJob(proto.Message): client_tags (Sequence[str]): Optional. Presto client tags to attach to this query - properties (Sequence[google.cloud.dataproc_v1.types.PrestoJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values. Used to set Presto `session properties `__ @@ -754,7 +754,7 @@ class JobPlacement(proto.Message): cluster_uuid (str): Output only. A cluster UUID generated by the Dataproc service when the job is submitted. - cluster_labels (Sequence[google.cloud.dataproc_v1.types.JobPlacement.ClusterLabelsEntry]): + cluster_labels (Mapping[str, str]): Optional. Cluster labels to identify a cluster where the job will be submitted. 
""" @@ -993,7 +993,7 @@ class Job(proto.Message): control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as ``driver_output_uri``. - labels (Sequence[google.cloud.dataproc_v1.types.Job.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py index 7ad8176116d2..572f35f99806 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py @@ -44,7 +44,7 @@ class BatchOperationMetadata(proto.Message): The operation type. description (str): Short description of the operation. - labels (Sequence[google.cloud.dataproc_v1.types.BatchOperationMetadata.LabelsEntry]): + labels (Mapping[str, str]): Labels associated with the operation. warnings (Sequence[str]): Warnings encountered during operation @@ -155,7 +155,7 @@ class ClusterOperationMetadata(proto.Message): Output only. The operation type. description (str): Output only. Short description of operation. - labels (Sequence[google.cloud.dataproc_v1.types.ClusterOperationMetadata.LabelsEntry]): + labels (Mapping[str, str]): Output only. Labels associated with the operation warnings (Sequence[str]): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py index d482d90d8c9a..e0fd80249397 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -72,7 +72,7 @@ class RuntimeConfig(proto.Message): Optional. 
Optional custom container image for the job runtime environment. If not specified, a default container image will be used. - properties (Sequence[google.cloud.dataproc_v1.types.RuntimeConfig.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, which are used to configure workload execution. @@ -222,7 +222,7 @@ class RuntimeInfo(proto.Message): r"""Runtime information about workload execution. Attributes: - endpoints (Sequence[google.cloud.dataproc_v1.types.RuntimeInfo.EndpointsEntry]): + endpoints (Mapping[str, str]): Output only. Map of remote access endpoints (such as web interfaces and APIs) to their URIs. output_uri (str): @@ -325,14 +325,14 @@ class KubernetesSoftwareConfig(proto.Message): on Kubernetes. Attributes: - component_version (Sequence[google.cloud.dataproc_v1.types.KubernetesSoftwareConfig.ComponentVersionEntry]): + component_version (Mapping[str, str]): The components that should be installed in this Dataproc cluster. The key must be a string from the KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. - properties (Sequence[google.cloud.dataproc_v1.types.KubernetesSoftwareConfig.PropertiesEntry]): + properties (Mapping[str, str]): The properties to set on daemon config files. Property keys are specified in ``prefix:property`` format, diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py index b2cb76f1532f..17bff61cba8c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py @@ -84,7 +84,7 @@ class WorkflowTemplate(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was last updated. 
- labels (Sequence[google.cloud.dataproc_v1.types.WorkflowTemplate.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. @@ -228,7 +228,7 @@ class ManagedCluster(proto.Message): characters. config (google.cloud.dataproc_v1.types.ClusterConfig): Required. The cluster configuration. - labels (Sequence[google.cloud.dataproc_v1.types.ManagedCluster.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and @@ -270,7 +270,7 @@ class ClusterSelector(proto.Message): selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used. - cluster_labels (Sequence[google.cloud.dataproc_v1.types.ClusterSelector.ClusterLabelsEntry]): + cluster_labels (Mapping[str, str]): Required. The cluster labels. Cluster must have all labels to match. """ @@ -342,7 +342,7 @@ class OrderedJob(proto.Message): Optional. Job is a Presto job. This field is a member of `oneof`_ ``job_type``. - labels (Sequence[google.cloud.dataproc_v1.types.OrderedJob.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and @@ -626,7 +626,7 @@ class WorkflowMetadata(proto.Message): Output only. The workflow state. cluster_name (str): Output only. The name of the target cluster. - parameters (Sequence[google.cloud.dataproc_v1.types.WorkflowMetadata.ParametersEntry]): + parameters (Mapping[str, str]): Map from parameter names to values that were used for those parameters. start_time (google.protobuf.timestamp_pb2.Timestamp): @@ -915,7 +915,7 @@ class InstantiateWorkflowTemplateRequest(proto.Message): The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. 
- parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Mapping[str, str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 characters. diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_dataproc_v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_dataproc_v1.json index 09954e8597ae..127808b74211 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_dataproc_v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_dataproc_v1.json @@ -1,16 +1,65 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dataproc.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dataproc" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.create_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "CreateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + 
"shortName": "create_autoscaling_policy" }, + "description": "Sample for CreateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async", "segments": [ { @@ -43,18 +92,58 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.create_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "CreateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "create_autoscaling_policy" }, + "description": "Sample for CreateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_sync", "segments": [ { @@ -87,19 +176,54 @@ "start": 48, "type": 
"RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.delete_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "DeleteAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_autoscaling_policy" }, + "description": "Sample for DeleteAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async", "segments": [ { @@ -130,18 +254,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.delete_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", "service": { + 
"fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "DeleteAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_autoscaling_policy" }, + "description": "Sample for DeleteAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_sync", "segments": [ { @@ -172,19 +331,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.get_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "GetAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "get_autoscaling_policy" }, + "description": "Sample for GetAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async", "segments": [ { @@ -217,18 +412,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.get_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "GetAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "get_autoscaling_policy" }, + "description": "Sample for GetAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_sync", "segments": [ { @@ -261,19 +492,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.list_autoscaling_policies", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "ListAutoscalingPolicies" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager", + "shortName": "list_autoscaling_policies" }, + "description": "Sample for ListAutoscalingPolicies", "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async", "segments": [ { @@ -306,18 +573,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.list_autoscaling_policies", "method": { + "fullName": 
"google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "ListAutoscalingPolicies" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager", + "shortName": "list_autoscaling_policies" }, + "description": "Sample for ListAutoscalingPolicies", "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_sync", "segments": [ { @@ -350,19 +653,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.update_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "UpdateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" + }, + { + "name": "policy", + "type": 
"google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "update_autoscaling_policy" }, + "description": "Sample for UpdateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async", "segments": [ { @@ -395,18 +734,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.update_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "UpdateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "update_autoscaling_policy" }, + "description": "Sample for UpdateAutoscalingPolicy", "file": 
"dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_sync", "segments": [ { @@ -439,19 +814,63 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.create_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "CreateBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "batch", + "type": "google.cloud.dataproc_v1.types.Batch" + }, + { + "name": "batch_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_batch" }, + "description": "Sample for CreateBatch", "file": "dataproc_v1_generated_batch_controller_create_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_async", "segments": [ { @@ -484,18 +903,62 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_create_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.create_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "CreateBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "batch", + "type": "google.cloud.dataproc_v1.types.Batch" + }, + { + "name": "batch_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_batch" }, + "description": "Sample for CreateBatch", "file": "dataproc_v1_generated_batch_controller_create_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_sync", "segments": [ { @@ -528,19 +991,54 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_create_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.delete_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "DeleteBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" + }, + { + "name": "name", + "type": "str" + }, 
+ { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_batch" }, + "description": "Sample for DeleteBatch", "file": "dataproc_v1_generated_batch_controller_delete_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_async", "segments": [ { @@ -571,18 +1069,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_delete_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.delete_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "DeleteBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_batch" }, + "description": "Sample for DeleteBatch", "file": "dataproc_v1_generated_batch_controller_delete_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_sync", "segments": [ { @@ -613,19 +1146,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_delete_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.get_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "GetBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Batch", + "shortName": "get_batch" }, + "description": "Sample for GetBatch", "file": "dataproc_v1_generated_batch_controller_get_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_GetBatch_async", "segments": [ { @@ -658,18 +1227,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_get_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.get_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "GetBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Batch", + "shortName": "get_batch" }, + "description": "Sample for GetBatch", "file": "dataproc_v1_generated_batch_controller_get_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_GetBatch_sync", "segments": [ { @@ -702,19 +1307,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_get_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.list_batches", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "ListBatches" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesAsyncPager", + "shortName": "list_batches" }, + "description": "Sample for ListBatches", "file": "dataproc_v1_generated_batch_controller_list_batches_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_ListBatches_async", "segments": [ { @@ -747,18 +1388,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_list_batches_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + 
"shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.list_batches", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "ListBatches" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesPager", + "shortName": "list_batches" }, + "description": "Sample for ListBatches", "file": "dataproc_v1_generated_batch_controller_list_batches_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_ListBatches_sync", "segments": [ { @@ -791,19 +1468,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_list_batches_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.create_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "CreateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster", + "type": 
"google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" }, + "description": "Sample for CreateCluster", "file": "dataproc_v1_generated_cluster_controller_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_async", "segments": [ { @@ -836,18 +1557,62 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_create_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.create_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "CreateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" }, + "description": "Sample for CreateCluster", "file": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"dataproc_v1_generated_ClusterController_CreateCluster_sync", "segments": [ { @@ -880,19 +1645,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.delete_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DeleteCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" }, + "description": "Sample for DeleteCluster", "file": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_async", "segments": [ { @@ -925,18 +1734,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.delete_cluster", "method": { + "fullName": 
"google.cloud.dataproc.v1.ClusterController.DeleteCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DeleteCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" }, + "description": "Sample for DeleteCluster", "file": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_sync", "segments": [ { @@ -969,19 +1822,63 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.diagnose_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DiagnoseCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "diagnose_cluster" }, + "description": "Sample for DiagnoseCluster", "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_async", "segments": [ { @@ -1014,18 +1911,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.diagnose_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DiagnoseCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "diagnose_cluster" }, + "description": "Sample for DiagnoseCluster", "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_sync", "segments": [ { @@ -1058,19 
+1999,63 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.get_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "GetCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Cluster", + "shortName": "get_cluster" }, + "description": "Sample for GetCluster", "file": "dataproc_v1_generated_cluster_controller_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_async", "segments": [ { @@ -1103,18 +2088,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_get_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.get_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", 
"shortName": "ClusterController" }, "shortName": "GetCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Cluster", + "shortName": "get_cluster" }, + "description": "Sample for GetCluster", "file": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_sync", "segments": [ { @@ -1147,19 +2176,63 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.list_clusters", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "ListClusters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListClustersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" }, + "description": "Sample for ListClusters", "file": "dataproc_v1_generated_cluster_controller_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_async", "segments": [ { @@ -1192,18 +2265,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_list_clusters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.list_clusters", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "ListClusters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListClustersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager", + "shortName": "list_clusters" }, + "description": "Sample for ListClusters", "file": "dataproc_v1_generated_cluster_controller_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_sync", "segments": [ { @@ -1236,19 +2353,51 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"dataproc_v1_generated_cluster_controller_list_clusters_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.start_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StartCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StartClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_cluster" }, + "description": "Sample for StartCluster", "file": "dataproc_v1_generated_cluster_controller_start_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_async", "segments": [ { @@ -1281,18 +2430,50 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_start_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.start_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StartCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StartClusterRequest" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_cluster" }, + "description": "Sample for StartCluster", "file": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_sync", "segments": [ { @@ -1325,19 +2506,51 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.stop_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StopCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StopClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_cluster" }, + "description": "Sample for StopCluster", "file": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_async", "segments": [ { @@ -1370,18 +2583,50 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py" }, { 
+ "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.stop_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StopCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StopClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "stop_cluster" }, + "description": "Sample for StopCluster", "file": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_sync", "segments": [ { @@ -1414,19 +2659,71 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.update_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "UpdateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + 
"name": "cluster_name", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" }, + "description": "Sample for UpdateCluster", "file": "dataproc_v1_generated_cluster_controller_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_async", "segments": [ { @@ -1459,18 +2756,70 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_update_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.update_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "UpdateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" }, + "description": "Sample for UpdateCluster", "file": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_sync", "segments": [ { @@ -1503,19 +2852,63 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.cancel_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "CancelJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CancelJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "cancel_job" }, + "description": "Sample for CancelJob", "file": "dataproc_v1_generated_job_controller_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_CancelJob_async", "segments": [ { @@ -1548,18 +2941,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_cancel_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + 
"fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.cancel_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "CancelJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CancelJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "cancel_job" }, + "description": "Sample for CancelJob", "file": "dataproc_v1_generated_job_controller_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_CancelJob_sync", "segments": [ { @@ -1592,19 +3029,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_cancel_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.delete_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": 
"job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "dataproc_v1_generated_job_controller_delete_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_DeleteJob_async", "segments": [ { @@ -1635,18 +3115,61 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_delete_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.delete_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "dataproc_v1_generated_job_controller_delete_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_DeleteJob_sync", "segments": [ { @@ -1677,19 +3200,63 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_delete_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": 
true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.get_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "dataproc_v1_generated_job_controller_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_GetJob_async", "segments": [ { @@ -1722,18 +3289,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_get_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.get_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "dataproc_v1_generated_job_controller_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_GetJob_sync", "segments": [ { @@ -1766,19 +3377,63 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_get_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.list_jobs", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListJobsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "dataproc_v1_generated_job_controller_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_ListJobs_async", "segments": [ { @@ -1811,18 +3466,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + 
"title": "dataproc_v1_generated_job_controller_list_jobs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.list_jobs", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListJobsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager", + "shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "dataproc_v1_generated_job_controller_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_ListJobs_sync", "segments": [ { @@ -1855,19 +3554,63 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_list_jobs_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job_as_operation", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJobAsOperation" - } + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "submit_job_as_operation" }, + "description": "Sample for SubmitJobAsOperation", "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_async", "segments": [ { @@ -1900,18 +3643,62 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job_as_operation", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJobAsOperation" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"submit_job_as_operation" }, + "description": "Sample for SubmitJobAsOperation", "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_sync", "segments": [ { @@ -1944,19 +3731,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "submit_job" }, + "description": "Sample for SubmitJob", "file": "dataproc_v1_generated_job_controller_submit_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJob_async", "segments": [ { @@ -1989,18 +3820,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "submit_job" }, + "description": "Sample for SubmitJob", "file": "dataproc_v1_generated_job_controller_submit_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJob_sync", "segments": [ { @@ -2033,19 +3908,51 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.update_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "UpdateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "update_job" }, + "description": "Sample for UpdateJob", "file": "dataproc_v1_generated_job_controller_update_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_UpdateJob_async", "segments": [ { @@ -2078,18 +3985,50 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_update_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.update_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "UpdateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "update_job" }, + "description": "Sample for UpdateJob", "file": "dataproc_v1_generated_job_controller_update_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_UpdateJob_sync", "segments": [ { @@ -2122,19 +4061,59 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_update_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + 
"fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.create_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "CreateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "create_workflow_template" }, + "description": "Sample for CreateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async", "segments": [ { @@ -2167,18 +4146,58 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.create_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "CreateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "create_workflow_template" }, + "description": "Sample for CreateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_sync", "segments": [ { @@ -2211,19 +4230,54 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.delete_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "DeleteWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_workflow_template" }, + "description": "Sample for 
DeleteWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async", "segments": [ { @@ -2254,18 +4308,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.delete_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "DeleteWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_workflow_template" }, + "description": "Sample for DeleteWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_sync", "segments": [ { @@ -2296,19 +4385,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.get_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "GetWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "get_workflow_template" }, + "description": "Sample for GetWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async", "segments": [ { @@ -2341,18 +4466,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.get_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "GetWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "get_workflow_template" }, + "description": "Sample for GetWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_sync", "segments": [ { @@ -2385,19 +4546,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_inline_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateInlineWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "instantiate_inline_workflow_template" }, + "description": "Sample for InstantiateInlineWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async", "segments": [ { @@ -2430,18 +4631,58 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_inline_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateInlineWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "instantiate_inline_workflow_template" }, + "description": "Sample for InstantiateInlineWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_sync", "segments": [ { @@ -2474,19 +4715,59 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "parameters", + "type": "Mapping[str, str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "instantiate_workflow_template" }, + "description": "Sample for InstantiateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async", "segments": [ { @@ -2519,18 +4800,58 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + 
"client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "parameters", + "type": "Mapping[str, str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "instantiate_workflow_template" }, + "description": "Sample for InstantiateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_sync", "segments": [ { @@ -2563,19 +4884,55 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.list_workflow_templates", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", "service": { + "fullName": 
"google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "ListWorkflowTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager", + "shortName": "list_workflow_templates" }, + "description": "Sample for ListWorkflowTemplates", "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async", "segments": [ { @@ -2608,18 +4965,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.list_workflow_templates", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "ListWorkflowTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager", + "shortName": "list_workflow_templates" }, + "description": "Sample for ListWorkflowTemplates", "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_sync", "segments": [ { @@ -2652,19 +5045,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.update_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "UpdateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "update_workflow_template" }, + "description": "Sample for UpdateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async", "segments": [ { @@ -2697,18 +5126,54 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.update_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "UpdateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "update_workflow_template" }, + "description": "Sample for UpdateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_sync", "segments": [ { @@ -2741,7 +5206,8 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py" } ] } diff --git a/packages/google-cloud-dataproc/setup.py b/packages/google-cloud-dataproc/setup.py index 7656c623533e..0b606ad59196 100644 --- a/packages/google-cloud-dataproc/setup.py +++ 
b/packages/google-cloud-dataproc/setup.py @@ -29,10 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] diff --git a/packages/google-cloud-dataproc/testing/constraints-3.6.txt b/packages/google-cloud-dataproc/testing/constraints-3.6.txt deleted file mode 100644 index 3ef972c666c5..000000000000 --- a/packages/google-cloud-dataproc/testing/constraints-3.6.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.31.5 -libcst==0.2.5 -proto-plus==1.15.0 -protobuf==3.19.0 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.7.txt b/packages/google-cloud-dataproc/testing/constraints-3.7.txt index 3ef972c666c5..70a508fd2e96 100644 --- a/packages/google-cloud-dataproc/testing/constraints-3.7.txt +++ b/packages/google-cloud-dataproc/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.31.5 +google-api-core==1.32.0 libcst==0.2.5 proto-plus==1.15.0 protobuf==3.19.0 diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index c7786a626952..d83c5f5e0065 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -91,24 +97,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - AutoscalingPolicyServiceClient, - AutoscalingPolicyServiceAsyncClient, + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), ], ) -def test_autoscaling_policy_service_client_from_service_account_info(client_class): +def test_autoscaling_policy_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -137,27 +145,33 @@ def test_autoscaling_policy_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - AutoscalingPolicyServiceClient, - AutoscalingPolicyServiceAsyncClient, + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), ], ) -def test_autoscaling_policy_service_client_from_service_account_file(client_class): +def test_autoscaling_policy_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as 
factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_autoscaling_policy_service_client_get_transport_class(): @@ -228,6 +242,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -245,6 +260,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -262,6 +278,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -291,6 +308,25 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with 
mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -368,6 +404,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -402,6 +439,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -424,6 +462,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -545,6 +584,7 @@ def test_autoscaling_policy_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -583,6 +623,7 @@ def test_autoscaling_policy_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -603,6 +644,7 @@ def test_autoscaling_policy_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -641,6 +683,7 @@ def test_autoscaling_policy_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -786,7 +829,7 @@ def test_create_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -804,7 +847,7 @@ def test_create_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -818,7 +861,7 @@ async def test_create_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. 
request = autoscaling_policies.CreateAutoscalingPolicyRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -838,7 +881,7 @@ async def test_create_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1052,7 +1095,7 @@ def test_update_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - request.policy.name = "policy.name/value" + request.policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1070,7 +1113,7 @@ def test_update_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "policy.name=policy.name/value", + "policy.name=name_value", ) in kw["metadata"] @@ -1084,7 +1127,7 @@ async def test_update_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - request.policy.name = "policy.name/value" + request.policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1104,7 +1147,7 @@ async def test_update_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "policy.name=policy.name/value", + "policy.name=name_value", ) in kw["metadata"] @@ -1308,7 +1351,7 @@ def test_get_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1326,7 +1369,7 @@ def test_get_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1340,7 +1383,7 @@ async def test_get_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1360,7 +1403,7 @@ async def test_get_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1555,7 +1598,7 @@ def test_list_autoscaling_policies_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1573,7 +1616,7 @@ def test_list_autoscaling_policies_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1587,7 +1630,7 @@ async def test_list_autoscaling_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1607,7 +1650,7 @@ async def test_list_autoscaling_policies_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1744,7 +1787,7 @@ def test_list_autoscaling_policies_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all( isinstance(i, autoscaling_policies.AutoscalingPolicy) for i in results @@ -1839,7 +1882,7 @@ async def test_list_autoscaling_policies_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1889,7 +1932,9 @@ async def test_list_autoscaling_policies_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_autoscaling_policies(request={})).pages: + async for page_ in ( + await client.list_autoscaling_policies(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1992,7 +2037,7 @@ def test_delete_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2010,7 +2055,7 @@ def test_delete_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2024,7 +2069,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. 
request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2042,7 +2087,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2221,6 +2266,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = AutoscalingPolicyServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AutoscalingPolicyServiceClient( @@ -2267,6 +2325,14 @@ def test_autoscaling_policy_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_autoscaling_policy_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2332,6 +2398,28 @@ def test_autoscaling_policy_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", 
autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2416,24 +2504,40 @@ def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( ) -def test_autoscaling_policy_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_autoscaling_policy_service_host_no_port(transport_name): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_autoscaling_policy_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_autoscaling_policy_service_host_with_port(transport_name): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_autoscaling_policy_service_grpc_transport_channel(): @@ -2796,4 +2900,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py 
b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index 93a34e146e2d..7a966d5026df 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -95,24 +101,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - BatchControllerClient, - BatchControllerAsyncClient, + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), ], ) -def test_batch_controller_client_from_service_account_info(client_class): +def test_batch_controller_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -141,27 +149,33 @@ def test_batch_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - BatchControllerClient, - BatchControllerAsyncClient, + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), ], ) -def 
test_batch_controller_client_from_service_account_file(client_class): +def test_batch_controller_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_batch_controller_client_get_transport_class(): @@ -224,6 +238,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -241,6 +256,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -258,6 +274,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -287,6 +304,25 @@ def test_batch_controller_client_client_options( quota_project_id="octopus", 
client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -364,6 +400,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -398,6 +435,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -420,6 +458,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -534,6 +573,7 @@ def test_batch_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -572,6 +612,7 @@ def test_batch_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +633,7 @@ def test_batch_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -630,6 +672,7 @@ def test_batch_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -753,7 +796,7 @@ def test_create_batch_field_headers(): # a field header. Set these to a non-empty value. request = batches.CreateBatchRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_batch), "__call__") as call: @@ -769,7 +812,7 @@ def test_create_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -783,7 +826,7 @@ async def test_create_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.CreateBatchRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_batch), "__call__") as call: @@ -801,7 +844,7 @@ async def test_create_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1028,7 +1071,7 @@ def test_get_batch_field_headers(): # a field header. Set these to a non-empty value. request = batches.GetBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_batch), "__call__") as call: @@ -1044,7 +1087,7 @@ def test_get_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1058,7 +1101,7 @@ async def test_get_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.GetBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_batch), "__call__") as call: @@ -1074,7 +1117,7 @@ async def test_get_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1256,7 +1299,7 @@ def test_list_batches_field_headers(): # a field header. Set these to a non-empty value. request = batches.ListBatchesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1272,7 +1315,7 @@ def test_list_batches_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1286,7 +1329,7 @@ async def test_list_batches_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.ListBatchesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1304,7 +1347,7 @@ async def test_list_batches_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1435,7 +1478,7 @@ def test_list_batches_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, batches.Batch) for i in results) @@ -1524,7 +1567,7 @@ async def test_list_batches_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1570,7 +1613,9 @@ async def test_list_batches_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_batches(request={})).pages: + async for page_ in ( + await client.list_batches(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1666,7 +1711,7 @@ def test_delete_batch_field_headers(): # a field header. Set these to a non-empty value. 
request = batches.DeleteBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: @@ -1682,7 +1727,7 @@ def test_delete_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1696,7 +1741,7 @@ async def test_delete_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.DeleteBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: @@ -1712,7 +1757,7 @@ async def test_delete_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1887,6 +1932,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = BatchControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = BatchControllerClient( @@ -1937,6 +1995,14 @@ def test_batch_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_batch_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2002,6 +2068,28 @@ def test_batch_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + ], +) +def test_batch_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2082,24 +2170,40 @@ def test_batch_controller_grpc_transport_client_cert_source_for_mtls(transport_c ) -def test_batch_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_batch_controller_host_no_port(transport_name): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == 
("dataproc.googleapis.com:443") -def test_batch_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_batch_controller_host_with_port(transport_name): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_batch_controller_grpc_transport_channel(): @@ -2488,4 +2592,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 37caa1140431..6898bbb81441 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -98,24 +104,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ClusterControllerClient, - ClusterControllerAsyncClient, + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), ], ) -def test_cluster_controller_client_from_service_account_info(client_class): +def test_cluster_controller_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -144,27 +152,33 @@ def test_cluster_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ClusterControllerClient, - ClusterControllerAsyncClient, + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), ], ) -def test_cluster_controller_client_from_service_account_file(client_class): +def test_cluster_controller_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_cluster_controller_client_get_transport_class(): @@ -227,6 +241,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -244,6 +259,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -261,6 +277,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -290,6 +307,25 @@ def test_cluster_controller_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = 
client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -367,6 +403,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -401,6 +438,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -423,6 +461,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -537,6 +576,7 @@ def test_cluster_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -575,6 +615,7 @@ def test_cluster_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -595,6 +636,7 @@ def test_cluster_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -633,6 +675,7 @@ def test_cluster_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved 
and used as the credentials. @@ -747,6 +790,69 @@ async def test_create_cluster_async_from_dict(): await test_create_cluster_async(request_type=dict) +def test_create_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.CreateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.CreateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value", + ) in kw["metadata"] + + def test_create_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -932,6 +1038,71 @@ async def test_update_cluster_async_from_dict(): await test_update_cluster_async(request_type=dict) +def test_update_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.UpdateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clusters.UpdateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_update_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1137,6 +1308,71 @@ async def test_stop_cluster_async_from_dict(): await test_stop_cluster_async(request_type=dict) +def test_stop_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StopClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stop_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StopClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1220,6 +1456,71 @@ async def test_start_cluster_async_from_dict(): await test_start_cluster_async(request_type=dict) +def test_start_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StartClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StartClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1303,6 +1604,71 @@ async def test_delete_cluster_async_from_dict(): await test_delete_cluster_async(request_type=dict) +def test_delete_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DeleteClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DeleteClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_delete_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1502,6 +1868,69 @@ async def test_get_cluster_async_from_dict(): await test_get_cluster_async(request_type=dict) +def test_get_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.GetClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = clusters.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.GetClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_get_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1691,6 +2120,69 @@ async def test_list_clusters_async_from_dict(): await test_list_clusters_async(request_type=dict) +def test_list_clusters_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.ListClustersRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = clusters.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value®ion=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.ListClustersRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clusters.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_list_clusters_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1831,11 +2323,19 @@ def test_list_clusters_pager(transport_name: str = "grpc"): ) metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) pager = client.list_clusters(request={}) assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, clusters.Cluster) for i in results) @@ -1924,7 +2424,7 @@ async def test_list_clusters_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1970,7 +2470,9 @@ async def test_list_clusters_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_clusters(request={})).pages: + async for page_ in ( + await client.list_clusters(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2059,6 +2561,71 @@ async def test_diagnose_cluster_async_from_dict(): await test_diagnose_cluster_async(request_type=dict) +def test_diagnose_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DiagnoseClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_diagnose_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DiagnoseClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_diagnose_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2252,6 +2819,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ClusterControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ClusterControllerClient( @@ -2306,6 +2886,14 @@ def test_cluster_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_cluster_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2371,6 +2959,28 @@ def test_cluster_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock,
None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2451,24 +3061,40 @@ def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport ) -def test_cluster_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_controller_host_no_port(transport_name): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_cluster_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_controller_host_with_port(transport_name): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_cluster_controller_grpc_transport_channel(): @@ -2860,4 +3486,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index bbd5768712d6..b7919cd3933f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py 
@@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -92,24 +98,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - JobControllerClient, - JobControllerAsyncClient, + (JobControllerClient, "grpc"), + (JobControllerAsyncClient, "grpc_asyncio"), ], ) -def test_job_controller_client_from_service_account_info(client_class): +def test_job_controller_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -138,27 +144,31 @@ def test_job_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - JobControllerClient, - JobControllerAsyncClient, + (JobControllerClient, "grpc"), + (JobControllerAsyncClient, "grpc_asyncio"), ], ) -def test_job_controller_client_from_service_account_file(client_class): +def test_job_controller_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_job_controller_client_get_transport_class(): @@ -221,6 +231,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -238,6 +249,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -255,6 +267,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -284,6 +297,25 @@ def test_job_controller_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = 
client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -351,6 +383,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -385,6 +418,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -407,6 +441,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -521,6 +556,7 @@ def test_job_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -559,6 +595,7 @@ def test_job_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -579,6 +616,7 @@ def test_job_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -617,6 +655,7 @@ def test_job_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -750,6 +789,67 @@ async def test_submit_job_async_from_dict(): await test_submit_job_async(request_type=dict) +def test_submit_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = jobs.Job() + client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_submit_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -939,6 +1039,73 @@ async def test_submit_job_as_operation_async_from_dict(): await test_submit_job_as_operation_async(request_type=dict) +def test_submit_job_as_operation_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_submit_job_as_operation_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1147,6 +1314,69 @@ async def test_get_job_async_from_dict(): await test_get_job_async(request_type=dict) +def test_get_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = jobs.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_get_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1336,6 +1566,69 @@ async def test_list_jobs_async_from_dict(): await test_list_jobs_async(request_type=dict) +def test_list_jobs_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = jobs.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.ListJobsResponse() + ) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_list_jobs_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1476,11 +1769,19 @@ def test_list_jobs_pager(transport_name: str = "grpc"): ) metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) pager = client.list_jobs(request={}) assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, jobs.Job) for i in results) @@ -1569,7 +1870,7 @@ async def test_list_jobs_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1615,7 +1916,9 @@ async def test_list_jobs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_jobs(request={})).pages: + async for page_ in ( + await client.list_jobs(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1723,6 +2026,69 @@ async def test_update_job_async_from_dict(): await test_update_job_async(request_type=dict) +def test_update_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = jobs.Job() + client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1825,6 +2191,69 @@ async def test_cancel_job_async_from_dict(): await test_cancel_job_async(request_type=dict) +def test_cancel_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value.
+ request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = jobs.Job() + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_cancel_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2006,6 +2435,69 @@ async def test_delete_job_async_from_dict(): await test_delete_job_async(request_type=dict) +def test_delete_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.DeleteJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.DeleteJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_delete_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2197,6 +2689,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = JobControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default.
client = JobControllerClient( @@ -2250,6 +2755,14 @@ def test_job_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_job_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2315,6 +2828,28 @@ def test_job_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +def test_job_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2395,24 +2930,40 @@ def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_cla ) -def test_job_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_job_controller_host_no_port(transport_name): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def 
test_job_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_job_controller_host_with_port(transport_name): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_job_controller_grpc_transport_channel(): @@ -2773,4 +3324,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 6b297b0fcc58..eb71a5f79841 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -100,24 +106,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - WorkflowTemplateServiceClient, - WorkflowTemplateServiceAsyncClient, + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), ], ) -def test_workflow_template_service_client_from_service_account_info(client_class): +def test_workflow_template_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -146,27 +154,33 @@ def test_workflow_template_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - WorkflowTemplateServiceClient, - WorkflowTemplateServiceAsyncClient, + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), ], ) -def test_workflow_template_service_client_from_service_account_file(client_class): +def test_workflow_template_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: 
factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_workflow_template_service_client_get_transport_class(): @@ -233,6 +247,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -250,6 +265,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -267,6 +283,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -296,6 +313,25 @@ def test_workflow_template_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, 
"__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -373,6 +409,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -407,6 +444,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -429,6 +467,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -549,6 +588,7 @@ def test_workflow_template_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -587,6 +627,7 @@ def test_workflow_template_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -607,6 +648,7 @@ def test_workflow_template_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -645,6 +687,7 @@ def test_workflow_template_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -789,7 +832,7 @@ def test_create_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -807,7 +850,7 @@ def test_create_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -821,7 +864,7 @@ async def test_create_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. 
request = workflow_templates.CreateWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -841,7 +884,7 @@ async def test_create_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1054,7 +1097,7 @@ def test_get_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1072,7 +1115,7 @@ def test_get_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1086,7 +1129,7 @@ async def test_get_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1106,7 +1149,7 @@ async def test_get_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1295,7 +1338,7 @@ def test_instantiate_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1313,7 +1356,7 @@ def test_instantiate_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1327,7 +1370,7 @@ async def test_instantiate_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1347,7 +1390,7 @@ async def test_instantiate_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1546,7 +1589,7 @@ def test_instantiate_inline_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1564,7 +1607,7 @@ def test_instantiate_inline_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1578,7 +1621,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1598,7 +1641,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1811,7 +1854,7 @@ def test_update_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() - request.template.name = "template.name/value" + request.template.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1829,7 +1872,7 @@ def test_update_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "template.name=template.name/value", + "template.name=name_value", ) in kw["metadata"] @@ -1843,7 +1886,7 @@ async def test_update_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() - request.template.name = "template.name/value" + request.template.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1863,7 +1906,7 @@ async def test_update_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "template.name=template.name/value", + "template.name=name_value", ) in kw["metadata"] @@ -2058,7 +2101,7 @@ def test_list_workflow_templates_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2076,7 +2119,7 @@ def test_list_workflow_templates_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2090,7 +2133,7 @@ async def test_list_workflow_templates_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2110,7 +2153,7 @@ async def test_list_workflow_templates_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2247,7 +2290,7 @@ def test_list_workflow_templates_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, workflow_templates.WorkflowTemplate) for i in results) @@ -2340,7 +2383,7 @@ async def test_list_workflow_templates_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2390,7 +2433,9 @@ async def test_list_workflow_templates_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_workflow_templates(request={})).pages: + async for page_ in ( + await client.list_workflow_templates(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2493,7 +2538,7 @@ def test_delete_workflow_template_field_headers(): # a field header. Set these to a non-empty value. 
request = workflow_templates.DeleteWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2511,7 +2556,7 @@ def test_delete_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2525,7 +2570,7 @@ async def test_delete_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2543,7 +2588,7 @@ async def test_delete_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2722,6 +2767,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = WorkflowTemplateServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = WorkflowTemplateServiceClient( @@ -2775,6 +2833,14 @@ def test_workflow_template_service_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_workflow_template_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2840,6 +2906,28 @@ def test_workflow_template_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2924,24 +3012,40 @@ def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( ) -def test_workflow_template_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_workflow_template_service_host_no_port(transport_name): client = WorkflowTemplateServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == 
"dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_workflow_template_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_workflow_template_service_host_with_port(transport_name): client = WorkflowTemplateServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_workflow_template_service_grpc_transport_channel(): @@ -3364,4 +3468,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, )